diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..268209c7
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,10 @@
+.vscode
+build
+FFmpeg
+player/360SCVPAPI.h
+player/MediaSource/360SCVPAPI.h
+player/MediaSource/OmafDashAccessApi.h
+player/OmafStructure.h
+player/data_type.h
+player/error.h
+player/ns_def.h
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 00000000..73d6a207
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,36 @@
+language: generic
+os: linux
+dist: bionic
+
+env:
+  global:
+    - DOCKER_CACHE_FILE=/home/travis/docker/cache.tar.gz
+    - DOCKER_REPOSITORY=immersive
+    - DOCKER_TAG=ci
+
+cache:
+  directories:
+    - /home/travis/docker/
+
+before_install:
+  - | # Load docker image.
+    if [ -f ${DOCKER_CACHE_FILE} ]; then
+      gunzip -c ${DOCKER_CACHE_FILE} | sudo docker load || true
+    fi
+
+install:
+  - | # Build docker image from cache or from scratch.
+    if [ -f ${DOCKER_CACHE_FILE} ]; then
+      sudo docker build --cache-from ${DOCKER_REPOSITORY}:${DOCKER_TAG} -t ${DOCKER_REPOSITORY}:${DOCKER_TAG} -f src/external/dockerfile src/external
+    else
+      sudo docker build -t ${DOCKER_REPOSITORY}:${DOCKER_TAG} -f src/external/dockerfile src/external
+    fi
+
+script:
+  - sudo docker run -it -v `pwd`:`pwd` -w `pwd` ${DOCKER_REPOSITORY}:${DOCKER_TAG} bash -c 'cd src/external && ./build.sh ci && ./build.sh test && ./run_test.sh'
+  - | # Save built image to Travis cache directory only if not a pull request.
+    if [ ${TRAVIS_PULL_REQUEST} == "false" ]; then
+      mkdir -p $(dirname ${DOCKER_CACHE_FILE})
+      sudo docker save $(docker history -q ${DOCKER_REPOSITORY}:${DOCKER_TAG} | grep -v '') ${DOCKER_REPOSITORY}:${DOCKER_TAG} | gzip > ${DOCKER_CACHE_FILE}
+      echo "Saved cache successfully"
+    fi
diff --git a/OMAF-Sample/README.md b/OMAF-Sample/README.md
old mode 100644
new mode 100755
index 46463d65..414ec1e8
--- a/OMAF-Sample/README.md
+++ b/OMAF-Sample/README.md
@@ -1,72 +1,123 @@
 # Immersive Video OMAF Sample
 ## Introduction
- The Immersive Video Delivery OMAF Sample provides a quick trial to setup E2E OMAF-Compliant 360 video streaming. OMAF 360 Video streaming sample can support both VOD and Live streaming for 4K and 8K contents.
+ The Immersive Video Delivery OMAF Sample provides a quick trial to set up E2E OMAF-compliant 360 video streaming. The OMAF 360 video streaming sample supports both VOD and live streaming for 4K and 8K contents.
-## Software Requirements
+ The OMAF sample can be deployed with [Kubernetes](#kubernetes-deployment-steps) or directly with a [Docker image](#docker-image-deployment-steps).
- - Server OS : CentOS Linux release 7.6.1810 (Core)
+## Software Requirements
+
+ - Server OS : CentOS Linux release 7.6.1810 (Core)
 - Client OS : Ubuntu 18.04 LTS
- - Docker version : 1.13.1
+ - Docker version : 19.03.12
-## Hardware Tested
+## Test Environment Hardware
 | Platform | Server | Client |
 |:----:|:----:|:----:|
 | CPU SKU | Intel® Xeon® Platinum<br>8280M CPU @ 2.70GHz | Intel® Core™ i7-6770HQ<br>CPU @ 2.60GHz x 8 |
 | Memory | 128G | 16G |
-## Installation
+---
+
+## Docker image deployment steps
+
+### Installation
 - [Install docker engine in server](https://docs.docker.com/install)
 - Server :
 ```bash
-    cd OMAF-Sample/server && ./deploy.sh            # Proxy config is optional.
-    docker image ls                                 # Created an image. [REPOSITORY:immersive_server, TAG:v0.1]
+    cd path_to/Immersive-Video-Sample/OMAF-Sample/server
+    mkdir build && cd build
+    cmake .. -DHTTP_PROXY=                          # proxy is optional
+    make build -j $(nproc)
+    docker image ls                                 # [REPOSITORY:immersive_server, TAG:v1.4]
 ```
-
+
 - Client :
-
 ```bash
    cd OMAF-Sample/client && ./deploy.sh
-    cd package            # Copy the packages to any client machine to install.
-    sudo dpkg -i immersive-client-1.0.0-1.el7_amd64.deb
 ```
-## How To Run (HTTPS)
+### How To Run (HTTPS)
 - Server :
-
 ```bash
-    docker run -p 5000:443 -p 5001:8080 -it immersive_server:v0.1 bash   # Map the port.
-    /usr/local/nginx/sbin/nginx                     # Start nginx.
-    cd Sample-Videos && ./run.sh                    # :[4k,8k]  :[LIVE,VOD]
+    docker run --privileged -p 30001:443 -p 30002:8080 -it immersive_server:v1.4 bash  # Map the port.
+    cd /usr/local/nginx/conf/
+    ./configure.sh CN Shanghai A B C D E@F.com      # './configure.sh -h' for details.
+    /usr/local/nginx/sbin/nginx                     # Start nginx.
+    cd /home/immersive/Sample-Videos && ./run.sh    # :[4K,8K]  :[LIVE,VOD]
 ```
+For details of the FFmpeg plugins' parameters, refer to the [FFmpeg usage doc](../src/doc/Immersive_Video_Delivery_FFmpeg_usage.md).
+
 - Client :
+```bash
+    sudo su
+    cd path_to/Immersive-Video-Sample/src/build/client/player
+    export LD_LIBRARY_PATH=/usr/local/lib/:$LD_LIBRARY_PATH
+    vim config.xml            # Set up configuration, details in table at bottom.
+    ./render                  # Press 'q' button to quit.
+```
+
+---
+
+## Kubernetes deployment steps
+### Installation
+
+  - Node setup steps are the same as for the [docker image deployment](#docker-image-deployment-steps).
+
+  - Master setup steps:
+    1. Follow the [instructions](https://kubernetes.io/docs/setup) to set up your Kubernetes cluster.
+    2. All cluster nodes must have the same user (uid) and group (gid).
+    3. Set up password-less access from the Kubernetes controller to each worker node:
+    ```bash
+    ssh-keygen
+    ssh-copy-id
+    ```
+
+
+### How To Run
+  - On the master, start or stop the services as follows:
+```
+    cd path_to/Immersive-Video-Sample/OMAF-Sample/server
+    mkdir build && cd build
+    cmake ..
+    make start -j $(nproc)    # choose to start or stop
+```
+
+- Client :
 ```bash
    sudo su
-    cd /usr/bin/immersive/
-    export LD_LIBRARY_PATH=/usr/lib64/immersive-client:$LD_LIBRARY_PATH
-    vim config.xml            # Set up configuration, details as following table
-    ./render                  # Press 's' button to start
+    cd path_to/Immersive-Video-Sample/src/build/client/player
+    export LD_LIBRARY_PATH=/usr/local/lib/:$LD_LIBRARY_PATH
+    vim config.xml            # Set up configuration, details in table at bottom.
+    ./render                  # Press 'q' button to quit.
 ```
+
+---
+
 **Config.xml**
-| Parameters | Descriptions | Examples |
-| :----:| :----: | :----: |
-| windowWidth | The width of render window | 960 for 4k, 1920 for 8k |
+
+| **Parameters** | **Descriptions** | **Examples** |
+| --- | --- | --- |
+| windowWidth | The width of render window | 960 for 4k, 1920 for 8k |
 | windowHeight | The height of render window | 960 for 4k, 1920 for 8k |
 | url | The resource URL path | Remote URL |
 | sourceType | Source type | 0 is for Dash Source |
-| decoderType | FFmpeg Decoder type | 0 is for software decoder |
-| contextType | OpenGL context type | 0 is for glfw |
-| useDMABuffer | DMA Buffer flag | 0 means no DMA buffer |
+| enableExtractor | extractor track path or later binding path | 1 is for extractor track and 0 is for later binding |
+| StreamDumpedOption | dump packet streams or not | 0 for false, 1 for true |
 | viewportHFOV | Viewport horizon FOV degree | 80 |
 | viewportVFOV | Viewport vertical FOV degree | 80 |
 | viewportWidth | Viewport width | 960 for 4k, 1920 for 8k |
 | viewportHeight | Viewport height | 960 for 4k, 1920 for 8k |
 | cachePath | Cache path | /tmp/cache |
-
+| minLogLevel | min log level | INFO / WARNING / ERROR / FATAL |
+| maxVideoDecodeWidth | max video decoded width | decoded width that is supported |
+| maxVideoDecodeHeight | max video decoded height | decoded height that is supported |
+| predict | viewport prediction plugin | 0 is disabled and 1 is enabled |
+| PathOf360SCVPPlugins | path of 360SCVP plugins | needed for planar format rendering |

- - **Note** : So far, some parameters settings are limited. URL need to be a remote dash source URL, example : `https://172.18.0.2:5000`. The parameter sourceType must set to 0, which represents dash source. The parameter decoderType must set to 0, which stands for FFmpeg software decoder. The parameter contextType need to be 0, which represents glfw context. And useDMABuffer flag should be set to 0.
+ - **Note** : So far, some parameter settings are limited. The URL needs to be a remote DASH source URL; for example, with `./run.sh 8K LIVE` it would be `https://xxx.xxx.xxx.xxx:30001/LIVE8K/Test.mpd`. The parameter `sourceType` must be set to 0, which represents a DASH source. The parameter `decoderType` must be set to 0, which stands for the FFmpeg software decoder. The parameter `contextType` needs to be 0, which represents a glfw context. And the `useDMABuffer` flag should be set to 0.
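Before editing `config.xml` on the client, it can help to confirm that the MPD published by the server is actually reachable. A minimal sketch, assuming the server was started with `./run.sh 8K LIVE` behind the `30001:443` / `30002:8080` port mappings shown above; the server address below is a placeholder:

```bash
# Client-side sanity check before launching ./render.
# -k is needed because configure.sh generates a self-signed certificate.
SERVER=xxx.xxx.xxx.xxx                                # placeholder: replace with the real server IP
curl -k -I https://${SERVER}:30001/LIVE8K/Test.mpd    # HTTPS path, expect an HTTP 200 response
curl -I http://${SERVER}:30002/LIVE8K/Test.mpd        # plain-HTTP path if run.sh was started with HTTP
```

If both requests fail, recheck the `docker run` port mappings and that nginx was started inside the container.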
diff --git a/OMAF-Sample/client/deploy.sh b/OMAF-Sample/client/deploy.sh index 4ae4d725..8a1ce9cd 100755 --- a/OMAF-Sample/client/deploy.sh +++ b/OMAF-Sample/client/deploy.sh @@ -1,4 +1,3 @@ #!/bin/bash -ex cd ../../src/external -./build_client.sh -./fpm.sh 1.0.0 +./build.sh client y diff --git a/OMAF-Sample/server/.gitkeep b/OMAF-Sample/server/.gitkeep deleted file mode 100644 index e69de29b..00000000 diff --git a/OMAF-Sample/server/CMakeLists.txt b/OMAF-Sample/server/CMakeLists.txt new file mode 100644 index 00000000..c40e59d4 --- /dev/null +++ b/OMAF-Sample/server/CMakeLists.txt @@ -0,0 +1,11 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) + +ADD_CUSTOM_TARGET(build ALL + ${CMAKE_CURRENT_SOURCE_DIR}/deploy.sh + ${CMAKE_CURRENT_SOURCE_DIR} ${HTTP_PROXY}) + +ADD_CUSTOM_TARGET(start ALL + ${CMAKE_CURRENT_SOURCE_DIR}/start.sh) + +ADD_CUSTOM_TARGET(stop ALL + ${CMAKE_CURRENT_SOURCE_DIR}/stop.sh) diff --git a/OMAF-Sample/server/Dockerfile b/OMAF-Sample/server/Dockerfile index fe8665f8..13897085 100644 --- a/OMAF-Sample/server/Dockerfile +++ b/OMAF-Sample/server/Dockerfile @@ -1,24 +1,46 @@ FROM centos:7.6.1810 -RUN if [ -n "${HTTP_PROXY}" ] ; then \ - echo "proxy=${HTTP_PROXY}" >> /etc/yum.conf && \ - echo "http_proxy=${HTTP_PROXY}" >> /etc/wgetrc && \ - echo "https_proxy=${HTTP_PROXY}" >> /etc/wgetrc ; \ +RUN if [ -n "${http_proxy}" ] ; then \ + echo "proxy=${http_proxy}" >> /etc/yum.conf && \ + echo "http_proxy=${http_proxy}" >> /etc/wgetrc && \ + echo "https_proxy=${https_proxy}" >> /etc/wgetrc ; \ fi -RUN yum install -y -q openssl centos-release-scl scl-utils gmp gmp-devel mpfr mpfr-devel libmpc libmpc-devel patch autoconf libtool automake libcurl-devel wget git -y && \ - rm -rf /var/cache/yum/* && \ - if [ -n "${HTTP_PROXY}" ]; then \ - git config --global http.proxy ${HTTP_PROXY} && \ - git config --global https.proxy ${HTTP_PROXY} ; \ - fi - -RUN yum install -y yum install devtoolset-7-gcc* libevent-devel.x86_64 openssl-devel bc numactl -y && \ - rm -rf /var/cache/yum/* - WORKDIR /home/immersive ARG WORKDIR=/home/immersive +# Install denpendency +RUN yum install -y centos-release-scl-rh && \ + yum install -y wget git bzip2 xz sudo devtoolset-7-gcc* && \ + wget https://download-ib01.fedoraproject.org/pub/epel/7/x86_64/Packages/e/epel-release-7-13.noarch.rpm && \ + rpm -Uvh epel-release*rpm && \ + yum install -y openssl centos-release-scl scl-utils gmp gmp-devel && \ + yum install -y mpfr mpfr-devel libmpc libmpc-devel patch autoconf && \ + yum install -y libtool automake libcurl-devel libxml2-devel && \ + yum install -y libevent-devel.x86_64 openssl-devel bc redhat-lsb && \ + yum install -y libXrandr libXrandr-devel libXinerama libXinerama-devel && \ + yum install -y libXcursor libXcursor-devel libXi libXi-devel glm-devel && \ + yum install -y mesa-libGL mesa-libGL-devel mesa-libGLU && \ + yum install -y mesa-libGLU-devel mesa-libGLES-devel mesa-libEGL-devel && \ + yum install -y SDL2 SDL2-devel libcurl4-openssl-dev libglfw3 && \ + yum install -y libXv-devel glfw glfw-devel xz-devel lzma -y && \ + yum install -y uuid.x86_64 uuid-devel.x86_64 popt-devel.x86_64 -y && \ + yum install -y numactl.x86_64 numactl-devel.x86_64 numactl-libs.x86_64 -y && \ + rm -rf /var/cache/yum/* && rm -rf * && \ + if [ -n "${http_proxy}" ]; then \ + git config --global http.proxy ${http_proxy} && \ + git config --global https.proxy ${https_proxy} ; \ + fi + +RUN yum -y install rh-ruby23 rh-ruby23-ruby-devel rh-ruby23-rubygem* && \ + source /opt/rh/rh-ruby23/enable && \ + yum -y install rpm-build && \ + if [ -n 
"${http_proxy}" ]; then \ + gem install -p ${http_proxy} fpm ; \ + else \ + gem install fpm ; \ + fi + # Build zlib ARG ZLIB_VER=1.2.11 ARG ZILB_REPO=http://zlib.net/zlib-${ZLIB_VER}.tar.gz @@ -53,20 +75,6 @@ RUN wget -O - ${YASM_REPO} | tar xz && \ make install && \ cd ${WORKDIR} && rm -rf ./* -# Build libxml2 -ARG LIBXML2_VER=2.9.6 -ARG LIBXML2_REPO=http://www.xmlsoft.org/sources/libxml2-${LIBXML2_VER}.tar.gz -RUN wget -O - ${LIBXML2_REPO} | tar xz && \ - cd libxml2-${LIBXML2_VER} && \ - source /opt/rh/devtoolset-7/enable && \ - ././configure --with-python=no && \ - make -j$(nproc) && \ - make install && \ - cd ${WORKDIR} && rm -rf ./* - -RUN yum install -y xz && \ - rm -rf /var/cache/yum/* - # Build CURL ARG CURL_VER=7.66.0 ARG CURL_REPO=https://curl.haxx.se/download/curl-${CURL_VER}.tar.xz @@ -78,8 +86,6 @@ RUN wget ${CURL_REPO} && \ ./configure --with-darwinssl && \ make -j$(nproc) && \ make install && \ - cd /usr/local/include/libxml2 && \ - mv libxml ../ && \ cd ${WORKDIR} && rm -rf ./* # Install BOOST @@ -107,25 +113,35 @@ RUN git clone https://github.com/OpenVisualCloud/SVT-HEVC.git && \ RUN git clone https://github.com/google/glog.git && \ cd glog && \ source /opt/rh/devtoolset-7/enable && \ - ./autogen.sh && \ - ./configure && \ - make -j8 && \ - make install && \ + sed -i '23s/OFF/ON/' CMakeLists.txt && \ + cmake -H. -Bbuild -G "Unix Makefiles" && \ + cmake --build build && \ + cmake --build build --target install && \ cd ${WORKDIR} && rm -rf ./glog +# Install lttng +RUN source /opt/rh/devtoolset-7/enable && \ + wget -c https://lttng.org/files/urcu/userspace-rcu-latest-0.11.tar.bz2 && \ + tar -xjf userspace-rcu-latest-0.11.tar.bz2 && \ + cd userspace-rcu-0.11.* && \ + ./configure && \ + make -j $(nproc) && \ + make install && ldconfig && \ + cd ../ && rm -rf userspace-rcu-0.11.* && \ + wget -c http://lttng.org/files/lttng-ust/lttng-ust-latest-2.11.tar.bz2 && \ + tar -xjf lttng-ust-latest-2.11.tar.bz2 && \ + cd lttng-ust-2.11.* && \ + ./configure --disable-man-pages && \ + make -j $(nproc) && \ + make install && ldconfig && \ + cd ../ && rm -rf lttng-ust-2.11.* + # Copy source COPY ./src ${WORKDIR} - -# Install openHEVC -RUN git clone https://github.com/OpenHEVC/openHEVC.git && \ - cd openHEVC && \ - source /opt/rh/devtoolset-7/enable && \ - git checkout ffmpeg_update && \ - patch -p1 < ../external/ffmpeg_update_add_circle_list_for_to_free_frame.patch && \ - ./configure --libdir=/usr/lib64 --disable-sdl2 && \ - make -j `nproc` && \ - make install && \ - cd ${WORKDIR} && rm -rf ./openHEVC +COPY src/ffmpeg/dependency/*.so /usr/local/lib/ +COPY src/ffmpeg/dependency/*.pc /usr/local/lib/pkgconfig/ +COPY src/ffmpeg/dependency/*.h /usr/local/include/ +COPY src/ffmpeg/dependency/WorkerServer /root # Install Thrift ARG THRIFT_VER=0.12.0 @@ -140,35 +156,18 @@ RUN wget -O - ${THRIFT_REPO} | tar xz && \ make install && \ cd ${WORKDIR} && rm -rf ./thrift-${THRIFT_VER}* -# Install OMAF -RUN git clone https://github.com/nokiatech/omaf.git && \ - ln -s ${PWD}/omaf/Mp4/srcs ${PWD}/OmafDashAccess/mp4lib && \ - cd omaf && \ - source /opt/rh/devtoolset-7/enable && \ - patch -p1 < ../external/nokia_omaf_patch_for_extrator_reader.diff && \ - cd Mp4/srcs && \ - mkdir build && \ - cd build && \ - cmake .. 
&& \ - make -j`nproc` && \ - cp -r ../api/streamsegmenter ../../../../VROmafPacking/ && \ - cp lib/libstreamsegmenter_static_fpic.a /usr/local/lib/ && \ - cp lib/libstreamsegmenter_static.a /usr/local/lib/ && \ - cp lib/libmp4vr_static_fpic.a /usr/local/lib/ && \ - cp lib/libmp4vr_static.a /usr/local/lib/ - -# Install Server FFmpeg Dependency -RUN git clone https://github.com/FFmpeg/FFmpeg.git && \ - cd FFmpeg && \ +# Install openHEVC +RUN git clone https://github.com/OpenHEVC/openHEVC.git && \ + cd openHEVC && \ source /opt/rh/devtoolset-7/enable && \ - git checkout release/4.1 && \ - git checkout c2ac3b8e6a040e33d53fa13548848c8ba981a8e4 && \ - cd - && patch -p1 < ffmpeg/patches/FFmpeg_OMAF.patch && \ - mkdir -p external/FFmpeg && cd external/FFmpeg && \ - ../../FFmpeg/configure --prefix=/usr --libdir=/usr/local/lib --enable-static --enable-shared --enable-gpl --enable-nonfree --disable-optimizations --disable-vaapi && \ + git config --global user.email "you@example.com" && \ + git config --global user.name "Your Name" && \ + git checkout ffmpeg_update && \ + git am --whitespace=fix ../external/Update-buffer-operation-and-fix-stream-loop-coredump.patch && \ + ./configure --libdir=/usr/lib64 --disable-sdl2 && \ make -j `nproc` && \ make install && \ - cd ${WORKDIR} && rm -rf ./external/FFmpeg + cd ${WORKDIR} && rm -rf ./openHEVC # Build Nginx ARG NGINX_VER=1.13.1 @@ -183,21 +182,34 @@ RUN wget -O - ${NGINX_REPO} | tar xz && \ EXPOSE 443 EXPOSE 8080 +# Build safe string lib +RUN git clone https://github.com/intel/safestringlib.git && \ + cd safestringlib && \ + source /opt/rh/devtoolset-7/enable && \ + cmake . && \ + make -j `nproc` -f Makefile && \ + cp libsafestring_shared.so /usr/local/lib/ && \ + mkdir -p /usr/local/include/safestringlib/ && \ + cp ./include/* /usr/local/include/safestringlib/ && \ + cd ${WORKDIR} && rm -rf ./safestringlib + # Configure And Run Nginx COPY nginx_conf /usr/local/nginx/conf -COPY src/ffmpeg/dependency/*.so /usr/local/lib/ -COPY src/ffmpeg/dependency/*.pc /usr/local/lib/pkgconfig/ -COPY src/ffmpeg/dependency/*.h /usr/local/include/ -COPY src/ffmpeg/dependency/WorkerServer /root # Build Server -RUN mkdir build && cd build && \ +RUN cd external && \ source /opt/rh/devtoolset-7/enable && \ - export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH && \ - cmake -DCMAKE_BUILD_TYPE=Debug -DTARGET=server .. && \ - make -j `nproc` && \ - make install && \ - mv ffmpeg/ffmpeg ../Sample-Videos && \ + ./install_FFmpeg.sh server && \ + ./build.sh server n && \ + mv ../build/server/ffmpeg/ffmpeg ../Sample-Videos && \ cd .. && rm -rf `ls | grep -v Sample-Videos` COPY run.sh ${WORKDIR}/Sample-Videos + +# Unset proxy if necessary +RUN if [ -n "${http_proxy}" ]; then \ + sed -i '$d' /etc/yum.conf && \ + sed -i '1,$d' /etc/wgetrc && \ + git config --global --unset http.proxy && \ + git config --global --unset https.proxy ; \ + fi diff --git a/OMAF-Sample/server/deploy.sh b/OMAF-Sample/server/deploy.sh index d959432e..0d582697 100755 --- a/OMAF-Sample/server/deploy.sh +++ b/OMAF-Sample/server/deploy.sh @@ -1,33 +1,40 @@ #!/bin/bash -ex -PROXY=$1 - parameters_usage(){ - echo 'Usage: 1. : proxy setting. [optional]' + echo 'Usage: 1. : original file path' + echo ' 2. : proxy setting. [optional]' } -mkdir -p src -cd ../.. 
-cp -r src/360SCVP OMAF-Sample/server/src -cp -r src/external OMAF-Sample/server/src -cp -r src/ffmpeg OMAF-Sample/server/src -cp -r src/OmafDashAccess OMAF-Sample/server/src -cp -r src/player OMAF-Sample/server/src -cp -r src/utils OMAF-Sample/server/src -cp -r src/VROmafPacking OMAF-Sample/server/src -cp -r src/CMakeLists.txt OMAF-Sample/server/src -cp -r Sample-Videos OMAF-Sample/server/src -cd OMAF-Sample/server +REPOPATH=`echo $1 | awk -F "OMAF-Sample" '{print $1}'` +SRCPATH="${REPOPATH}src/" +DSTPATH="${REPOPATH}OMAF-Sample/server/src/" + +mkdir -p ${DSTPATH} +cd ${DSTPATH}.. + +cp -r ${SRCPATH}360SCVP ${DSTPATH} +cp -r ${SRCPATH}external ${DSTPATH} +cp -r ${SRCPATH}ffmpeg ${DSTPATH} +cp -r ${SRCPATH}player ${DSTPATH} +cp -r ${SRCPATH}utils ${DSTPATH} +cp -r ${SRCPATH}isolib ${DSTPATH} +cp -r ${SRCPATH}trace ${DSTPATH} +cp -r ${SRCPATH}plugins ${DSTPATH} +cp -r ${SRCPATH}VROmafPacking ${DSTPATH} +cp -r ${SRCPATH}OmafDashAccess ${DSTPATH} +cp -r ${SRCPATH}CMakeLists.txt ${DSTPATH} +cp -r ${REPOPATH}Sample-Videos ${DSTPATH} -if [ $# = 0 ] ; then - docker build -t immersive_server:v0.1 . -elif [ $# = 1 ] ; then +if [ $# = 1 ] ; then + docker build -t immersive_server:v1.4 . +elif [ $# = 2 ] ; then if [ "$1" = "-h" ] ; then parameters_usage else - PROXY=$1 - docker build -t immersive_server:v0.1 --build-arg HTTP_PROXY=${PROXY} --build-arg HTTPS_PROXY=${PROXY} . - echo 'case 1' + PROXY=$2 + docker build -t immersive_server:v1.4 \ + --build-arg http_proxy=${PROXY} \ + --build-arg https_proxy=${PROXY} . echo "PROXY:${PROXY}" fi else @@ -35,4 +42,4 @@ else exit 0 fi -rm -rf src +rm -rf ${DSTPATH} diff --git a/OMAF-Sample/server/deploy.yaml b/OMAF-Sample/server/deploy.yaml new file mode 100644 index 00000000..f5794a84 --- /dev/null +++ b/OMAF-Sample/server/deploy.yaml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: immersive-server + labels: + app: immersive-server +spec: + replicas: 1 + selector: + matchLabels: + app: immersive-server + template: + metadata: + labels: + app: immersive-server + spec: + enableServiceLinks: false + containers: + - name: immersive-server + image: immersive_server:v1.4 + command: [ "/bin/sh","-c" ] + args: + - cd /usr/local/nginx/conf/ && + ./configure.sh CN Shanghai A B C D E@F.com && + /usr/local/nginx/sbin/nginx && + cd /home/immersive/Sample-Videos && + ./run.sh 4K LIVE > output.log 2>&1 + imagePullPolicy: IfNotPresent + ports: + - containerPort: 443 + - containerPort: 8080 diff --git a/OMAF-Sample/server/nginx_conf/configure.sh b/OMAF-Sample/server/nginx_conf/configure.sh new file mode 100755 index 00000000..d5f54066 --- /dev/null +++ b/OMAF-Sample/server/nginx_conf/configure.sh @@ -0,0 +1,50 @@ +#!/bin/bash -e + +COUNTRY_NAME=$1 +STATE_OR_PROVIENCE_NAME=$2 +LOCALITY_NAME=$3 +DEFAULT_CITY=$4 +ORGANIZATION_NAME=$5 +DEFAULT_COMPANY_LTD=$6 +COMMON_NAME=$7 +EMAIL_ADDRESS=$8 + +parameters_usage(){ + echo 'Usage: 1. : A 2 letter code. eg. [CN].' + echo ' 2. : Full name. eg. [Shanghai].' + echo ' 3. : eg. [Somewhere].' + echo ' 4. : eg. [Somegroup].' + echo ' 5. : eg. [Default.Ltd]' + echo ' 6. : eg. [Nobody].' + echo ' 7. : eg. [Nobody@email.com].' 
+} + +if [ "${ITEM}" = "-h" ] || [ $# != 7 ] ; then + parameters_usage + exit 0 +fi + +cd /usr/local/nginx/conf +mkdir -p ssl +cd ssl + +openssl req -x509 -nodes -days 30 -newkey rsa:4096 -keyout ./self.key -out ./self.crt << EOL +${COUNTRY_NAME} +${STATE_OR_PROVIENCE_NAME} +${LOCALITY_NAME} +${DEFAULT_CITY} +${ORGANIZATION_NAME} +${DEFAULT_COMPANY_LTD} +${COMMON_NAME} +${EMAIL_ADDRESS} +EOL + +chmod 640 self.key +chmod 644 self.crt + +openssl dhparam -dsaparam -out ./dhparam.pem 4096 + +chmod 644 dhparam.pem + +openssl rsa -noout -text -in self.key +openssl x509 -noout -text -in self.crt diff --git a/OMAF-Sample/server/nginx_conf/ssl/dhparam.pem b/OMAF-Sample/server/nginx_conf/ssl/dhparam.pem deleted file mode 100644 index c57d7c42..00000000 --- a/OMAF-Sample/server/nginx_conf/ssl/dhparam.pem +++ /dev/null @@ -1,25 +0,0 @@ ------BEGIN X9.42 DH PARAMETERS----- -MIIELAKCAgEAkO7+2L4hzsAcDxeWMG8Vyu4Sv+gvr0u6h4II+OhFieDpcpFCiuFk -xZK+yDFvK9hUZLWQZn4yoGh0f8vMgHEku+3NS+NU9tM9JHOy+0LLYW3gglvhRVkG -eCO9Y+wDSwRnFefVGCKYBCPiafhsK5y04LhgYQvWMcfZNR4yQHmyPid4hrhnE2VM -nJc/WCGJlbnUCdz4Zl7uV62UvJ1mgyGKPXkkyktsb2vEuUV5vjyRn/IseL/Kj/0z -/9Sk9iJ9oXSnlZfIeGVvk6qermi2h00A69FYLXl/InH7SoITzAYFzUOlPxiA6ycJ -nv6ZAiuD6+Nt5LgnaSOBGJ4W4cqWVKU7bx3fMnujN8z1Yprui6E2doir8taBjoUk -2c/HvTAYkBeUjNwXHZkGmSpFql1KR7Gcz7dhxhskETfK56e3yiDBZryMbr2x+eJM -aqojlrmP7iTf+fnOqRvh+TjOKM78ImLyMXAhgn47VlB2dCxP8zFsSm5xIBHKwrIg -OcGKw6J7eMaM/1qH9C6SNvL9Xiy8ZBRru4CQrLk2vDWWkuzbTs1/MnNxI68ib3hi -zkxlpDs61IwNRY6Uu45s3l3aYgxsHKFkE5fp/RZbF5bxALCLw4imC66iWmU2uyY7 -8DZ9+XnPBqC1rrfQoxEdCeSOVAfWNUatQ1SOlE9emLzbcm9ABRFdoJkCggIAOis7 -4r6Wa3hj/qqHJ8tPeYxts8Ini7jm6UEhtmmMeo5OwGM2uAlQ7KUmDWY8PxAl5hfq -YPYNIGPkKrurlFzQQgZeYD9LL8V0wHBd5DGsfFoXchCcGUmmi0HSr8U8lEShTiHP -OXZz5rcGFaoyb3vHpwUMQ2T8Y3DwlPB4H5Lzj7tc7dljSdBkFQD9lge9rDwcxkTX -Ee5vQ+kMdKw+vA9pB8lXWEtzk6jni0nXFe3cFcIzoyr9Mwd5iwlqOZrhdQROVN5n -jMg8whcOay0qalhnwCTSTuHe2ZgpeWE38HTUzL+reP4MndIQYTM67jjLBTiyx6ca -jrdRwKEJSVj5F7BP4RlEx2PpIxppekdW+hH9zrWDj8uWvM5P+/injUcnoSEhhNbh -w4Ta6jjaEuHOnXrHLYmbDtMif64UhRDmsQMntfrYIhKIcEXTICyQ0DjjYdSs2kr9 -BvoGc3cxXCNQBxtbU3wM59u+WLzGbHXk02v757oUlm738/HZAgRGHokPdd3RZ/b1 -YE6QQ2jpu0kihxPh0gKV/p5JZxwgxIPnV/HnOg27yCMA+2dLiKXabgqD0vI6ajdo -bxoxKmkTV2J/MrXhDC0jaGZD2s9SmXuKnFh1s7PLQQTwBQtvklGPdxxlKrx5tZy/ -tE9DlhT80aMIvPGoRLZH3Nxbq05d96dHGZHkpEACIQC8Fl0Wzh/Y1jy9U5UJ42FA -M88DPQ0wvQWA0r0wXXWz6Q== ------END X9.42 DH PARAMETERS----- diff --git a/OMAF-Sample/server/nginx_conf/ssl/self.crt b/OMAF-Sample/server/nginx_conf/ssl/self.crt deleted file mode 100644 index 952a6390..00000000 --- a/OMAF-Sample/server/nginx_conf/ssl/self.crt +++ /dev/null @@ -1,33 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIJAJLADIL2CIo4MA0GCSqGSIb3DQEBCwUAMHUxCzAJBgNV -BAYTAkNOMQswCQYDVQQIDAJTSDERMA8GA1UEBwwIU2hhbmdoYWkxDjAMBgNVBAoM -BVppemh1MRowGAYDVQQLDBFEYXRhIENlbnRlciBHcm91cDEaMBgGA1UEAwwRSW50 -ZWwgQ29ycG9yYXRpb24wHhcNMTkxMDMxMDEyMTMwWhcNMTkxMTMwMDEyMTMwWjB1 -MQswCQYDVQQGEwJDTjELMAkGA1UECAwCU0gxETAPBgNVBAcMCFNoYW5naGFpMQ4w -DAYDVQQKDAVaaXpodTEaMBgGA1UECwwRRGF0YSBDZW50ZXIgR3JvdXAxGjAYBgNV -BAMMEUludGVsIENvcnBvcmF0aW9uMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAujhPV1Ynd1ZZP12cd16JhftMfiKzwNEG0q7oa5RaxuLKJY37LqeGYmJ7 -G7/hG/rmIRMq4uwo3tms6xxf1eAGxkVz/kIUdIEola0P65zZz+Ac2yusbajw9RS/ -d5wlpOalWMHXj3h6y/WR6znmoskdmwOEPcc0dOE3Ynp0p5I9ZWcpGsOsjnTGLT9+ -SPVNxWfOrBpQEuO5wizd/5EzFbOVZLHJQUcyr4Qsfl9zQNx8I2howPZYVOemBchw -D/14hMmgL6s3KdBhde2UzAN+z/bjviqqbb84j886Xr2zmVzIQhpvFfrDYZiU6nB6 -t6gmpdMAb90mdKOXmpG/ZGqjqb5LyNoTSnq0ngJfxpoKPEURiNc4b2nvxbFlEP8X 
-9SLCz3hF/pGWcq1AoH1CZqY2e4KcBAQYG8AIe940KkqdejFkFZI0Kzq5NRt3kK+P -u1ZVx9Ry8q7TSYm/mFO4N/+Ve79JMLH+8CqrVi6O8L7ACrET94BslKkgs9NgsPb5 -Kega6TF/Z6JXATh77vL7Zau1CrqvKOWh2dU38+Ggj/PNo5y6kss6r3DbMvkmp3+M -jZ6sIFiz1oGLiIqo4dSeJLT1aJq2udU0NSTEF/TH7Hh4cQYo4DMe9N6FESIRALoF -ME0nQj0HaZLUv7kOtgOq0IHUczH6eoYDfXlI8maCrsq0EEXy6iMCAwEAAaNQME4w -HQYDVR0OBBYEFJthCSMhXF3Ro/AFW388yhILBKf8MB8GA1UdIwQYMBaAFJthCSMh -XF3Ro/AFW388yhILBKf8MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIB -AC50/XRmFbnFIfH2dyr7GtRpefjg5FWCyVy3QykHAJfExeRj1e/6/DTHkiIHp03D -P+kjoQnZXVJr+u25vYM24g05j8yb5ylO0EmHB2xaG2XJs+6c+8szzUnh3qFIDR6J -hFdK/fuDH8gtah4UxZaTgQSteWH0pA+CSLWaWZlKKWaP11ciTBc5PIadxfjHxlvR -wQcAII4Mo1H/zCW3y8kZtaEb6O1KDD/t1as16Q9QPkI8LcaEVudVGLeCVFdS6mhv -o5rVn6DIurTmv0f9r8/aXAWkHIfnYyTLvso8rzkR41Nb3L7TEvlInfUaOyxYzmPY -xcp1rVDBD2ElnNf16U2kO+vOpHXRtaBduGJa4qQ7se1fxqQMkV8tEX3kaHxXnvA0 -289IOxi1iX5u/xvZD1eBpm032MHOxMb+xvNVqtm9yLQo6Q/zsmxaDd8KYvW1BCon -XC+PiD4CQNWzZzfbcPhZFEaCBT4vDFK+7CQ7eEzHHJVHnOkZ+kFsMmRZjqKi/yaJ -6Goeh+Dlwr9tnYDfrVGN6uZm8mxFpBVuO+OQ+YBJtzY73gzHxV/oDIvnH5KCFNP8 -HP3j/TXqcTfQl5njcRVwDoKEDpKfcofTCQOfeKAjW1wMljC3kIbUxP4JS/yDFgxm -6DdR0BUJBKt3xcTRtKVZlvKhJJ06LPTcwhNz9zADbYNH ------END CERTIFICATE----- diff --git a/OMAF-Sample/server/nginx_conf/ssl/self.key b/OMAF-Sample/server/nginx_conf/ssl/self.key deleted file mode 100644 index b4d0ca35..00000000 --- a/OMAF-Sample/server/nginx_conf/ssl/self.key +++ /dev/null @@ -1,52 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQC6OE9XVid3Vlk/ -XZx3XomF+0x+IrPA0QbSruhrlFrG4soljfsup4ZiYnsbv+Eb+uYhEyri7Cje2azr -HF/V4AbGRXP+QhR0gSiVrQ/rnNnP4BzbK6xtqPD1FL93nCWk5qVYwdePeHrL9ZHr -OeaiyR2bA4Q9xzR04TdienSnkj1lZykaw6yOdMYtP35I9U3FZ86sGlAS47nCLN3/ -kTMVs5VksclBRzKvhCx+X3NA3HwjaGjA9lhU56YFyHAP/XiEyaAvqzcp0GF17ZTM -A37P9uO+KqptvziPzzpevbOZXMhCGm8V+sNhmJTqcHq3qCal0wBv3SZ0o5eakb9k -aqOpvkvI2hNKerSeAl/Gmgo8RRGI1zhvae/FsWUQ/xf1IsLPeEX+kZZyrUCgfUJm -pjZ7gpwEBBgbwAh73jQqSp16MWQVkjQrOrk1G3eQr4+7VlXH1HLyrtNJib+YU7g3 -/5V7v0kwsf7wKqtWLo7wvsAKsRP3gGyUqSCz02Cw9vkp6BrpMX9nolcBOHvu8vtl -q7UKuq8o5aHZ1Tfz4aCP882jnLqSyzqvcNsy+Sanf4yNnqwgWLPWgYuIiqjh1J4k -tPVomra51TQ1JMQX9MfseHhxBijgMx703oURIhEAugUwTSdCPQdpktS/uQ62A6rQ -gdRzMfp6hgN9eUjyZoKuyrQQRfLqIwIDAQABAoICAFdLglYMux+ESP4dYfXLSWiX -bzpKBj3DPxGQkL+toghN1Dlan381ZRfF1rQhdONq8Gpc0+PglSAypo7olDxscoCL -ml58l54t1dqkcCwhMTHze1N626/kXGBnQ4JMnpD41lYPOH+3GMl36tl8IRP6d3ht -Y5aaRYrs8MunX/wV39YjT++N/twg/a3Eb+y/oZ+ClC21gAf63N+brM3bQTlRIB0+ -AnwpZidFLJ5pHFwxf7gRf9nFIY9bsEZRucalKdQFuKPf/VIKr0VbBD/G+7F9qTFC -SvZ7h4ZSvKghAYpJJk9mmOCS1vhvUI+9/qkNmGBv02qdHyPadH0vCauHGgW+gUNu -UmBmztDODRaZjxYN+YaHvkdym7BFSVGFwgPWx6pFwd62JtuQs4EafCds2LIVbp9B -3nNPtuq7jzW2bB7immrnlESlUXAX8+ruV3uVs1o4U4AgcEkAa/9Pd0ZAQMC95LAY -p/wAYjf5+cMCnjnqFSt/WNs5NLSa/DBj5hZxq08dfe2jfv/4BQan1/LH0gVNgtTh -TUUSDh9t/++wvE3/G0ataHJGE30sfIZxIsJEfm9VhCnnahY85FapmZm+e6rNDk/F -IDbGacEFGFHqzGSE2P5tqrHA+Cn/ILM5UlXDuLxzz+/lhV3ycrvcEGloR3mgTqNf -DDT1aLTcVlgPFz4rkYYRAoIBAQD2awySEMBHWdXnHFPS8ZMbMB+QM7K+XOlUT4PU -yqdyskLF0H+CEfPMEJocWcaPv89ggMVQjjIkaRQkF0EJKE+GY5t7/2ZqlTCFEUlc -IyUzapP7TrhIqZK2fxAQBYdoarI6ZBMME2IpQT6DbVham5T94OSeeB0q1VWJDD5M -5iXKLgiHS4ZVGOeZnOO7o6rXmOkAb4c8vBLU4YeytNAIzCsKUo4yrC1Ga5pXQ7J8 -t7dDpUech+DNP8o6QA+BhymiZ/McHwSJwFNWEbGnZ4/nHluDicVtbrdyJtDwou0B -YK3b6Q85609FmdT3ClphdNBFgmdxrNSvWJOmE9ujnxb/dZtfAoIBAQDBdgW8YgHJ -o8qLyNSOD1LNG6JSM2AnIcNhn/tfTkI1Tl4V0m9MMBwEsf3LLG1ecoG629cK2VhB -EXzuIR5z7zMKAUovs3q32bPCJbOc+rkQRxywezS82lyDUfCo9WO6CLmE0fAhwhjz -NPTOvIkJsxWglztumkY7nItoq/0GGeiT7ETHYNh5QIq//aRap30fVPV7fExNOpZv 
-3Jl9PjwPLecn/Z1MEW4xCqe3f6QGmcSPXqhlfURSUAwrwX+cJTkuVAkj17Ybt/YG -vm/8uihKJHB3CzL6R3Bog32MTuNvYsjfEuOxYQCo6gCS7rXzK06qEjeYqq/ogaRJ -2OXfcUHY4eu9AoIBAEsqdkgr8esR3SzFOFCVPfFCKa6cBn8I6me8ZZNISfGzrKiY -o/xyK3Lo8YmKXQZGVSSzwvyCk4sFvo4PxdfcQv3Ubn5RdtU2Mvm0PzNqyRM5BHDZ -H1+S0lz2v1iTxy0ztrAXDLRXSU8aRs5iUVw6LyWOL06jhwokzxWh9XMRlR9qEVHr -2MsmcKcr80+uahOf4hsu6gq9t+TV8sWxX+//D3XznsqYOKtNcWQDE3P1umJZDVIT -JKCXP8Gq/s/dAfc3TDIcyA6Y6xLLDOzqqzutx+6E3QM3jZ+1dXCh0jfkXMRDAFED -lUe+1/aPYuhtcIZzc8h6Ftpx43/6eLodci90mXMCggEBALVXIuNFXvWagYZr1sne -lTfLemjg3XVw0YrWAjOA0NDUx55e5ZKd2fWQvhvPZilM3kvAPB9gQbikYt13z+rO -QaJUOSzjaNmiyDvpSSaRYh1e+wZ2RqXrbSJvEU3ezv8XpIZan2OVY/u9HOgYk7+y -zUf92mi/VawU/hw8cu1gJcFai9Hz9pYZqLV0VLXhH4jaA00f4wnVLCRovLtINdXo -qi+zK9mc0RRJL5Lyo1bIlhXpnaGcPQmHHRF6zFoaDjJd9T9SuNHcq1IA+koEWTW+ -WGlN5xfxFO3yqyF05P2ItE9djm67RXoP9j/EYGsyogcLEnAZNtCTjOLn2/mJWTap -9mUCggEBAKi8kqzITE07WlwzrZzBBO2/URJ9zSLhyY1dJBNojQFGEW0uTzWfiVe+ -uJtWzUWKJS1xI11xIXfFPWdUC0m+b29DlhLoXtaPptV30sT8wuMAhGlV0+ZhoE6V -PlHSuKQXbXJ0Igke4LizOaceVs58Tn5w0mXtzXXHNVwEXdPypcA1PG42HO/G2xLD -Dn7ZbnNw4ZfTR33IaaOLyTMAuVdIZYFu84orCtXKZiDn/kN9J8JnDhW+UZGr/vFN -ZQn5KScK/9Vj0ALnXcFFSOqfLdaGajZ5g/hwHmc76/DwexPScuVKfuOBR1lCu37Z -WbFMNzwnFBC+CKq2A+5b/1WaJcFcwz8= ------END PRIVATE KEY----- diff --git a/OMAF-Sample/server/run.sh b/OMAF-Sample/server/run.sh index 71f3d573..c999882f 100755 --- a/OMAF-Sample/server/run.sh +++ b/OMAF-Sample/server/run.sh @@ -1,21 +1,23 @@ #!/bin/bash -x -RES=$1 -TYPE=$2 +RES=${1:-"4K"} +TYPE=${2:-"LIVE"} +PROTOCOL=${3:-"HTTP"} IP=$(cat /etc/hosts | tail -n 1 | awk '{print $1}') parameters_usage(){ - echo 'Usage: 1. : [ 4k , 8k ]' + echo 'Usage: 1. : [ 4K , 8K ]' echo ' 2. : [ LIVE , VOD ]' + echo ' 3. : [ HTTP , HTTPS ]' } pkill -f WorkerServer -if [ "${RES}" = "-h" ] || [ $# != 2 ] ; then +if [ "${RES}" = "-h" ] ; then parameters_usage exit 0 fi -if [ "${RES}" != "4k" ] && [ "${RES}" != "8k" ] ; then +if [ "${RES}" != "4K" ] && [ "${RES}" != "8K" ] ; then parameters_usage exit 0 fi @@ -24,46 +26,122 @@ if [ "${TYPE}" != "LIVE" ] && [ "${TYPE}" != "VOD" ] ; then exit 0 fi -ffmpeg_4k_LIVE(){ - ./ffmpeg -re -stream_loop -1 -i $1 -input_type 1 -rc 1 -c:v:0 distributed_encoder -s:0 3840x1920 -tile_row:0 6 -tile_column:0 10 -config_file:0 config_high.txt -la_depth:0 0 -r:0 30 -g:0 15 -b:0 30M -map 0:v -c:v:1 distributed_encoder -s:1 1024x640 -tile_row:1 2 -tile_column:1 4 -config_file:1 config_low.txt -la_depth:1 0 -r:1 30 -g:1 15 -b:1 5M -map 0:v -vframes 10000 -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -window_size 20 -extra_window_size 30 -base_url https://$2:443/LIVE4k/ -out_name Test /usr/local/nginx/html/LIVE4k/ -} +if [ "${PROTOCOL}" = "HTTPS" ] ; then + URLBASE="https://$2:443" +elif [ "${PROTOCOL}" = "HTTP" ] ; then + URLBASE="http://$2:8080" +fi -ffmpeg_4k_VOD(){ - numactl -c 1 ./ffmpeg -stream_loop -1 -i $1 -input_type 1 -rc 1 -r 30 -c:v:0 distributed_encoder -s:0 3840x1920 -g:0 15 -tile_row:0 6 -tile_column:0 10 -la_depth:0 0 -config_file:0 config_high.txt -b:0 30M -map 0:v -c:v:1 distributed_encoder -s:1 1024x640 -sws_flags neighbor -g:1 15 -tile_row:1 1 -tile_column:1 4 -la_depth:1 0 -config_file:1 config_low.txt -b:1 2M -map 0:v -vframes 500 -f omaf_packing -is_live 0 -split_tile 1 -seg_duration 1 -base_url https://$2:443/VOD4k/ -out_name Test /usr/local/nginx/html/VOD4k/ +ffmpeg_4K_LIVE(){ + ./ffmpeg -re -stream_loop -1 \ + -i $1 -input_type 1 -rc 1 \ + -c:v:0 distributed_encoder \ + -s:0 3840x1920 \ + -tile_row:0 6 -tile_column:0 12 \ + -config_file:0 config_high.txt \ + -la_depth:0 0 -r:0 30 
-g:0 15 \ + -b:0 30M -map 0:v \ + -c:v:1 distributed_encoder \ + -s:1 1024x640 -sws_flags neighbor \ + -tile_row:1 2 -tile_column:1 2 \ + -config_file:1 config_low.txt \ + -la_depth:1 0 -r:1 30 -g:1 15 \ + -b:1 5M -map 0:v -vframes 1000000 \ + -f omaf_packing \ + -is_live 1 -split_tile 1 -seg_duration 1 \ + -window_size 20 -extra_window_size 30 \ + -base_url ${URLBASE}/LIVE4K/ \ + -out_name Test /usr/local/nginx/html/LIVE4K/ } -ffmpeg_8k_LIVE(){ - numactl -c 1 ./ffmpeg -stream_loop -1 -re -i $1 -input_type 1 -c:v:0 distributed_encoder -s:0 7680x3840 -g:0 25 -tile_row:0 6 -tile_column:0 12 -la_depth:0 0 -config_file:0 config_high.txt -b:0 50M -map 0:v -c:v:1 distributed_encoder -s:1 1280x1280 -sws_flags neighbor -g:1 25 -tile_row:1 2 -tile_column:1 2 -la_depth:1 0 -config_file:1 config_low.txt -b:1 2M -map 0:v -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -extractors_per_thread 4 -base_url https://$2:443/LIVE8k/ -out_name Test /usr/local/nginx/html/LIVE8k/ +ffmpeg_4K_VOD(){ + ./ffmpeg -stream_loop -1 \ + -i $1 -input_type 1 -rc 1 \ + -c:v:0 distributed_encoder \ + -s:0 3840x1920 \ + -tile_row:0 6 -tile_column:0 12 \ + -config_file:0 config_high.txt \ + -la_depth:0 0 -r:0 30 -g:0 15 \ + -b:0 30M -map 0:v \ + -c:v:1 distributed_encoder \ + -s:1 1024x640 -sws_flags neighbor \ + -tile_row:1 2 -tile_column:1 2 \ + -config_file:1 config_low.txt \ + -la_depth:1 0 -r:1 30 -g:1 15 \ + -b:1 2M -map 0:v -vframes 500 \ + -f omaf_packing \ + -is_live 0 -split_tile 1 -seg_duration 1 \ + -base_url ${URLBASE}/VOD4K/ \ + -out_name Test /usr/local/nginx/html/VOD4K/ } -ffmpeg_8k_VOD(){ - numactl -c 1 ./ffmpeg -stream_loop -1 -i $1 -input_type 1 -rc 1 -r 25 -c:v:0 distributed_encoder -s:0 7680x3840 -g:0 25 -tile_row:0 6 -tile_column:0 12 -la_depth:0 0 -config_file:0 config_high.txt -b:0 50M -map 0:v -c:v:1 distributed_encoder -s:1 1280x1280 -sws_flags neighbor -g:1 25 -tile_row:1 2 -tile_column:1 2 -la_depth:1 0 -config_file:1 config_low.txt -b:1 2M -map 0:v -vframes 500 -f omaf_packing -is_live 0 -split_tile 1 -seg_duration 1 -base_url https://$2:443/VOD8k/ -out_name Test /usr/local/nginx/html/VOD8k/ +ffmpeg_8K_LIVE(){ + numactl -c 1 ./ffmpeg -re -stream_loop -1 \ + -i $1 -input_type 1 -rc 1 \ + -c:v:0 distributed_encoder \ + -s:0 7680x3840 \ + -tile_row:0 6 -tile_column:0 12 \ + -config_file:0 config_high.txt \ + -la_depth:0 0 -r:0 25 -g:0 25 \ + -b:0 50M -map 0:v \ + -c:v:1 distributed_encoder \ + -s:1 1280x1280 -sws_flags neighbor \ + -tile_row:1 2 -tile_column:1 2 \ + -config_file:1 config_low.txt \ + -la_depth:1 0 -r:1 25 -g:1 25 \ + -b:1 2M -map 0:v -vframes 1000000 \ + -f omaf_packing \ + -is_live 1 -split_tile 1 -seg_duration 1 \ + -extractors_per_thread 4 \ + -base_url ${URLBASE}/LIVE8K/ \ + -out_name Test /usr/local/nginx/html/LIVE8K/ } -/usr/local/nginx/sbin/nginx +ffmpeg_8K_VOD(){ + numactl -c 1 ./ffmpeg -stream_loop -1 \ + -i $1 -input_type 1 -rc 1 \ + -c:v:0 distributed_encoder \ + -s:0 7680x3840 \ + -tile_row:0 6 -tile_column:0 12 \ + -config_file:0 config_high.txt \ + -la_depth:0 0 -r:0 25 -g:0 25 \ + -b:0 50M -map 0:v \ + -c:v:1 distributed_encoder \ + -s:1 1280x1280 -sws_flags neighbor \ + -tile_row:1 2 -tile_column:1 2 \ + -config_file:1 config_low.txt \ + -la_depth:1 0 -r:1 25 -g:1 25 \ + -b:1 2M -map 0:v -vframes 500 \ + -f omaf_packing \ + -is_live 0 -split_tile 1 -seg_duration 1 \ + -base_url ${URLBASE}/VOD8K/ \ + -out_name Test /usr/local/nginx/html/VOD8K/ +} export LD_LIBRARY_PATH=/usr/local/lib/:/usr/local/lib64:$LD_LIBRARY_PATH -if [ "${RES}" = "4k" ] ; then +if [ 
"${RES}" = "4K" ] ; then VIDEO="test1_h265_3840x2048_30fps_30M_200frames.mp4" - echo "ip 127.0.0.1 port 9089" > config_low.txt - echo "ip 127.0.0.1 port 9090" > config_high.txt + echo "ip local port 9090" > config_high.txt + echo "ip local port 9089" > config_low.txt if [ "${TYPE}" = "LIVE" ] ; then - ffmpeg_4k_LIVE ${VIDEO} ${IP} + ffmpeg_4K_LIVE ${VIDEO} ${IP} else - ffmpeg_4k_VOD ${VIDEO} ${IP} + ffmpeg_4K_VOD ${VIDEO} ${IP} fi + else - VIDEO="test1_h265_8k_25fps_60M_100frames.mp4" - echo "ip 127.0.0.1 port 9089 numa 1" > config_low.txt - echo "ip 127.0.0.1 port 9090 numa 2" > config_high.txt + VIDEO="test1_h264_8k_25fps_30M_250frames.mp4" + echo "ip local port 9090 numa 1" > config_high.txt + echo "ip local port 9089 numa 2" > config_low.txt if [ "${TYPE}" = "LIVE" ] ; then - ffmpeg_8k_LIVE ${VIDEO} ${IP} + ffmpeg_8K_LIVE ${VIDEO} ${IP} else - ffmpeg_8k_VOD ${VIDEO} ${IP} + ffmpeg_8K_VOD ${VIDEO} ${IP} fi + fi diff --git a/OMAF-Sample/server/service.yaml b/OMAF-Sample/server/service.yaml new file mode 100644 index 00000000..e6aff7a7 --- /dev/null +++ b/OMAF-Sample/server/service.yaml @@ -0,0 +1,22 @@ +apiVersion: v1 +kind: Service +metadata: + name: immersive-service + labels: + app: immersive-server +spec: + type: NodePort + ports: + - port: 443 + protocol: TCP + targetPort: 443 + nodePort: 30001 + name: https + - port: 8080 + targetPort: 8080 + nodePort: 30002 + name: http + externalIPs: + - NODEIP + selector: + app: immersive-server diff --git a/OMAF-Sample/server/start.sh b/OMAF-Sample/server/start.sh new file mode 100755 index 00000000..61ded147 --- /dev/null +++ b/OMAF-Sample/server/start.sh @@ -0,0 +1,12 @@ +#!/bin/bash -ex + +DIR=$(dirname $(readlink -f "$0")) +HOSTNAME=$(kubectl get nodes | grep -v master | awk 'NR == 1 {next} {print $1}') +NODEIP=$(kubectl get node -o wide | \ + grep ${HOSTNAME} | \ + grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b") + +for YAML in $(find "${DIR}" -maxdepth 1 -name "*.yaml" -print); do + sed -i "s/NODEIP/${NODEIP}/g" ${YAML} + kubectl apply -f "${YAML}" +done diff --git a/OMAF-Sample/server/stop.sh b/OMAF-Sample/server/stop.sh new file mode 100755 index 00000000..eb1d4d65 --- /dev/null +++ b/OMAF-Sample/server/stop.sh @@ -0,0 +1,7 @@ +#!/bin/bash -e + +DIR=$(dirname $(readlink -f "$0")) + +for YAML in $(find "${DIR}" -maxdepth 1 -name "*.yaml" -print); do + kubectl delete --wait=false -f "${YAML}" --ingore-not-found=true 2>/dev/null +done diff --git a/README.md b/README.md index 1a2dbcae..e98974a5 100755 --- a/README.md +++ b/README.md @@ -1,15 +1,14 @@ + + # Open Visual Cloud Immersive Video Samples [![Travis Build Status](https://travis-ci.com/OpenVisualCloud/Immersive-Video-Sample.svg?branch=master)](https://travis-ci.com/OpenVisualCloud/Immersive-Video-Sample) -[![Stable release](https://img.shields.io/badge/latest_release-v1.0-green.svg)](https://github.com/OpenVisualCloud/Immersive-Video-Sample/releases/tag/v1.0.0) +[![Stable release](https://img.shields.io/badge/latest_release-v1.4.0-green.svg)](https://github.com/OpenVisualCloud/Immersive-Video-Sample/releases/latest) [![Contributions](https://img.shields.io/badge/contributions-welcome-blue.svg)](https://github.com/OpenVisualCloud/Immersive-Video-Sample/wiki) -This Immersive Video project includes 2 samples which are based on different streaming frameworks. - -OMAF sample is based on OMAF standard, uses MPEG DASH as the protocol to deliver tiled 360 video stream, and it can support both VOD and live streaming mode. 
-
- WebRTC sample enables tiled 360 video streaming based on WebRTC protocol and Open WebRTC Toolkit media server framework for low-latency streaming.
-
- Both of the samples use SVT-HEVC with MCTS supported to do 360 video tiled encoding and achieve real-time performance for 4K and 8K contents.
+This Immersive Video project includes two samples which are based on different streaming frameworks.
+The OMAF sample is based on the OMAF standard, uses MPEG DASH as the protocol to deliver tiled 360 video streams, and supports both VOD and live streaming modes.
+The WebRTC sample enables tiled 360 video streaming based on the WebRTC protocol and the Open WebRTC Toolkit media server framework for low-latency streaming.
+Both samples use SVT-HEVC with MCTS support to do 360 video tiled encoding and achieve real-time performance for 4K and 8K content.
 # What's in this project
 The Immersive Video project contains below components:
@@ -24,7 +23,7 @@ The Immersive Video samples may run on Linux* 64 bit operating systems. The list
 - **Server**: CentOS 7.6
 # License
-OMAF sample is licensed under OSI-approved BSD 3-Clause license and LGPLv2.0 license for different components, see [OMAF-Sample LICENSE](src/LICENSE).
+OMAF sample is licensed under the BSD 3-Clause "New" or "Revised" License, except that "FFMPEG Plugins" is under the LGPLv2.0 license, see [OMAF-Sample LICENSE](src/LICENSE).
 WebRTC sample is licensed under Apache License 2.0, see [WebRTC-Sample LICENSE](WebRTC-Sample/owt-server/LICENSE).
@@ -32,4 +31,4 @@ WebRTC sample is licensed under Apache License 2.0, see [WebRTC-Sample LICENSE](
 See [CONTRIBUTING](CONTRIBUTING.md) for details. Thank you!
 # How to report bugs
-Use the [Issues](https://github.com/OpenVisualCloud/Immersive-Video-Sample/issues) tab on GitHub.
\ No newline at end of file
+Use the [Issues](https://github.com/OpenVisualCloud/Immersive-Video-Sample/issues) tab on GitHub.
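The `deploy.yaml` and `service.yaml` added above expose the server through NodePorts 30001 and 30002, and `start.sh` substitutes the worker node IP before applying them. A quick way to confirm the deployment came up after `make start`, sketched here under the assumption that `kubectl` on the master points at the same cluster:

```bash
# Check that the immersive-server pod is running and the NodePort service is exposed.
kubectl get pods -l app=immersive-server -o wide     # pod should be Running on the worker node
kubectl get svc immersive-service                    # expect 443:30001/TCP and 8080:30002/TCP
kubectl logs deployment/immersive-server --tail=20   # nginx / run.sh output from the container
```

`make stop` runs `stop.sh`, which deletes the same resources with `kubectl delete -f`.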
diff --git a/Sample-Videos/.gitkeep b/Sample-Videos/.gitkeep deleted file mode 100644 index e69de29b..00000000 diff --git a/Sample-Videos/test1_h265_8k_25fps_60M_100frames.mp4 b/Sample-Videos/test1_h264_8k_25fps_30M_250frames.mp4 similarity index 65% rename from Sample-Videos/test1_h265_8k_25fps_60M_100frames.mp4 rename to Sample-Videos/test1_h264_8k_25fps_30M_250frames.mp4 index 287a55f6..75b43723 100644 Binary files a/Sample-Videos/test1_h265_8k_25fps_60M_100frames.mp4 and b/Sample-Videos/test1_h264_8k_25fps_30M_250frames.mp4 differ diff --git a/Sample-Videos/test1_h265_8k_30fps_60M_100frames.mp4 b/Sample-Videos/test1_h265_8k_30fps_60M_100frames.mp4 new file mode 100644 index 00000000..12564605 Binary files /dev/null and b/Sample-Videos/test1_h265_8k_30fps_60M_100frames.mp4 differ diff --git a/WebRTC-Sample/README.md b/WebRTC-Sample/README.md index 55db67b8..0d7fc91b 100755 --- a/WebRTC-Sample/README.md +++ b/WebRTC-Sample/README.md @@ -65,7 +65,7 @@ make Use the following commands to start or stop services via docker compose: ```bash -cd WebRTC-Sample/owt-server +cd WebRTC-Sample/owt-server/build # start 4k service make start_owt_immersive_4k @@ -74,7 +74,7 @@ make start_owt_immersive_4k make start_owt_immersive_8k # stop service -make start +make stop ``` # OWT-LINUX-PLAYER @@ -85,6 +85,7 @@ The OWT-LINIX-PLAYER is an immersive 360 video player on linux, with WebRTC back - __Linux* Operating Systems (64-bit):__ - Ubuntu 18.04 LTS + - Ubuntu 20.04 LTS ## Build @@ -114,12 +115,15 @@ vi config.xml >*2 is WebRTC source type* - "resolution": 4k >*Supported 4k or 8k resolution* -- "server_url": http://owt-server-ip:3001 +- "server_url": http://**owt-server-ip**:3001 ### Start player ```bash cd WebRTC-Sample/owt-linux-player/player - -./render # Press "s" key to start playing +source ./setupvars.sh +./render ``` +- **Press "s" key to start playing** +- Press "arrow" keys to change viewport +- Press "Esc" key to exit diff --git a/WebRTC-Sample/owt-linux-player/build_player.sh b/WebRTC-Sample/owt-linux-player/build_player.sh index e539654b..5d8d6ddf 100755 --- a/WebRTC-Sample/owt-linux-player/build_player.sh +++ b/WebRTC-Sample/owt-linux-player/build_player.sh @@ -1,26 +1,41 @@ -#!/bin/bash +#!/bin/bash -ex ROOT=`pwd` WEBRTC_LINUX_CLIENT_SDK=${ROOT}/webrtc_linux_client_sdk/release PLAYER_BUILD=${ROOT}/player/ -# configure -PLAYER_SRC=${ROOT}/../../src/player - -mkdir -p ${PLAYER_BUILD}/Build -cd ${PLAYER_BUILD}/Build +# player lib configure +PLAYER_LIB_SRC=${ROOT}/../../src/player/player_lib +PLAYER_LIB_BUILD=${ROOT}/player/player_lib_build +mkdir -p ${PLAYER_LIB_BUILD} +cd ${PLAYER_LIB_BUILD} +cmake -DLINUX_OS=ON -DUSE_OMAF=OFF -DUSE_WEBRTC=ON -DWEBRTC_LINUX_SDK=${WEBRTC_LINUX_CLIENT_SDK} ${PLAYER_LIB_SRC} +make -j -cmake -DUSE_OMAF=OFF -DUSE_WEBRTC=ON -DWEBRTC_LINUX_SDK=${WEBRTC_LINUX_CLIENT_SDK} ${PLAYER_SRC} +if [ -f ${WEBRTC_LINUX_CLIENT_SDK}/lib/libMediaPlayer.so ]; then + rm -v ${WEBRTC_LINUX_CLIENT_SDK}/lib/libMediaPlayer.so +fi +cp -v ${PLAYER_LIB_BUILD}/libMediaPlayer.so ${WEBRTC_LINUX_CLIENT_SDK}/lib/ + +# player app configure +PLAYER_APP_SRC=${ROOT}/../../src/player/app +PLAYER_APP_BUILD=${ROOT}/player/player_app_build +mkdir -p ${PLAYER_APP_BUILD}/ +cd ${PLAYER_APP_BUILD}/ +cmake -DUSE_OMAF=OFF -DUSE_WEBRTC=ON -DWEBRTC_LINUX_SDK=${WEBRTC_LINUX_CLIENT_SDK} ${PLAYER_APP_SRC} make -j rm -fv ${PLAYER_BUILD}/render -cp -v ${PLAYER_BUILD}/Build/render ${PLAYER_BUILD}/render +cp -v ${PLAYER_APP_BUILD}/render ${PLAYER_BUILD}/render if [ ! 
-f ${PLAYER_BUILD}/config.xml ]; then - cp -v ${PLAYER_SRC}/config.xml ${PLAYER_BUILD}/config.xml + cp -v ${PLAYER_APP_SRC}/linux/config.xml ${PLAYER_BUILD}/config.xml sed 's|.*|2|g' -i ${PLAYER_BUILD}/config.xml sed 's|.*|4k|g' -i ${PLAYER_BUILD}/config.xml sed 's|.*|http://owt-server-ip:3001|g' -i ${PLAYER_BUILD}/config.xml fi +if [ ! -f ${PLAYER_BUILD}/setupvars.sh ]; then + echo "export LD_LIBRARY_PATH=${WEBRTC_LINUX_CLIENT_SDK}/lib:\$LD_LIBRARY_PATH" > ${PLAYER_BUILD}/setupvars.sh +fi diff --git a/WebRTC-Sample/owt-linux-player/build_webrtc_linux_client_sdk.sh b/WebRTC-Sample/owt-linux-player/build_webrtc_linux_client_sdk.sh index 402bf6de..a0da91e3 100755 --- a/WebRTC-Sample/owt-linux-player/build_webrtc_linux_client_sdk.sh +++ b/WebRTC-Sample/owt-linux-player/build_webrtc_linux_client_sdk.sh @@ -1,16 +1,31 @@ -#!/bin/bash +#!/bin/bash -e + +SUDO="" +if [[ $EUID -ne 0 ]]; then + SUDO="sudo -E" +fi ROOT=`pwd`/webrtc_linux_client_sdk BUILD=${ROOT}/Build PREFIX=${ROOT}/release DEPS=${BUILD}/deps +PATCHES=${ROOT}/../patches install_dependencies() { - sudo -E apt-get update - sudo -E apt install -y git build-essential wget python cmake pkg-config libglib2.0-dev libgtk-3-dev libasound2-dev libpulse-dev + ${SUDO} apt-get update + ${SUDO} apt install -y git build-essential wget python cmake pkg-config libglib2.0-dev libgtk-3-dev libasound2-dev libpulse-dev # player - sudo -E apt install -y yasm libgoogle-glog-dev libva-dev libglm-dev libglfw3-dev libgles2-mesa-dev libglu1-mesa-dev liblzma-dev + ${SUDO} apt install -y yasm libgoogle-glog-dev libva-dev libglm-dev libglfw3-dev libgles2-mesa-dev libglu1-mesa-dev liblzma-dev + + # set git user + if [ -z "`git config --get user.email`" ]; then + git config --global user.email "you@example.com" + fi + + if [ -z "`git config --get user.name`" ]; then + git config --global user.name "Your Name" + fi } install_openssl() { @@ -22,7 +37,7 @@ install_openssl() { tar -zxvf openssl-1.1.0l.tar.gz cd openssl-1.1.0l - ./config no-shared -m64 --prefix=${PREFIX} --openssldir=${PREFIX} + ./config shared -m64 --prefix=${PREFIX} --openssldir=${PREFIX} make -j make install } @@ -37,7 +52,7 @@ install_boost() { cd boost_1_67_0 ./bootstrap.sh - ./b2 -j`nproc` variant=release link=static runtime-link=shared --with-system --with-random --with-date_time --with-regex --with-thread --with-filesystem --with-chrono --with-atomic + ./b2 -j`nproc` variant=release link=shared runtime-link=shared --with-system --with-random --with-date_time --with-regex --with-thread --with-filesystem --with-chrono --with-atomic } install_socket_io_client() { @@ -46,17 +61,22 @@ install_socket_io_client() { rm -rf socket.io-client-cpp git clone --recurse-submodules https://github.com/socketio/socket.io-client-cpp.git - cd socket.io-client-cpp/lib/websocketpp + cd socket.io-client-cpp + git reset --hard 6063cb1d612f6ca0232d4134a018053fb8faea20 + + cd lib/websocketpp git pull origin master + git reset --hard 1b11fd301531e6df35a6107c1e8665b1e77a2d8e + cd ../.. 
mkdir -p build cd build - cmake -DBOOST_ROOT:STRING=${DEPS}/boost_1_67_0 -DOPENSSL_ROOT_DIR:STRING=${PREFIX} ../ + cmake -DCMAKE_C_FLAGS="-fPIC" -DCMAKE_CXX_FLAGS="-fPIC" -DBOOST_ROOT:STRING=${DEPS}/boost_1_67_0 -DOPENSSL_ROOT_DIR:STRING=${PREFIX} ../ make -j make install - cp -v lib/Release/* ${PREFIX}/lib + cp -v lib/Release/libsioclient* ${PREFIX}/lib cp -v include/* ${PREFIX}/include } @@ -92,12 +112,18 @@ install_owt_client_native () { cd owt-client-native gen_gclient + rm -fr src git clone https://github.com/open-webrtc-toolkit/owt-client-native.git src cd src git checkout 2a9d948b59502559843d63775a395affb10cb128 + sed -i 's/2fa91a1fc71b324ab46483777d7e6da90c57d3c6/28f5c7fd13db33267dcd7ad18851e9750c59d69a/g' DEPS gclient sync --no-history + cd third_party/webrtc + patch -p1 < ${PATCHES}/webrtc-Implement-FOV-RTCP-feedback.patch + cd - + patch -p1 < ${PATCHES}/sdk-Implement-FOV-RTCP-feedback.patch sed -i 's/rtc_use_h264=true/rtc_use_h264=false/g' scripts/build_linux.py python scripts/build_linux.py --gn_gen --sdk --arch x64 --ssl_root ${PREFIX} --scheme release --output_path "out" @@ -118,11 +144,30 @@ install_ffmpeg(){ rm -fr ${DIR} tar xf ${SRC} cd ${DIR} - ./configure --prefix=${PREFIX} --disable-shared --enable-static --disable-vaapi + ./configure --prefix=${PREFIX} --enable-shared --disable-static --disable-vaapi make -j make install } +install_safestringlib(){ + cd ${BUILD} + + rm -fr safestringlib + git clone https://github.com/intel/safestringlib.git + + cd safestringlib + git checkout 245c4b8cff1d2e7338b7f3a82828fc8e72b29549 + + mkdir build + cd build + cmake .. + make -j + + cp -v libsafestring_shared.so ${PREFIX}/lib/ + mkdir -p ${PREFIX}/include/safestringlib + cp -rfv ../include/* ${PREFIX}/include/safestringlib/ +} + install_360scvp(){ cd ${BUILD} @@ -130,6 +175,10 @@ install_360scvp(){ mkdir 360scvp cd 360scvp + + sed -i "s@INCLUDE_DIRECTORIES\(.*\)@INCLUDE_DIRECTORIES\1\nINCLUDE_DIRECTORIES(${PREFIX}/include)@" ${BUILD}/../../../../src/360SCVP/CMakeLists.txt + sed -i "s@LINK_DIRECTORIES\(.*\)@LINK_DIRECTORIES\1\nLINK_DIRECTORIES(${PREFIX}/lib)@" ${BUILD}/../../../../src/360SCVP/CMakeLists.txt + cmake -DCMAKE_INSTALL_PREFIX=${PREFIX} ${BUILD}/../../../../src/360SCVP/ make -j make install @@ -146,4 +195,5 @@ install_owt_client_native # player install_ffmpeg +install_safestringlib install_360scvp diff --git a/WebRTC-Sample/owt-linux-player/patches/sdk-Implement-FOV-RTCP-feedback.patch b/WebRTC-Sample/owt-linux-player/patches/sdk-Implement-FOV-RTCP-feedback.patch new file mode 100644 index 00000000..0ddd2f6c --- /dev/null +++ b/WebRTC-Sample/owt-linux-player/patches/sdk-Implement-FOV-RTCP-feedback.patch @@ -0,0 +1,78 @@ +From 2dbf35985d047749b0396d960864f21240d1e2b0 Mon Sep 17 00:00:00 2001 +From: Jianhui Dai +Date: Wed, 30 Sep 2020 13:08:08 +0800 +Subject: [PATCH] Implement FOV RTCP feedback + +--- + .../sdk/base/customizedvideodecoderproxy.cc | 21 ++++++++++++++++++- + .../cpp/owt/base/videodecoderinterface.h | 10 +++++++++ + 2 files changed, 30 insertions(+), 1 deletion(-) + +diff --git a/talk/owt/sdk/base/customizedvideodecoderproxy.cc b/talk/owt/sdk/base/customizedvideodecoderproxy.cc +index 6fef4ee..f2ba857 100644 +--- a/talk/owt/sdk/base/customizedvideodecoderproxy.cc ++++ b/talk/owt/sdk/base/customizedvideodecoderproxy.cc +@@ -9,7 +9,9 @@ namespace base { + + CustomizedVideoDecoderProxy::CustomizedVideoDecoderProxy(VideoCodecType type, + VideoDecoderInterface* external_video_decoder) +- : codec_type_(type), decoded_image_callback_(nullptr), 
external_decoder_(external_video_decoder) {} ++ : codec_type_(type), decoded_image_callback_(nullptr), external_decoder_(external_video_decoder) { ++ external_decoder_->SetCustomizedVideoDecoderProxy(this); ++ } + + CustomizedVideoDecoderProxy::~CustomizedVideoDecoderProxy() { + if (external_decoder_) { +@@ -88,5 +90,22 @@ CustomizedVideoDecoderProxy::Create(VideoCodecType type, + VideoDecoderInterface* external_video_decoder) { + return absl::make_unique(type, external_video_decoder); + } ++ ++void VideoDecoderInterface::SetCustomizedVideoDecoderProxy(void *customized_video_decoder_proxy) ++{ ++ customized_video_decoder_proxy_ = customized_video_decoder_proxy; ++} ++ ++void *VideoDecoderInterface::GetCustomizedVideoDecoderProxy(void) ++{ ++ return customized_video_decoder_proxy_; ++} ++ ++void VideoDecoderInterface::SendFOVFeedback(uint16_t yaw, uint16_t pitch) ++{ ++ if (customized_video_decoder_proxy_) ++ static_cast(customized_video_decoder_proxy_)->SendFOVFeedback(yaw, pitch); ++} ++ + } + } +diff --git a/talk/owt/sdk/include/cpp/owt/base/videodecoderinterface.h b/talk/owt/sdk/include/cpp/owt/base/videodecoderinterface.h +index dced0fe..02cc35b 100644 +--- a/talk/owt/sdk/include/cpp/owt/base/videodecoderinterface.h ++++ b/talk/owt/sdk/include/cpp/owt/base/videodecoderinterface.h +@@ -26,6 +26,8 @@ struct VideoEncodedFrame { + */ + class VideoDecoderInterface { + public: ++ VideoDecoderInterface() : customized_video_decoder_proxy_(nullptr) {} ++ + /** + @brief Destructor + */ +@@ -51,6 +53,14 @@ class VideoDecoderInterface { + @brief This function generates the customized decoder for each peer connection + */ + virtual VideoDecoderInterface* Copy() = 0; ++ ++ void SetCustomizedVideoDecoderProxy(void *customized_video_decoder_proxy); ++ void *GetCustomizedVideoDecoderProxy(void); ++ ++ void SendFOVFeedback(uint16_t yaw, uint16_t pitch); ++ ++ private: ++ void *customized_video_decoder_proxy_; + }; + } + } +-- +2.17.1 diff --git a/WebRTC-Sample/owt-linux-player/patches/webrtc-Implement-FOV-RTCP-feedback.patch b/WebRTC-Sample/owt-linux-player/patches/webrtc-Implement-FOV-RTCP-feedback.patch new file mode 100644 index 00000000..0cdc5fd9 --- /dev/null +++ b/WebRTC-Sample/owt-linux-player/patches/webrtc-Implement-FOV-RTCP-feedback.patch @@ -0,0 +1,535 @@ +From e305332b8a8b7909fa677a18b2b843b5a2d7e145 Mon Sep 17 00:00:00 2001 +From: Jianhui Dai +Date: Wed, 30 Sep 2020 13:05:40 +0800 +Subject: [PATCH] Implement FOV RTCP feedback + +--- + api/video_codecs/video_decoder.cc | 9 ++ + api/video_codecs/video_decoder.h | 8 ++ + call/video_receive_stream.h | 2 + + modules/rtp_rtcp/BUILD.gn | 2 + + modules/rtp_rtcp/include/rtp_rtcp.h | 3 + + modules/rtp_rtcp/include/rtp_rtcp_defines.h | 3 +- + .../source/rtcp_packet/fov_feedback.cc | 101 ++++++++++++++++++ + .../source/rtcp_packet/fov_feedback.h | 52 +++++++++ + modules/rtp_rtcp/source/rtcp_sender.cc | 15 +++ + modules/rtp_rtcp/source/rtcp_sender.h | 7 ++ + modules/rtp_rtcp/source/rtp_rtcp_impl.cc | 9 ++ + modules/rtp_rtcp/source/rtp_rtcp_impl.h | 3 + + modules/video_coding/video_receiver2.cc | 5 +- + modules/video_coding/video_receiver2.h | 5 +- + video/rtp_video_stream_receiver.cc | 4 + + video/rtp_video_stream_receiver.h | 2 + + video/video_receive_stream.cc | 6 +- + video/video_receive_stream.h | 2 + + 18 files changed, 234 insertions(+), 4 deletions(-) + create mode 100644 modules/rtp_rtcp/source/rtcp_packet/fov_feedback.cc + create mode 100644 modules/rtp_rtcp/source/rtcp_packet/fov_feedback.h + +diff --git a/api/video_codecs/video_decoder.cc 
b/api/video_codecs/video_decoder.cc +index b181323911..04001072dc 100644 +--- a/api/video_codecs/video_decoder.cc ++++ b/api/video_codecs/video_decoder.cc +@@ -9,6 +9,7 @@ + */ + + #include "api/video_codecs/video_decoder.h" ++#include "call/video_receive_stream.h" + + namespace webrtc { + +@@ -32,4 +33,12 @@ const char* VideoDecoder::ImplementationName() const { + return "unknown"; + } + ++void VideoDecoder::SetVideoStreamReceiver(void *video_stream_receiver) { ++ video_stream_receiver_ = video_stream_receiver; ++} ++ ++void VideoDecoder::SendFOVFeedback(uint16_t yaw, uint16_t pitch) { ++ static_cast(video_stream_receiver_)->SendFOVFeedback(yaw,pitch); ++} ++ + } // namespace webrtc +diff --git a/api/video_codecs/video_decoder.h b/api/video_codecs/video_decoder.h +index 266d653693..51a2e81ce1 100644 +--- a/api/video_codecs/video_decoder.h ++++ b/api/video_codecs/video_decoder.h +@@ -42,6 +42,8 @@ class RTC_EXPORT DecodedImageCallback { + + class RTC_EXPORT VideoDecoder { + public: ++ VideoDecoder():video_stream_receiver_(nullptr) {} ++ + virtual ~VideoDecoder() {} + + virtual int32_t InitDecode(const VideoCodec* codec_settings, +@@ -62,6 +64,12 @@ class RTC_EXPORT VideoDecoder { + virtual bool PrefersLateDecoding() const; + + virtual const char* ImplementationName() const; ++ ++ virtual void SetVideoStreamReceiver(void *video_stream_receiver); ++ virtual void SendFOVFeedback(uint16_t yaw, uint16_t pitch); ++ ++ private: ++ void *video_stream_receiver_; + }; + + } // namespace webrtc +diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h +index 6e087383ba..ce436a91c8 100644 +--- a/call/video_receive_stream.h ++++ b/call/video_receive_stream.h +@@ -277,6 +277,8 @@ class VideoReceiveStream { + virtual void SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor) = 0; + ++ virtual void SendFOVFeedback(uint16_t yaw, uint16_t pitch) {} ++ + protected: + virtual ~VideoReceiveStream() {} + }; +diff --git a/modules/rtp_rtcp/BUILD.gn b/modules/rtp_rtcp/BUILD.gn +index b74c17788f..3cb5cdaf45 100644 +--- a/modules/rtp_rtcp/BUILD.gn ++++ b/modules/rtp_rtcp/BUILD.gn +@@ -27,6 +27,7 @@ rtc_source_set("rtp_rtcp_format") { + "source/rtcp_packet/extended_jitter_report.h", + "source/rtcp_packet/extended_reports.h", + "source/rtcp_packet/fir.h", ++ "source/rtcp_packet/fov_feedback.h", + "source/rtcp_packet/loss_notification.h", + "source/rtcp_packet/nack.h", + "source/rtcp_packet/pli.h", +@@ -65,6 +66,7 @@ rtc_source_set("rtp_rtcp_format") { + "source/rtcp_packet/extended_jitter_report.cc", + "source/rtcp_packet/extended_reports.cc", + "source/rtcp_packet/fir.cc", ++ "source/rtcp_packet/fov_feedback.cc", + "source/rtcp_packet/loss_notification.cc", + "source/rtcp_packet/nack.cc", + "source/rtcp_packet/pli.cc", +diff --git a/modules/rtp_rtcp/include/rtp_rtcp.h b/modules/rtp_rtcp/include/rtp_rtcp.h +index 7682b4a628..c4673f2a05 100644 +--- a/modules/rtp_rtcp/include/rtp_rtcp.h ++++ b/modules/rtp_rtcp/include/rtp_rtcp.h +@@ -429,6 +429,9 @@ class RtpRtcp : public Module, public RtcpFeedbackSenderInterface { + uint16_t last_received_seq_num, + bool decodability_flag, + bool buffering_allowed) = 0; ++ ++ virtual void SendFOVFeedback(uint16_t yaw, ++ uint16_t pitch) = 0; + }; + + } // namespace webrtc +diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/modules/rtp_rtcp/include/rtp_rtcp_defines.h +index db6f53ca71..dbc1663db9 100644 +--- a/modules/rtp_rtcp/include/rtp_rtcp_defines.h ++++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.h +@@ -97,7 +97,8 @@ enum RTCPPacketType : uint32_t { + 
kRtcpXrReceiverReferenceTime = 0x40000, + kRtcpXrDlrrReportBlock = 0x80000, + kRtcpTransportFeedback = 0x100000, +- kRtcpXrTargetBitrate = 0x200000 ++ kRtcpXrTargetBitrate = 0x200000, ++ kRtcpFOVFeedback = 0x400000 + }; + + enum RtxMode { +diff --git a/modules/rtp_rtcp/source/rtcp_packet/fov_feedback.cc b/modules/rtp_rtcp/source/rtcp_packet/fov_feedback.cc +new file mode 100644 +index 0000000000..7c2f9417e1 +--- /dev/null ++++ b/modules/rtp_rtcp/source/rtcp_packet/fov_feedback.cc +@@ -0,0 +1,101 @@ ++/* ++ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. ++ * ++ * Use of this source code is governed by a BSD-style license ++ * that can be found in the LICENSE file in the root of the source ++ * tree. An additional intellectual property rights grant can be found ++ * in the file PATENTS. All contributing project authors may ++ * be found in the AUTHORS file in the root of the source tree. ++ */ ++ ++#include "modules/rtp_rtcp/source/rtcp_packet/fov_feedback.h" ++ ++#include "modules/rtp_rtcp/source/byte_io.h" ++#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" ++#include "rtc_base/checks.h" ++#include "rtc_base/logging.h" ++ ++namespace webrtc { ++namespace rtcp { ++constexpr uint8_t FOVFeedback::kFeedbackMessageType; ++// RFC 4585: Feedback format. ++// Common packet format: ++// ++// 0 1 2 3 ++// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 ++// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ++// |V=2|P| FMT | PT | length | ++// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ++// | SSRC of packet sender | ++// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ++// | SSRC of media source | ++// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ++// : Feedback Control Information (FCI) : ++// : : ++// FOV Feedback (RFC xxxx). ++// The Feedback Control Information (FCI) for the FOV Feedback ++// FCI: ++// 0 1 2 3 ++// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 ++// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ++// | Seq nr. 
| yaw | ++// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ++// | pitch | Reserved = 0 | ++// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ ++ ++FOVFeedback::FOVFeedback() = default; ++ ++FOVFeedback::FOVFeedback(const FOVFeedback& FOVFeedback) = default; ++ ++FOVFeedback::~FOVFeedback() = default; ++ ++bool FOVFeedback::Parse(const CommonHeader& packet) { ++ RTC_DCHECK_EQ(packet.type(), kPacketType); ++ RTC_DCHECK_EQ(packet.fmt(), kFeedbackMessageType); ++ ++ if ((packet.payload_size_bytes() - kCommonFeedbackLength) != kFciLength) { ++ RTC_LOG(LS_WARNING) << "Invalid size for a valid FOVFeedback packet."; ++ return false; ++ } ++ ++ ParseCommonFeedback(packet.payload()); ++ ++ const uint8_t* next_fci = packet.payload() + kCommonFeedbackLength; ++ seq_nr_ = ByteReader::ReadBigEndian(next_fci); ++ yaw_ = ByteReader::ReadBigEndian(next_fci + 2); ++ pitch_ = ByteReader::ReadBigEndian(next_fci + 4); ++ return true; ++} ++ ++size_t FOVFeedback::BlockLength() const { ++ return kHeaderLength + kCommonFeedbackLength + kFciLength; ++} ++ ++bool FOVFeedback::Create(uint8_t* packet, ++ size_t* index, ++ size_t max_length, ++ PacketReadyCallback callback) const { ++ while (*index + BlockLength() > max_length) { ++ if (!OnBufferFull(packet, index, callback)) ++ return false; ++ } ++ size_t index_end = *index + BlockLength(); ++ CreateHeader(kFeedbackMessageType, kPacketType, HeaderLength(), packet, ++ index); ++ RTC_DCHECK_EQ(Psfb::media_ssrc(), 0); ++ CreateCommonFeedback(packet + *index); ++ *index += kCommonFeedbackLength; ++ ++ constexpr uint32_t kReserved = 0; ++ ++ ByteWriter::WriteBigEndian(packet + *index, seq_nr_); ++ ByteWriter::WriteBigEndian(packet + *index + 2, yaw_); ++ ByteWriter::WriteBigEndian(packet + *index + 4, pitch_); ++ ByteWriter::WriteBigEndian(packet + *index + 6, kReserved); ++ *index += kFciLength; ++ ++ RTC_CHECK_EQ(*index, index_end); ++ return true; ++} ++} // namespace rtcp ++} // namespace webrtc +diff --git a/modules/rtp_rtcp/source/rtcp_packet/fov_feedback.h b/modules/rtp_rtcp/source/rtcp_packet/fov_feedback.h +new file mode 100644 +index 0000000000..f04dbed1c4 +--- /dev/null ++++ b/modules/rtp_rtcp/source/rtcp_packet/fov_feedback.h +@@ -0,0 +1,52 @@ ++/* ++ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. ++ * ++ * Use of this source code is governed by a BSD-style license ++ * that can be found in the LICENSE file in the root of the source ++ * tree. An additional intellectual property rights grant can be found ++ * in the file PATENTS. All contributing project authors may ++ * be found in the AUTHORS file in the root of the source tree. ++ */ ++ ++#ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_FOVFeedback_H_ ++#define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_FOVFeedback_H_ ++ ++#include ++ ++#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" ++ ++namespace webrtc { ++namespace rtcp { ++class CommonHeader; ++// FOV Feedback (RFC xxxx). ++class FOVFeedback : public Psfb { ++ public: ++ static constexpr uint8_t kFeedbackMessageType = 8; ++ ++ FOVFeedback(); ++ FOVFeedback(const FOVFeedback& FOVFeedback); ++ ~FOVFeedback() override; ++ ++ // Parse assumes header is already parsed and validated. 
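// --- Illustrative sketch, not part of the patch -----------------------------
// The FCI that FOVFeedback::Create() serializes and Parse() reads back is the
// 8-byte block drawn in the diagram above: seq nr, yaw and pitch as big-endian
// 16-bit fields, followed by 16 reserved bits. The standalone snippet below
// mirrors that layout with plain C++ (no WebRTC ByteWriter/ByteReader helpers),
// so the wire format can be sanity-checked in isolation.
#include <array>
#include <cstdint>
#include <cstdio>

std::array<uint8_t, 8> PackFovFci(uint16_t seq, uint16_t yaw, uint16_t pitch) {
  std::array<uint8_t, 8> fci{};                 // bytes 6..7 stay 0 (Reserved)
  auto put16 = [&fci](int off, uint16_t v) {
    fci[off] = static_cast<uint8_t>(v >> 8);    // big-endian: high byte first
    fci[off + 1] = static_cast<uint8_t>(v & 0xFF);
  };
  put16(0, seq);
  put16(2, yaw);
  put16(4, pitch);
  return fci;
}

uint16_t ReadFci16(const std::array<uint8_t, 8>& fci, int off) {
  return static_cast<uint16_t>((fci[off] << 8) | fci[off + 1]);
}

int main() {
  const auto fci = PackFovFci(/*seq=*/1, /*yaw=*/120, /*pitch=*/45);
  std::printf("seq=%d yaw=%d pitch=%d\n",
              ReadFci16(fci, 0), ReadFci16(fci, 2), ReadFci16(fci, 4));
  return 0;
}
// -----------------------------------------------------------------------------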
++ bool Parse(const CommonHeader& packet); ++ ++ void SetSeqNr(uint16_t seq_nr) {seq_nr_ = seq_nr;} ++ void SetFOV(uint16_t yaw, uint16_t pitch) {yaw_ = yaw; pitch_ = pitch;} ++ ++ size_t BlockLength() const override; ++ ++ bool Create(uint8_t* packet, ++ size_t* index, ++ size_t max_length, ++ PacketReadyCallback callback) const override; ++ ++ private: ++ static constexpr size_t kFciLength = 8; ++ ++ uint16_t seq_nr_; ++ uint16_t yaw_; ++ uint16_t pitch_; ++}; ++} // namespace rtcp ++} // namespace webrtc ++#endif // MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_FOVFeedback_H_ +diff --git a/modules/rtp_rtcp/source/rtcp_sender.cc b/modules/rtp_rtcp/source/rtcp_sender.cc +index 0f119ef235..9f5252f06e 100644 +--- a/modules/rtp_rtcp/source/rtcp_sender.cc ++++ b/modules/rtp_rtcp/source/rtcp_sender.cc +@@ -23,6 +23,7 @@ + #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" + #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" + #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" ++#include "modules/rtp_rtcp/source/rtcp_packet/fov_feedback.h" + #include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" + #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" + #include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +@@ -169,6 +170,7 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config) + receive_statistics_(config.receive_statistics), + + sequence_number_fir_(0), ++ sequence_number_fov_feedback_(0), + + remb_bitrate_(0), + +@@ -200,6 +202,7 @@ RTCPSender::RTCPSender(const RtpRtcp::Configuration& config) + builders_[kRtcpTmmbn] = &RTCPSender::BuildTMMBN; + builders_[kRtcpNack] = &RTCPSender::BuildNACK; + builders_[kRtcpAnyExtendedReports] = &RTCPSender::BuildExtendedReports; ++ builders_[kRtcpFOVFeedback] = &RTCPSender::BuildFOVFeedback; + } + + RTCPSender::~RTCPSender() {} +@@ -693,6 +696,18 @@ std::unique_ptr RTCPSender::BuildExtendedReports( + return std::move(xr); + } + ++std::unique_ptr RTCPSender::BuildFOVFeedback(const RtcpContext& ctx) { ++ ++sequence_number_fov_feedback_; ++ ++ rtcp::FOVFeedback* fov_feedback = new rtcp::FOVFeedback(); ++ fov_feedback->SetSenderSsrc(ssrc_); ++ fov_feedback->SetMediaSsrc(remote_ssrc_); ++ fov_feedback->SetSeqNr(sequence_number_fov_feedback_); ++ fov_feedback->SetFOV(ctx.feedback_state_.yaw, ctx.feedback_state_.pitch); ++ ++ return std::unique_ptr(fov_feedback); ++} ++ + int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state, + RTCPPacketType packetType, + int32_t nack_size, +diff --git a/modules/rtp_rtcp/source/rtcp_sender.h b/modules/rtp_rtcp/source/rtcp_sender.h +index 32c1e1dbc1..cd2d8c0289 100644 +--- a/modules/rtp_rtcp/source/rtcp_sender.h ++++ b/modules/rtp_rtcp/source/rtcp_sender.h +@@ -61,6 +61,9 @@ class RTCPSender { + + // Used when generating TMMBR. 
+ ModuleRtpRtcpImpl* module; ++ ++ uint16_t yaw; ++ uint16_t pitch; + }; + + explicit RTCPSender(const RtpRtcp::Configuration& config); +@@ -180,6 +183,8 @@ class RTCPSender { + RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + std::unique_ptr BuildNACK(const RtcpContext& context) + RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); ++ std::unique_ptr BuildFOVFeedback(const RtcpContext& context) ++ RTC_EXCLUSIVE_LOCKS_REQUIRED(critical_section_rtcp_sender_); + + private: + const bool audio_; +@@ -217,6 +222,8 @@ class RTCPSender { + // Full intra request + uint8_t sequence_number_fir_ RTC_GUARDED_BY(critical_section_rtcp_sender_); + ++ uint16_t sequence_number_fov_feedback_ RTC_GUARDED_BY(critical_section_rtcp_sender_); ++ + // Loss Notification + struct LossNotificationState { + uint16_t last_decoded_seq_num; +diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +index 4ff584e27f..5341a04b34 100644 +--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc ++++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +@@ -754,4 +754,13 @@ const RTPSender* ModuleRtpRtcpImpl::RtpSender() const { + return rtp_sender_.get(); + } + ++void ModuleRtpRtcpImpl::SendFOVFeedback(uint16_t yaw, ++ uint16_t pitch) { ++ RTCPSender::FeedbackState state = GetFeedbackState(); ++ ++ state.yaw = yaw; ++ state.pitch = pitch; ++ rtcp_sender_.SendRTCP(state, kRtcpFOVFeedback); ++} ++ + } // namespace webrtc +diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h +index 2d6cfff489..f248dfa2f0 100644 +--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h ++++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h +@@ -286,6 +286,9 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { + + Clock* clock() const { return clock_; } + ++ void SendFOVFeedback(uint16_t yaw, ++ uint16_t pitch) override; ++ + private: + FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, Rtt); + FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, RttForReceiverOnly); +diff --git a/modules/video_coding/video_receiver2.cc b/modules/video_coding/video_receiver2.cc +index d1e57d17be..43b18fa165 100644 +--- a/modules/video_coding/video_receiver2.cc ++++ b/modules/video_coding/video_receiver2.cc +@@ -28,9 +28,10 @@ + + namespace webrtc { + +-VideoReceiver2::VideoReceiver2(Clock* clock, VCMTiming* timing) ++VideoReceiver2::VideoReceiver2(Clock* clock, VCMTiming* timing, VideoReceiveStream *video_receive_stream) + : clock_(clock), + timing_(timing), ++ video_receive_stream_(video_receive_stream), + decodedFrameCallback_(timing_, clock_), + codecDataBase_() { + decoder_thread_checker_.Detach(); +@@ -62,6 +63,8 @@ void VideoReceiver2::RegisterExternalDecoder(VideoDecoder* externalDecoder, + return; + } + codecDataBase_.RegisterExternalDecoder(externalDecoder, payloadType); ++ ++ externalDecoder->SetVideoStreamReceiver(video_receive_stream_); + } + + void VideoReceiver2::DecoderThreadStarting() { +diff --git a/modules/video_coding/video_receiver2.h b/modules/video_coding/video_receiver2.h +index 202072a560..097da9773a 100644 +--- a/modules/video_coding/video_receiver2.h ++++ b/modules/video_coding/video_receiver2.h +@@ -17,6 +17,7 @@ + #include "modules/video_coding/timing.h" + #include "rtc_base/thread_checker.h" + #include "system_wrappers/include/clock.h" ++#include "call/video_receive_stream.h" + + namespace webrtc { + +@@ -27,7 +28,7 @@ namespace webrtc { + // VideoCodingModule api. 
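// --- Illustrative sketch, not part of the patch -----------------------------
// RegisterExternalDecoder() above now hands the owning VideoReceiveStream to the
// external decoder through SetVideoStreamReceiver(), so an application that owns
// the decoder object can push the viewer's head pose upstream from it. A minimal
// call site could look like this; it assumes the patched
// api/video_codecs/video_decoder.h is on the include path, and the yaw/pitch
// units are whatever the sending side expects (the patch does not prescribe them).
#include <cstdint>
#include "api/video_codecs/video_decoder.h"

void OnHeadPoseChanged(webrtc::VideoDecoder* external_decoder,
                       uint16_t yaw, uint16_t pitch) {
  // Travels VideoDecoder -> VideoReceiveStream -> RtpVideoStreamReceiver
  // -> ModuleRtpRtcpImpl -> RTCPSender::BuildFOVFeedback(), which emits the
  // PSFB (FMT = 8) FOV feedback packet defined in rtcp_packet/fov_feedback.cc.
  external_decoder->SendFOVFeedback(yaw, pitch);
}
// -----------------------------------------------------------------------------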
+ class VideoReceiver2 { + public: +- VideoReceiver2(Clock* clock, VCMTiming* timing); ++ VideoReceiver2(Clock* clock, VCMTiming* timing, VideoReceiveStream *video_receive_stream = nullptr); + ~VideoReceiver2(); + + int32_t RegisterReceiveCodec(const VideoCodec* receiveCodec, +@@ -68,6 +69,8 @@ class VideoReceiver2 { + #if RTC_DCHECK_IS_ON + bool decoder_thread_is_running_ = false; + #endif ++ ++ VideoReceiveStream *video_receive_stream_; + }; + + } // namespace webrtc +diff --git a/video/rtp_video_stream_receiver.cc b/video/rtp_video_stream_receiver.cc +index 5b4cde2c3c..eb7285504c 100644 +--- a/video/rtp_video_stream_receiver.cc ++++ b/video/rtp_video_stream_receiver.cc +@@ -953,4 +953,8 @@ void RtpVideoStreamReceiver::InsertSpsPpsIntoTracker(uint8_t payload_type) { + sprop_decoder.pps_nalu()); + } + ++void RtpVideoStreamReceiver::SendFOVFeedback(uint16_t yaw, uint16_t pitch) { ++ rtp_rtcp_->SendFOVFeedback(yaw, pitch); ++} ++ + } // namespace webrtc +diff --git a/video/rtp_video_stream_receiver.h b/video/rtp_video_stream_receiver.h +index b664b9f798..cb6b5d9a98 100644 +--- a/video/rtp_video_stream_receiver.h ++++ b/video/rtp_video_stream_receiver.h +@@ -177,6 +177,8 @@ class RtpVideoStreamReceiver : public LossNotificationSender, + void AddSecondarySink(RtpPacketSinkInterface* sink); + void RemoveSecondarySink(const RtpPacketSinkInterface* sink); + ++ void SendFOVFeedback(uint16_t yaw, uint16_t pitch); ++ + private: + // Used for buffering RTCP feedback messages and sending them all together. + // Note: +diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc +index 54505877ab..b70ab2b450 100644 +--- a/video/video_receive_stream.cc ++++ b/video/video_receive_stream.cc +@@ -194,7 +194,7 @@ VideoReceiveStream::VideoReceiveStream( + stats_proxy_(&config_, clock_), + rtp_receive_statistics_(ReceiveStatistics::Create(clock_)), + timing_(timing), +- video_receiver_(clock_, timing_.get()), ++ video_receiver_(clock_, timing_.get(), this), + rtp_video_stream_receiver_(clock_, + &transport_adapter_, + call_stats, +@@ -725,5 +725,9 @@ std::vector VideoReceiveStream::GetSources() const { + return source_tracker_.GetSources(); + } + ++void VideoReceiveStream::SendFOVFeedback(uint16_t yaw, uint16_t pitch) { ++ rtp_video_stream_receiver_.SendFOVFeedback(yaw, pitch); ++} ++ + } // namespace internal + } // namespace webrtc +diff --git a/video/video_receive_stream.h b/video/video_receive_stream.h +index 0d0c66a410..c387aea0ab 100644 +--- a/video/video_receive_stream.h ++++ b/video/video_receive_stream.h +@@ -131,6 +131,8 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, + + std::vector GetSources() const override; + ++ void SendFOVFeedback(uint16_t yaw, uint16_t pitch) override; ++ + private: + int64_t GetWaitMs() const; + void StartNextDecode() RTC_RUN_ON(decode_queue_); +-- +2.17.1 diff --git a/WebRTC-Sample/owt-server/deployment/docker-compose.yml b/WebRTC-Sample/owt-server/deployment/docker-compose.yml index 28148d88..d63646eb 100644 --- a/WebRTC-Sample/owt-server/deployment/docker-compose.yml +++ b/WebRTC-Sample/owt-server/deployment/docker-compose.yml @@ -3,10 +3,26 @@ version: '3.1' services: owt-immersive-4k: image: xeon-centos76-service-owt-immersive - command: sh -c "/home/init.sh && /home/start.sh && /home/restApi.sh -s 4k && /home/restart.sh && /home/sleep.sh" + command: > + sh -c "/home/init.sh + && /home/start.sh + && /home/restApi.sh -s 4k + && /home/restart.sh + && /home/restApi.sh -c /home/Sample-Videos/test1_h265_3840x2048_30fps_30M_200frames.mp4 + && 
/home/sleep.sh" network_mode: host + volumes: + - ../../../Sample-Videos:/home/Sample-Videos owt-immersive-8k: image: xeon-centos76-service-owt-immersive - command: sh -c "/home/init.sh && /home/start.sh && /home/restApi.sh -s 8k && /home/restart.sh && /home/sleep.sh" + command: > + sh -c "/home/init.sh + && /home/start.sh + && /home/restApi.sh -s 8k + && /home/restart.sh + && /home/restApi.sh -c /home/Sample-Videos/test1_h265_8k_30fps_60M_100frames.mp4 + && /home/sleep.sh" network_mode: host + volumes: + - ../../../Sample-Videos:/home/Sample-Videos diff --git a/WebRTC-Sample/owt-server/deployment/start.sh b/WebRTC-Sample/owt-server/deployment/start.sh index 55e52e2c..978f42e5 100755 --- a/WebRTC-Sample/owt-server/deployment/start.sh +++ b/WebRTC-Sample/owt-server/deployment/start.sh @@ -1,6 +1,11 @@ #!/bin/bash -e +SUDO="" +if [[ $EUID -ne 0 ]]; then + SUDO="sudo -E" +fi + DIR=$(dirname $(readlink -f "$0")) yml="$DIR/docker-compose.yml" -sudo -E docker-compose -f "$yml" up ${1} +${SUDO} docker-compose -f "$yml" up ${1} diff --git a/WebRTC-Sample/owt-server/deployment/stop.sh b/WebRTC-Sample/owt-server/deployment/stop.sh index 1185fe73..5a60d197 100755 --- a/WebRTC-Sample/owt-server/deployment/stop.sh +++ b/WebRTC-Sample/owt-server/deployment/stop.sh @@ -1,6 +1,11 @@ #!/bin/bash -e +SUDO="" +if [[ $EUID -ne 0 ]]; then + SUDO="sudo -E" +fi + DIR=$(dirname $(readlink -f "$0")) yml="$DIR/docker-compose.yml" -sudo -E docker-compose -f "$yml" down +${SUDO} docker-compose -f "$yml" down diff --git a/WebRTC-Sample/owt-server/image/owt-immersive/Dockerfile b/WebRTC-Sample/owt-server/image/owt-immersive/Dockerfile old mode 100644 new mode 100755 index 2ff1e154..4f992e93 --- a/WebRTC-Sample/owt-server/image/owt-immersive/Dockerfile +++ b/WebRTC-Sample/owt-server/image/owt-immersive/Dockerfile @@ -1,10 +1,7 @@ FROM centos:7.6.1810 AS build WORKDIR /home - -RUN yum install -y -q patch centos-release-scl && \ - yum install -y -q devtoolset-7 && \ - source /opt/rh/devtoolset-7/enable +SHELL ["/bin/bash", "-o", "pipefail", "-c"] # COMMON BUILD TOOLS RUN yum install -y -q bzip2 make autoconf libtool git wget ca-certificates pkg-config gcc gcc-c++ bison flex patch epel-release yum-devel libcurl-devel zlib-devel; @@ -23,7 +20,7 @@ ARG AUTOMAKE_VER=1.14 ARG AUTOMAKE_REPO=https://ftp.gnu.org/pub/gnu/automake/automake-${AUTOMAKE_VER}.tar.xz RUN wget -O - ${AUTOMAKE_REPO} | tar xJ && \ cd automake-${AUTOMAKE_VER} && \ - ./configure --prefix=/usr --libdir=/usr/local/lib64 --disable-doc && \ + ./configure --prefix=/usr --libdir=/usr/local/lib64 --disable-doc && \ make -j8 && \ make install @@ -48,19 +45,43 @@ RUN wget -O - ${YASM_REPO} | tar xz && \ make -j8 && \ make install +# Build libnice +ARG NICE_VER="0.1.4" +ARG NICE_REPO=http://nice.freedesktop.org/releases/libnice-${NICE_VER}.tar.gz +ARG LIBNICE_PATCH_VER="4.3.1" +ARG LIBNICE_PATCH_REPO=https://github.com/open-webrtc-toolkit/owt-server/archive/v${LIBNICE_PATCH_VER}.tar.gz + +RUN yum install -y -q glib2-devel + +RUN wget -O - ${NICE_REPO} | tar xz && \ + cd libnice-${NICE_VER} && \ + wget -O - ${LIBNICE_PATCH_REPO} | tar xz && \ + patch -p1 < owt-server-${LIBNICE_PATCH_VER}/scripts/patches/libnice014-agentlock.patch && \ + patch -p1 < owt-server-${LIBNICE_PATCH_VER}/scripts/patches/libnice014-agentlock-plus.patch && \ + patch -p1 < owt-server-${LIBNICE_PATCH_VER}/scripts/patches/libnice014-removecandidate.patch && \ + patch -p1 < owt-server-${LIBNICE_PATCH_VER}/scripts/patches/libnice014-keepalive.patch && \ + patch -p1 < 
owt-server-${LIBNICE_PATCH_VER}/scripts/patches/libnice014-startcheck.patch && \ + patch -p1 < owt-server-${LIBNICE_PATCH_VER}/scripts/patches/libnice014-closelock.patch && \ + ./configure --prefix="/usr/local" --libdir=/usr/local/lib64 && \ + make -s V= && \ + make install + + # Build open ssl -ARG OPENSSL_VER="1.0.2t" +ARG OPENSSL_VER="1.1.1h" ARG OPENSSL_REPO=http://www.openssl.org/source/openssl-${OPENSSL_VER}.tar.gz +ARG BUILD_PREFIX=/usr/local/ssl +ARG BUILD_DESTDIR=/home/build RUN wget -O - ${OPENSSL_REPO} | tar xz && \ cd openssl-${OPENSSL_VER} && \ - ./config no-ssl3 --prefix="/usr/local" -fPIC && \ + ./config no-ssl3 --prefix=${BUILD_PREFIX} --openssldir=${BUILD_PREFIX} -Wl,-rpath=${BUILD_PREFIX}/lib -fPIC && \ make depend && \ make -s V=0 && \ make install # Build libre -ARG LIBRE_VER="v0.4.16" +ARG LIBRE_VER="v0.5.0" ARG LIBRE_REPO=https://github.com/creytiv/re.git RUN git clone ${LIBRE_REPO} && \ @@ -97,7 +118,7 @@ RUN curl -o libsrtp-${SRTP2_VER}.tar.gz ${SRTP2_REPO} && \ cd libsrtp-${SRTP2_VER} && \ export PKG_CONFIG_PATH="/usr/local/lib64/pkgconfig" && \ export CFLAGS="-fPIC" && \ - ./configure --enable-openssl --prefix="/usr/local" --with-openssl-dir="/usr/local" && \ + ./configure --enable-openssl --prefix="/usr/local" --with-openssl-dir="/usr/local/ssl/" && \ make -s V=0 && \ make install @@ -129,24 +150,27 @@ RUN wget -O - ${FFMPEG_PATCHES_RELEASE_URL} | tar xz RUN yum install -y -q libass-devel freetype-devel zlib-devel openssl-devel RUN wget -O - ${FFMPEG_REPO} | tar xz && mv FFmpeg-${FFMPEG_VER} FFmpeg && \ - cd FFmpeg ; + cd FFmpeg ; + # Compile FFmpeg RUN cd /home/FFmpeg && \ export PKG_CONFIG_PATH="/usr/local/lib64/pkgconfig" && \ ./configure --prefix="/usr/local" --libdir=/usr/local/lib64 --enable-shared --disable-static --disable-libvpx --disable-vaapi --enable-libfreetype --enable-libfdk-aac --enable-nonfree && \ + make -j8 && \ make install && make install DESTDIR="/home/build" # Install node -ARG NODE_VER=v8.15.0 +ARG NODE_VER=v10.21.0 ARG NODE_REPO=https://nodejs.org/dist/${NODE_VER}/node-${NODE_VER}-linux-x64.tar.xz RUN yum install -y -q ca-certificates wget xz-utils RUN wget ${NODE_REPO} && \ tar xf node-${NODE_VER}-linux-x64.tar.xz && \ - cp node-*/* /usr/local -rf && rm -rf node-* + cp node-*/* /usr/local -rf && \ + rm -rf node-* # Fetch SVT-HEVC ARG SVT_HEVC_VER=v1.4.3 @@ -155,6 +179,7 @@ ARG SVT_HEVC_REPO=https://github.com/intel/SVT-HEVC RUN yum install -y -q patch centos-release-scl && \ yum install -y -q devtoolset-7 +# hadolint ignore=SC1091 RUN git clone ${SVT_HEVC_REPO} && \ cd SVT-HEVC/Build/linux && \ export PKG_CONFIG_PATH="/usr/local/lib64/pkgconfig" && \ @@ -169,6 +194,7 @@ RUN git clone ${SVT_HEVC_REPO} && \ # Build OWT specific modules +ARG OWTSERVER_COMMIT=fd71357d6fdbd57d3c4be2028976bc2b34fff781 ARG OWTSERVER_REPO=https://github.com/open-webrtc-toolkit/owt-server.git ARG OPENH264_MAJOR=1 ARG OPENH264_MINOR=7 @@ -180,10 +206,10 @@ ARG OPENH264_BINARY=https://github.com/cisco/openh264/releases/download/v${OPENH ARG LICODE_COMMIT="8b4692c88f1fc24dedad66b4f40b1f3d804b50ca" ARG LICODE_REPO=https://github.com/lynckia/licode.git ARG LICODE_PATCH_REPO=https://github.com/open-webrtc-toolkit/owt-server/tree/master/scripts/patches/licode/ -ARG NICE_VER="0.1.4" -ARG NICE_REPO=http://nice.freedesktop.org/releases/libnice-${NICE_VER}.tar.gz -ARG SCVP_VER=f440b398b469e96c7cc93c90caa341b4e113abf2 -ARG SCVP_REPO=/home/vcd_immersive-tids +ARG SAFESTRINGLIB_COMMIT="245c4b8cff1d2e7338b7f3a82828fc8e72b29549" +ARG 
SAFESTRINGLIB_REPO=https://github.com/intel/safestringlib.git +ARG SCVP_VER="9ce286edf4d5976802bf488b4dd90a16ecc28c36" +ARG SCVP_REPO=https://github.com/OpenVisualCloud/Immersive-Video-Sample ARG WEBRTC_REPO=https://github.com/open-webrtc-toolkit/owt-deps-webrtc.git ARG SERVER_PATH=/home/owt-server ARG OWT_SDK_REPO=https://github.com/open-webrtc-toolkit/owt-client-javascript.git @@ -193,36 +219,54 @@ ARG OWT_BRANCH_JS_COMMIT="d727af2927731ff16214d73f57964a992258636d" ARG WEBRTC_COMMIT="c2aa290cfe4f63d5bfbb6540122a5e6bf2783187" ARG FDKAAC_LIB=/home/build/usr/local/lib64 -RUN yum install -y -q python-devel glib2-devel boost-devel log4cxx-devel - -COPY vcd_immersive-tids ${SCVP_REPO} +RUN yum install -y -q python-devel glib2-devel boost-devel log4cxx-devel glog-devel gflags-devel +RUN yum install -y -q patch centos-release-scl devtoolset-7 +ENV PYTHONIOENCODING=UTF-8 +# Install 360scvp +# hadolint ignore=SC1091 +RUN cd /home && \ + source /opt/rh/devtoolset-7/enable && \ + git clone ${SAFESTRINGLIB_REPO} && \ + cd safestringlib && git reset --hard ${SAFESTRINGLIB_COMMIT} && \ + mkdir build && cd build && cmake .. && \ + make -j && \ + mkdir -p /usr/local/lib && \ + cp libsafestring_shared.so /usr/local/lib && \ + mkdir -p /usr/local/lib64 && \ + cp libsafestring_shared.so /usr/local/lib64 && \ + mkdir -p /home/build/usr/local/lib64 && \ + cp libsafestring_shared.so /home/build/usr/local/lib64 && \ + mkdir -p /usr/local/include/safestringlib && \ + cp -rf ../include/* /usr/local/include/safestringlib/ +RUN cd /home && \ + git clone ${SCVP_REPO} && \ + cd Immersive-Video-Sample/src/360SCVP && \ + git reset --hard ${SCVP_VER} && \ + mkdir build && \ + cd build && \ + source /opt/rh/devtoolset-7/enable && \ + cmake -DCMAKE_INSTALL_PREFIX=/usr/local -DCMAKE_INSTALL_LIBDIR=lib64 ../ && \ + make -j && \ + make install DESTDIR=/home/build && \ + make install # 1. Clone OWT server source code # 2. Clone licode source code and patch # 3. 
Clone webrtc source code and patch -RUN git config --global user.email "you@example.com" && \ - git config --global user.name "Your Name" && \ - git clone -b ${OWT_BRANCH} ${OWTSERVER_REPO} && \ + +# hadolint ignore=SC1091 +RUN git clone -b ${OWT_BRANCH} ${OWTSERVER_REPO} && \ + source /opt/rh/devtoolset-7/enable && \ + + cd ${SERVER_PATH} && git reset --hard ${OWTSERVER_COMMIT} && \ + curl https://patch-diff.githubusercontent.com/raw/open-webrtc-toolkit/owt-server/pull/708.patch | git apply && \ # Install node modules for owt npm config set proxy=${http_proxy} && \ npm config set https-proxy=${http_proxy} && \ - npm install -g --loglevel error node-gyp grunt-cli underscore jsdoc && \ + npm install -g --loglevel error node-gyp@v6.1.0 grunt-cli underscore jsdoc && \ cd ${SERVER_PATH} && npm install nan && \ - # Install libnice for owt - cd ${SERVER_PATH}/third_party && \ - wget -O - ${NICE_REPO} | tar xz && \ - cd libnice-${NICE_VER} && \ - patch -p1 < ${SERVER_PATH}/scripts/patches/libnice014-agentlock.patch && \ - patch -p1 < ${SERVER_PATH}/scripts/patches/libnice014-agentlock-plus.patch && \ - patch -p1 < ${SERVER_PATH}/scripts/patches/libnice014-removecandidate.patch && \ - patch -p1 < ${SERVER_PATH}/scripts/patches/libnice014-keepalive.patch && \ - patch -p1 < ${SERVER_PATH}/scripts/patches/libnice014-startcheck.patch && \ - ./configure --prefix="/usr/local" --libdir=/usr/local/lib64 && \ - make -s V= && \ - make install && \ - # Get openh264 for owt cd ${SERVER_PATH}/third_party && \ mkdir openh264 && cd openh264 && \ @@ -241,37 +285,22 @@ RUN git config --global user.email "you@example.com" && \ cd licode && \ git reset --hard ${LICODE_COMMIT} && \ wget -r -nH --cut-dirs=5 --no-parent ${LICODE_PATCH_REPO} && \ - git am ${SERVER_PATH}/scripts/patches/licode/*.patch && \ + git apply ${SERVER_PATH}/scripts/patches/licode/*.patch && \ + mkdir -p ${SERVER_PATH}/build/libdeps/build/include && \ + cp erizoAPI/lib/json.hpp ${SERVER_PATH}/build/libdeps/build/include && \ # Install webrtc for owt - mkdir -p ${SERVER_PATH}/build/libdeps/build/include && \ - cd ${SERVER_PATH}/build/libdeps/build/include && wget https://github.com/nlohmann/json/releases/download/v3.6.1/json.hpp && \ cd ${SERVER_PATH}/third_party && mkdir webrtc && cd webrtc &&\ export GIT_SSL_NO_VERIFY=1 && \ git clone -b 59-server ${WEBRTC_REPO} src && cd src && \ git reset --hard ${WEBRTC_COMMIT} && \ ./tools-woogeen/install.sh && \ + patch -p1 < ${SERVER_PATH}/scripts/patches/0001-Implement-RtcpFOVObserver.patch && \ ./tools-woogeen/build.sh && \ - # Install 360scvp - cd ${SERVER_PATH}/third_party && \ - git clone ${SCVP_REPO} && \ - cd vcd_immersive-tids/360SCVP && \ - git checkout ${SCVP_VER} && \ - mkdir build && \ - cd build && \ - cmake -DCMAKE_INSTALL_PREFIX=/usr/local -DCMAKE_INSTALL_LIBDIR=lib64 ../ && \ - make -j && \ - make install DESTDIR=/home/build && \ - make install && \ - cp lib360SCVP.so /home/build/usr/local/lib64 && \ - cp lib360SCVP.so /usr/local/lib64 && \ - # Get js client sdk for owt cd /home && git clone -b ${OWT_BRANCH_JS} ${OWT_SDK_REPO} && cd owt-client-javascript/scripts && git reset --hard ${OWT_BRANCH_JS_COMMIT} && npm install && grunt && \ - mkdir ${SERVER_PATH}/third_party/quic-lib && \ export LD_LIBRARY_PATH=/usr/local/lib64 && \ - cd ${SERVER_PATH}/third_party/quic-lib && wget https://github.com/open-webrtc-toolkit/owt-deps-quic/releases/download/v0.1/dist.tgz && tar xzf dist.tgz && \ #Build and pack owt cd ${SERVER_PATH} && export CPLUS_INCLUDE_PATH=/usr/local/include/svt-hevc && export 
PKG_CONFIG_PATH=/usr/local/lib64/pkgconfig && ./scripts/build.js -t mcu -r -c && \ ./scripts/pack.js -t all --install-module --no-pseudo --sample-path /home/owt-client-javascript/dist/samples/conference @@ -283,23 +312,28 @@ WORKDIR /home # Prerequisites # Install node -ARG NODE_VER=v8.15.0 +ARG NODE_VER=v10.21.0 ARG NODE_REPO=https://nodejs.org/dist/${NODE_VER}/node-${NODE_VER}-linux-x64.tar.xz RUN yum install -y -q ca-certificates wget xz-utils RUN wget ${NODE_REPO} && \ tar xf node-${NODE_VER}-linux-x64.tar.xz && \ - cp node-*/* /usr/local -rf && rm -rf node-* + cp node-*/* /usr/local -rf && \ + rm -rf node-* COPY --from=build /home/owt-server/dist /home/owt COPY --from=build /home/build / - -ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib64 -RUN yum install epel-release boost-system boost-thread log4cxx glib2 freetype-devel -y && \ - yum install rabbitmq-server mongodb mongodb-server -y && \ +ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib64: +RUN echo "[mongodb-org-3.6]" >> /etc/yum.repos.d/mongodb-org-3.6.repo && \ + echo "name=MongoDB Repository" >> /etc/yum.repos.d/mongodb-org-3.6.repo && \ + echo "baseurl=https://repo.mongodb.org/yum/redhat/7/mongodb-org/3.6/x86_64/" >> /etc/yum.repos.d/mongodb-org-3.6.repo && \ + echo "gpgcheck=1" >> /etc/yum.repos.d/mongodb-org-3.6.repo && \ + echo "enabled=1" >> /etc/yum.repos.d/mongodb-org-3.6.repo && \ + echo "gpgkey=https://www.mongodb.org/static/pgp/server-3.6.asc" >> /etc/yum.repos.d/mongodb-org-3.6.repo && \ + yum install epel-release boost-system boost-thread log4cxx glib2 freetype-devel -y && \ + yum install rabbitmq-server mongodb-org glog-devel gflags-devel -y && \ yum remove -y -q epel-release && \ - export PKG_CONFIG_PATH="/usr/local/lib64/pkgconfig" && \ rm -rf /var/cache/yum/*; COPY scripts/init.sh scripts/restApi.sh scripts/restart.sh scripts/sleep.sh scripts/start.sh /home/ @@ -308,3 +342,4 @@ RUN cd /home/scripts &&\ npm config set proxy=${http_proxy} && \ npm config set https-proxy=${http_proxy} && \ npm install + diff --git a/WebRTC-Sample/owt-server/image/owt-immersive/scripts/scripts/package.json b/WebRTC-Sample/owt-server/image/owt-immersive/scripts/scripts/package.json old mode 100755 new mode 100644 index d240e588..0b498723 --- a/WebRTC-Sample/owt-server/image/owt-immersive/scripts/scripts/package.json +++ b/WebRTC-Sample/owt-server/image/owt-immersive/scripts/scripts/package.json @@ -9,7 +9,7 @@ "fraction.js": "*", "log4js": "^1.1.1", "mongojs": "", - "mongoose": "^4.13.6", + "mongoose": "^5.8.6", "package.json": "^2.0.1", "toml": "*" }, diff --git a/WebRTC-Sample/owt-server/script/build.sh b/WebRTC-Sample/owt-server/script/build.sh index 21226baf..cbd0db83 100644 --- a/WebRTC-Sample/owt-server/script/build.sh +++ b/WebRTC-Sample/owt-server/script/build.sh @@ -1,9 +1,14 @@ #!/bin/bash -e -if test -z "${DIR}"; then +SUDO="" +if [[ $EUID -ne 0 ]]; then + SUDO="sudo -E" +fi + +if test -z "${DIR}"; then echo "This script should not be called directly." 
exit -1 -fi +fi USER="docker" GROUP="docker" @@ -15,9 +20,9 @@ for dep in '.8.*' '.7.*' '.6.*' '.5.*' '.4.*' '.3.*' '.2.*' '.1.*' ''; do if test -z "$image"; then image="$IMAGE"; fi if grep -q 'AS build' "$dockerfile"; then - sudo docker build --network=host --file="$dockerfile" --target build -t "$image:build" "$DIR" $(env | grep -E '_(proxy|REPO|VER)=' | sed 's/^/--build-arg /') --build-arg USER=${USER} --build-arg GROUP=${GROUP} --build-arg UID=$(id -u) --build-arg GID=$(id -g) + ${SUDO} docker build --network=host --file="$dockerfile" --target build -t "$image:build" "$DIR" $(env | grep -E '_(proxy|REPO|VER)=' | sed 's/^/--build-arg /') --build-arg USER=${USER} --build-arg GROUP=${GROUP} --build-arg UID=$(id -u) --build-arg GID=$(id -g) fi - sudo docker build --network=host --file="$dockerfile" -t "$image:latest" "$DIR" $(env | grep -E '_(proxy|REPO|VER)=' | sed 's/^/--build-arg /') --build-arg USER=${USER} --build-arg GROUP=${GROUP} --build-arg UID=$(id -u) --build-arg GID=$(id -g) + ${SUDO} docker build --network=host --file="$dockerfile" -t "$image:latest" "$DIR" $(env | grep -E '_(proxy|REPO|VER)=' | sed 's/^/--build-arg /') --build-arg USER=${USER} --build-arg GROUP=${GROUP} --build-arg UID=$(id -u) --build-arg GID=$(id -g) done done diff --git a/src/360SCVP/360SCVPAPI.h b/src/360SCVP/360SCVPAPI.h index a5b55f95..e5deb282 100644 --- a/src/360SCVP/360SCVPAPI.h +++ b/src/360SCVP/360SCVPAPI.h @@ -27,6 +27,7 @@ #define _360SCVP_API_H_ #include "stdint.h" #include +#include "pose.h" #ifdef __cplusplus extern "C" { @@ -41,7 +42,6 @@ extern "C" { #define ID_SCVP_PARAM_SEI_VIEWPORT 1006 #define ID_SCVP_BITSTREAMS_HEADER 1007 #define ID_SCVP_RWPK_INFO 1008 - #define DEFAULT_REGION_NUM 1000 typedef enum SliceType { @@ -51,11 +51,18 @@ typedef enum SliceType { E_SLICE_IDR = 3, }SliceType; +/*! + * + * The enum type support both sphere projection and planar. + * The planar video, like pan-zoom usage, uses E_VIDEO_PLANAR + * + */ typedef enum EGeometryType { E_SVIDEO_EQUIRECT = 0, E_SVIDEO_CUBEMAP, E_SVIDEO_VIEWPORT, + E_SVIDEO_PLANAR, E_SVIDEO_TYPE_NUM, }EGeometryType; @@ -66,7 +73,7 @@ typedef enum EGeometryType * 1(merge + viewport), used in webrtc use case * 2(parsing one nal), used in the omaf * 3(parsing for client), used in the client player -* +* 4(only viewport calculation), can be used in OMAF DASH access */ typedef enum UsageType { @@ -74,6 +81,7 @@ typedef enum UsageType E_MERGE_AND_VIEWPORT, E_PARSER_ONENAL, E_PARSER_FOR_CLIENT, + E_VIEWPORT_ONLY, E_PARSER_TYPE_NUM, }UsageType; @@ -99,12 +107,73 @@ typedef enum H265SEIType E_OMNI_VIEWPORT }H265SEIType; +/*! + * Plugin Type definitions: + * + * E_PLUGIN_TILE_SELECTION: For plugins which implemented tile selection + * + */ +typedef enum PluginType +{ + E_PLUGIN_TILE_SELECTION = 0, + E_PLUGIN_TYPE_NUM, +}PluginType; + +/*! + * Plugin Format definitions: + * + * E_PLUGIN_EQUIRECT: For plugins which is used on ERP projection + * E_PLUGIN_CUBEMAP: For plugins which is used on Cubemap projection + * E_PLUGIN_PLANAR: For plugins which is used on Planar projection + * + */ +typedef enum PluginFormat +{ + E_PLUGIN_EQUIRECT = 0, + E_PLUGIN_CUBEMAP, + E_PLUGIN_PLANAR, + E_PLUGIN_FORMAT_NUM +}PluginFormat; + +/*! + * Plugin Defintion for 360SCVP + * + * PluginType: Tile selection, etc. + * PluginFormat: Indicate the video projection: Equirect, cubemap, panzoom, etc. 
+ * PluginLibPath: Plugin library file full name + * + */ +typedef struct PLUGIN_DEF +{ + PluginType pluginType; + PluginFormat pluginFormat; + char* pluginLibPath; +}PluginDef; + +/*! + * + * The structure support both sphere projection and planar video + * x: The top left X-axis coordinate of the tile + * y: The top left Y-axis coordinate of the tile + * idx: The tile index. + * faceId: Which projection face the tile locates. Only used for 3D projections. + * streamId: Which stream the tile locates. Only used for 2D projection. + * Offset: + * The four offsets provide the viewport area points distance to the current tile points. + * Users can write their own implementation to utilize these information for render module + * + */ typedef struct TILE_DEF { int32_t x; int32_t y; int32_t idx; int32_t faceId; + int32_t streamId; //for planar tile selection only + int32_t upLeftXOffset; + int32_t upLeftYOffset; + int32_t downRightXOffset; + int32_t downRightYOffset; } TileDef; typedef struct CC_DEF @@ -163,6 +232,8 @@ typedef struct RECTANGUALAR_REGION_WIZE_PACKING //! \brief: define the overall region wise packing information //! of each video stream, including regions number, detailed //! region wise packing information for each region, and so on +//! add three variables(high resolution region number, low resolution stream width and height) to support WeRTC sample player +//! add timestamp in SEI to track frame //! typedef struct REGION_WIZE_PACKING { @@ -173,6 +244,10 @@ typedef struct REGION_WIZE_PACKING uint16_t packedPicWidth; uint16_t packedPicHeight; RectangularRegionWisePacking *rectRegionPacking; + uint8_t numHiRegions; + uint32_t lowResPicWidth; + uint32_t lowResPicHeight; + uint32_t timeStamp; }RegionWisePacking; typedef struct VIEW_PORT @@ -255,7 +330,7 @@ typedef struct PARAM_BSHEADER }Param_BSHeader; //! -//! \brief This structure is for the pitcure parameters +//! \brief This structure is for the pitcure parameters //! typedef struct PARAM_PICTURE { @@ -267,6 +342,47 @@ typedef struct PARAM_PICTURE int32_t maxCUWidth; }Param_PicInfo; +//Enumeration type for indicating rotation information in input Cubemap projected picture +typedef enum +{ + NO_TRANSFORM = 0, + MIRRORING_HORIZONTALLY, + ROTATION_180_ANTICLOCKWISE, + ROTATION_180_ANTICLOCKWISE_AFTER_MIRRORING_HOR, + ROTATION_90_ANTICLOCKWISE_BEFORE_MIRRORING_HOR, + ROTATION_90_ANTICLOCKWISE, + ROTATION_270_ANTICLOCKWISE_BEFORE_MIRRORING_HOR, + ROTATION_270_ANTICLOCKWISE, +}E_TransformType; + +typedef struct PARAM_FACEPPROPERTY +{ + int faceWidth; + int faceHeight; + int idFace; + E_TransformType rotFace; +}Param_FaceProperty; + +typedef struct PARAM_VIDEOFPSTRUCT +{ + int rows; + int cols; + Param_FaceProperty faces[6][6]; +}Param_VideoFPStruct; + +//! +//! \brief This structure stores the image width/height and tile width/height for each resolution +//! Utilized for multi-resolution use cases. +//! +//! +typedef struct STREAM_INFO +{ + unsigned int FrameWidth; + unsigned int FrameHeight; + int32_t TileWidth; + int32_t TileHeight; +}Stream_Info; + //! //! \brief This structure is for the viewport output parameters //! @@ -291,8 +407,8 @@ typedef struct PARAM_VIEWPORT_OUTPUT //! //! \param ViewportWidth, input, the width for the viewport //! \param ViewportHeight, input, the height for the viewport -//! \param viewPortPitch, input, the angle rotated aroud z -//! \param viewPortYaw, input, the angle rotated aroud x +//! \param viewPortPitch, input, the angle rotated aroud x(-90 ~ 90) +//! 
\param viewPortYaw, input, the angle rotated aroud z(-180 ~ 180) //! \param viewPortFOVH, input, the horizontal FOV angle //! \param viewPortFOVV, input, the vertical FOV angle //! \param geoTypeOutput, input, the type for the output projection(viewport) @@ -315,6 +431,8 @@ typedef struct PARAM_VIEWPORT uint32_t faceHeight; uint32_t tileNumRow; uint32_t tileNumCol; + UsageType usageType; + Param_VideoFPStruct paramVideoFP; }Param_ViewPortInfo; //! @@ -366,6 +484,9 @@ typedef struct PARAM_STREAMSTITCHINFO //! \param paramPicInfo, input, the param for the picture,just used in the usedType=0 & 1 //! \param paramViewPort, input, the param for the viewport, just used in the usedType=1 //! \param paramStitchInfo, input, the param for the streamStitch, just used in the usedType=0 +//! \param sourceResolutionNum,input, the number of high res stream for usages of multi stream, Pan-zoom for instance +//! \param accessInterval, input, the time interval of every tile selection interface is called, expressed in milisecond +//! \param pStreamInfo, input, the param for the stream information of multi-stream usage //! \param destWidth, input, the width for the destination output area, just used in the usedType=2 //! \param destHeight, input, the height for the destination output area, just used in the usedType=2 //! \param frameWidth, input, the width of the frame, just used in the usedType=E_MERGE_AND_VIEWPORT @@ -376,6 +497,7 @@ typedef struct PARAM_STREAMSTITCHINFO //! \param inputLowBistreamLen,input, the length of the low resolution input bistream, just used in the usedType=E_MERGE_AND_VIEWPORT //! \param pOutputSEI, output, the buffer for the output SEI bistream, mainly RWPK, just used in the usedType=E_MERGE_AND_VIEWPORT //! \param outputSEILen, output, the length of the output SEI bistream, just used in the usedType=E_MERGE_AND_VIEWPORT +//! \param timeStamp, input, using timestamp to track frame, especially used in the E_MERGE_AND_VIEWPORT use case //! typedef struct PARAM_360SCVP { @@ -387,6 +509,9 @@ typedef struct PARAM_360SCVP Param_PicInfo paramPicInfo; Param_ViewPortInfo paramViewPort; param_streamStitchInfo paramStitchInfo; + int32_t sourceResolutionNum; + float accessInterval; + Stream_Info *pStreamInfo; int32_t destWidth; int32_t destHeight; unsigned int frameWidth; @@ -397,6 +522,9 @@ typedef struct PARAM_360SCVP unsigned int inputLowBistreamLen; unsigned char *pOutputSEI; unsigned int outputSEILen; + PluginDef pluginDef; + uint32_t timeStamp; + void *logFunction; //external log callback function pointer, NULL if external log is not used }param_360SCVP; //! @@ -410,7 +538,7 @@ typedef struct PARAM_360SCVP void * I360SCVP_Init(param_360SCVP* pParam360SCVP); //! -//! \brief This function create a new handle based on one existed handle +//! \brief This function create a new handle based on one existed handle //! and return the new handle of the stitch stream library //! \param void* p360SCVPHandle, input, one existed stitch library handle //! @@ -444,6 +572,18 @@ int32_t I360SCVP_process(param_360SCVP* pParam360SCVP, void * p360SCVPHandle); //! int32_t I360SCVP_setViewPort(void * p360SCVPHandle, float yaw, float pitch); +//! +//! \brief This function sets the parameter of the viewPort with extention +//! +//! \param void * p360SCVPHandle, input, which is created by the I360SVCP_Init function +//! \param void * pViewportInfo, input, the viewport information handle +//! +//! \return int32_t, the status of the function. +//! 0, if succeed +//! not 0, if fail +//! 
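// --- Illustrative sketch, not part of the patch -----------------------------
// Putting the viewport APIs together: a caller that only needs tile selection
// (usage type E_VIEWPORT_ONLY) can initialize the library, feed it the current
// head pose, and read the covered tiles back through I360SCVP_getTilesInViewport()
// and I360SCVP_unInit(), both declared a little further below. Which fields of
// param_360SCVP must be filled for this usage (projection type, FOV, tile grid,
// ...) is not visible in this diff, so init_params is assumed to be prepared by
// the caller.
#include <cstdio>
#include <vector>
#include "360SCVPAPI.h"

int SelectTilesForPose(param_360SCVP& init_params, float yaw_deg, float pitch_deg) {
  void* handle = I360SCVP_Init(&init_params);        // init_params set up for E_VIEWPORT_ONLY (assumed)
  if (!handle) return -1;

  I360SCVP_setViewPort(handle, yaw_deg, pitch_deg);  // update the viewport from the head pose

  std::vector<TileDef> tiles(DEFAULT_REGION_NUM);    // generous upper bound from this header
  Param_ViewportOutput viewport_out = {};
  int num = I360SCVP_getTilesInViewport(tiles.data(), &viewport_out, handle);
  for (int i = 0; i < num; i++)
    std::printf("tile %d: idx=%d face=%d at (%d,%d)\n",
                i, tiles[i].idx, tiles[i].faceId, tiles[i].x, tiles[i].y);

  I360SCVP_unInit(handle);
  return num;
}
// -----------------------------------------------------------------------------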
+int32_t I360SCVP_setViewPortEx(void * p360SCVPHandle, HeadPose* pose); + //! //! \brief This function completes the un-initialization, free the memory //! @@ -468,6 +608,19 @@ int32_t I360SCVP_unInit(void * p360SCVPHandle); //! int32_t I360SCVP_getFixedNumTiles(TileDef* pOutTile, Param_ViewportOutput* pParamViewPortOutput, void* p360SCVPHandle); +//! +//! \brief This function output the selected tiles according to the timely changed viewPort information, +//! especially these tiles are put in the original picture order. +//! for cube map source, currently support FOV range 100 ~70 degree +//! +//! \param TileDef* pOutTile, output, the list for the tiles inside the viewport +//! \param int32_t* pParamViewPortOutput, output, please refer to the structure Param_ViewportOutput +//! \param void* p360SCVPHandle, input, which is created by the I360SVCP_Init function +//! +//! \return int32_t, the number of the tiles inside the viewport. +//! +int32_t I360SCVP_getTilesInViewport(TileDef* pOutTile, Param_ViewportOutput* pParamViewPortOutput, void* p360SCVPHandle); + //! //! \brief This function provides the parsing NAL function, can give the slice type, tileCols number, tileRows number and nal information //! if it is the Slice, must provide the slice header length; if SEI, provide the SEI payload type @@ -560,6 +713,18 @@ int32_t I360SCVP_GenerateProj(void* p360SCVPHandle, int32_t projType, uint8_t *p //! int32_t I360SCVP_ParseRWPK(void* p360SCVPHandle, RegionWisePacking* pRWPK, uint8_t *pRWPKBits, uint32_t RWPKBitsSize); +//! +//! \brief This function gets the content coverge for viewport. +//! +//! \param void* p360SCVPHandle, input, which is created by the I360SVCP_Init function +//! \param CCDef* pOutCC, output, the centre and range of Azimuth and Elevation of 3D sphere +//! +//!\return int32_t, the status of the function. +//! 0, if succeed +//! not 0, if fail +//! +int32_t I360SCVP_getContentCoverage(void* p360SCVPHandle, CCDef* pOutCC); + //! //! \brief This function can get the specified values, for example vps, sps, and pps. //! @@ -586,6 +751,27 @@ int32_t I360SCVP_GetParameter(void* p360SCVPHandle, int32_t paramID, void** pVal //! int32_t I360SCVP_SetParameter(void* p360SCVPHandle, int32_t paramID, void* pValue); +//! +//! \brief This function can select tiles by legacy algorithm with good accuracy but low performance. +//! +//! \param void* p360SCVPHandle, input, which is created by the I360SVCP_Init function +//! \param void* pOutTile, output, the selected tiles +//! +//! \return int32_t, the selected tiles number. +//! >0, if succeed +//! <0, if fail +//! +int32_t I360SCVP_GetTilesByLegacyWay(TileDef* pOutTile, void* p360SCVPHandle); + +//! \brief This function can set the logcallback funciton. +//! +//! \param void* p360SCVPHandle, input, which is created by the I360SVCP_Init function +//! \param void* externalLog, input, the specified logcallback function +//! +//! \return int32_t, the status of the function. +//! 
ERROR_NONE if success, else failed reason +int32_t I360SCVPSetLogCallBack(void* p360SCVPHandle, void* externalLog); + #ifdef __cplusplus } #endif diff --git a/src/360SCVP/360SCVPAPIImpl.cpp b/src/360SCVP/360SCVPAPIImpl.cpp index b2ab44f1..2578ffd1 100644 --- a/src/360SCVP/360SCVPAPIImpl.cpp +++ b/src/360SCVP/360SCVPAPIImpl.cpp @@ -33,6 +33,7 @@ #include "360SCVPAPI.h" #include "360SCVPCommonDef.h" #include "360SCVPHevcEncHdr.h" +#include "360SCVPLog.h" #include "360SCVPImpl.h" void* I360SCVP_Init(param_360SCVP* pParam360SCVP) @@ -43,7 +44,7 @@ void* I360SCVP_Init(param_360SCVP* pParam360SCVP) if(pStitchstream->init(pParam360SCVP) < 0) { - delete(pStitchstream); + SAFE_DELETE(pStitchstream); return NULL; } return (void*)pStitchstream; @@ -95,10 +96,26 @@ int32_t I360SCVP_process(param_360SCVP* pParam360SCVP, void* p360SCVPHandle) int32_t I360SCVP_setViewPort(void* p360SCVPHandle, float yaw, float pitch) { int32_t ret = 0; + HeadPose pose; TstitchStream* pStitch = (TstitchStream*)(p360SCVPHandle); if (!pStitch) return 1; - ret = pStitch->setViewPort(yaw, pitch); + pose.yaw = yaw; + pose.pitch = pitch; + ret = pStitch->setViewPort(&pose); + + pStitch->getViewPortTiles(); + + return ret; +} + +int32_t I360SCVP_setViewPortEx(void* p360SCVPHandle, HeadPose* pose) +{ + int32_t ret = 0; + TstitchStream* pStitch = (TstitchStream*)(p360SCVPHandle); + if (!pStitch) + return 1; + ret = pStitch->setViewPort(pose); pStitch->getViewPortTiles(); @@ -112,6 +129,7 @@ int32_t I360SCVP_unInit(void* p360SCVPHandle) if (!pStitch) return -1; ret = pStitch->uninit(); + SAFE_DELETE(pStitch); return ret; } @@ -131,6 +149,22 @@ int32_t I360SCVP_getFixedNumTiles(TileDef* pOutTile, Param_ViewportOutput* pPara return ret; } +int32_t I360SCVP_getTilesInViewport(TileDef* pOutTile, Param_ViewportOutput* pParamViewPortOutput, void* p360SCVPHandle) +{ + int32_t ret = 0; + TstitchStream* pStitch = (TstitchStream*)(p360SCVPHandle); + if (!pStitch || !pOutTile || !pParamViewPortOutput) + return 1; + ret = pStitch->getTilesInViewport(pOutTile); + pParamViewPortOutput->dstWidthAlignTile = pStitch->m_viewportDestWidth; + pParamViewPortOutput->dstHeightAlignTile = pStitch->m_viewportDestHeight; + pParamViewPortOutput->dstWidthNet = pStitch->m_dstWidthNet; + pParamViewPortOutput->dstHeightNet = pStitch->m_dstHeightNet; + pParamViewPortOutput->xTopLeftNet = pStitch->m_xTopLeftNet; + pParamViewPortOutput->yTopLeftNet = pStitch->m_yTopLeftNet; + return ret; +} + int32_t I360SCVP_ParseNAL(Nalu* pNALU, void* p360SCVPHandle) { TstitchStream* pStitch = (TstitchStream*)(p360SCVPHandle); @@ -214,6 +248,21 @@ int32_t I360SCVP_ParseRWPK(void* p360SCVPHandle, RegionWisePacking* pRWPK, uint8 return ret; } +// output is the centre and range of Azimuth and Elevation of 3D sphere +int32_t I360SCVP_getContentCoverage(void* p360SCVPHandle, CCDef* pOutCC) +{ + int32_t ret = 0; + TstitchStream* pStitch = (TstitchStream*)(p360SCVPHandle); + if (!pStitch) + return -1; + + if (!pOutCC) + return -1; + + ret = pStitch->getContentCoverage(pOutCC); + return ret; +} + int32_t I360SCVP_GetParameter(void* p360SCVPHandle, int32_t paramID, void** pValue) { int32_t ret = 0; @@ -289,3 +338,30 @@ int32_t I360SCVP_SetParameter(void* p360SCVPHandle, int32_t paramID, void* pValu } return ret; } + +int32_t I360SCVP_GetTilesByLegacyWay(TileDef* pOutTile, void* p360SCVPHandle) +{ + int32_t ret = 0; + TstitchStream* pStitch = (TstitchStream*)(p360SCVPHandle); + if (!pStitch) + return -1; + ret = pStitch->getTilesByLegacyWay(pOutTile); + return ret; +} + +int32_t 
I360SCVPSetLogCallBack(void* p360SCVPHandle, void* externalLog) +{ + TstitchStream* pStitch = (TstitchStream*)p360SCVPHandle; + if (!pStitch) + return 1; + + LogFunction logFunction = (LogFunction)externalLog; + if (!logFunction) + return OMAF_ERROR_NULL_PTR; + + int32_t ret = pStitch->SetLogCallBack(logFunction); + if (ret) + return ret; + + return ERROR_NONE; +} diff --git a/src/360SCVP/360SCVPBitstream.cpp b/src/360SCVP/360SCVPBitstream.cpp index 4fb7679e..6b16eb69 100644 --- a/src/360SCVP/360SCVPBitstream.cpp +++ b/src/360SCVP/360SCVPBitstream.cpp @@ -25,6 +25,9 @@ */ #include "360SCVPBitstream.h" #include "assert.h" +extern "C" { + #include "safestringlib/safe_mem_lib.h" +} void* gts_malloc(size_t size) { @@ -65,7 +68,7 @@ GTS_BitStream *gts_bs_new(const int8_t *buffer, uint64_t BufferSize, uint32_t mo tmp = (GTS_BitStream *)gts_malloc(sizeof(GTS_BitStream)); if (!tmp) return NULL; - memset(tmp, 0, sizeof(GTS_BitStream)); + memset_s(tmp, sizeof(GTS_BitStream), 0); tmp->original = (int8_t*)buffer; tmp->size = BufferSize; @@ -287,7 +290,7 @@ uint32_t gts_bs_read_data(GTS_BitStream *bs, int8_t *data, uint32_t nbBytes) case GTS_BITSTREAM_READ: case GTS_BITSTREAM_WRITE: case GTS_BITSTREAM_WRITE_DYN: - memcpy(data, bs->original + bs->position, nbBytes); + memcpy_s(data, nbBytes, bs->original + bs->position, nbBytes); bs->position += nbBytes; return nbBytes; default: @@ -425,7 +428,7 @@ uint32_t gts_bs_write_byte(GTS_BitStream *bs, uint8_t byte, uint32_t countLoop) totalSize = bs->position + countLoop; if (totalSize > bs->size) return 0; - memset(bs->original + bs->position, byte, countLoop); + memset_s(bs->original + bs->position, countLoop, byte); bs->position += countLoop; return countLoop; case GTS_BITSTREAM_WRITE_DYN: @@ -445,7 +448,7 @@ uint32_t gts_bs_write_byte(GTS_BitStream *bs, uint8_t byte, uint32_t countLoop) return 0; bs->size = dynSize; } - memset(bs->original + bs->position, byte, countLoop); + memset_s(bs->original + bs->position, countLoop, byte); bs->position += countLoop; return countLoop; case GTS_BITSTREAM_FILE_READ: @@ -488,7 +491,7 @@ uint32_t gts_bs_write_data(GTS_BitStream *bs, const int8_t *data, uint32_t nbByt return 0; bs->size = new_size; } - memcpy(bs->original + bs->position, data, nbBytes); + memcpy_s(bs->original + bs->position, nbBytes, data, nbBytes); bs->position += nbBytes; return nbBytes; case GTS_BITSTREAM_FILE_READ: @@ -507,10 +510,8 @@ uint32_t gts_bs_write_data(GTS_BitStream *bs, const int8_t *data, uint32_t nbByt bs->buffer_io_size = newSize; } } - memcpy(bs->buffer_io+bs->buffer_written, data, nbBytes); + memcpy_s(bs->buffer_io+bs->buffer_written, nbBytes, data, nbBytes); bs->buffer_written += nbBytes; - return nbBytes; - if (gts_fwrite(data, nbBytes, 1, bs->stream) != 1) return 0; if (bs->size == bs->position) bs->size += nbBytes; bs->position += nbBytes; @@ -518,7 +519,7 @@ uint32_t gts_bs_write_data(GTS_BitStream *bs, const int8_t *data, uint32_t nbByt case GTS_BITSTREAM_WRITE: if (bs->position + nbBytes > bs->size) return 0; - memcpy(bs->original + bs->position, data, nbBytes); + memcpy_s(bs->original + bs->position, nbBytes, data, nbBytes); bs->position += nbBytes; return nbBytes; default: diff --git a/src/360SCVP/360SCVPCommonDef.h b/src/360SCVP/360SCVPCommonDef.h index 7e89c998..30cbe310 100644 --- a/src/360SCVP/360SCVPCommonDef.h +++ b/src/360SCVP/360SCVPCommonDef.h @@ -28,6 +28,14 @@ #define __360SCVP_COMMONDEF__ #include "stdint.h" +extern "C" { + #include "safestringlib/safe_mem_lib.h" +} + +#define SAFE_DELETE(x) if(NULL != (x)) { 
delete (x); (x)=NULL; }; +#define SAFE_FREE(x) if(NULL != (x)) { free((x)); (x)=NULL; }; +#define SAFE_DELETE_ARRAY(x) if(NULL != (x)) { delete[] (x); (x)=NULL; }; + typedef enum SLICE_TYPE { SLICE_B = 0, SLICE_P = 1, diff --git a/src/360SCVP/360SCVPCubeMap.cpp b/src/360SCVP/360SCVPCubeMap.cpp index b79fd44c..f647e7e4 100644 --- a/src/360SCVP/360SCVPCubeMap.cpp +++ b/src/360SCVP/360SCVPCubeMap.cpp @@ -90,7 +90,7 @@ void CubeMap::map2DTo3D(SPos& IPosIn, SPos *pSPosOut) pSPosOut->z = -1.0; break; default: - assert(!"Error CubeMap::map2DTo3D()"); + assert(0 && "Error CubeMap::map2DTo3D()"); break; } } diff --git a/src/360SCVP/360SCVPGeometry.cpp b/src/360SCVP/360SCVPGeometry.cpp index 8a12b5b0..3ada4ffe 100644 --- a/src/360SCVP/360SCVPGeometry.cpp +++ b/src/360SCVP/360SCVPGeometry.cpp @@ -38,7 +38,7 @@ Geometry::Geometry() { - memset(&m_sVideoInfo, 0, sizeof(struct SVideoInfo)); + memset_s(&m_sVideoInfo, sizeof(struct SVideoInfo), 0); m_iMarginX = m_iMarginY =0; m_bPadded = false; @@ -61,8 +61,8 @@ void Geometry::geoInit(SVideoInfo& sVideoInfo) for (int32_t i = 0; i < FACE_NUMBER; i++) { pUpleftTmp->faceIdx = -1; - pUpleftTmp->x = 7680;// sVideoInfo.iFaceWidth; //7680 - pUpleftTmp->y = 3840;// sVideoInfo.iFaceHeight; //3840 + pUpleftTmp->x = sVideoInfo.fullWidth; + pUpleftTmp->y = sVideoInfo.fullHeight; pDownRightTmp->faceIdx = -1; pDownRightTmp->x = 0; pDownRightTmp->y = 0; @@ -77,8 +77,8 @@ void Geometry::geoInit(SVideoInfo& sVideoInfo) void Geometry::geoUnInit() { - delete[] m_upLeft; - delete[] m_downRight; + SAFE_DELETE_ARRAY(m_upLeft); + SAFE_DELETE_ARRAY(m_downRight); } diff --git a/src/360SCVP/360SCVPGeometry.h b/src/360SCVP/360SCVPGeometry.h index 714a2f84..914c6c94 100644 --- a/src/360SCVP/360SCVPGeometry.h +++ b/src/360SCVP/360SCVPGeometry.h @@ -62,8 +62,8 @@ struct SPos POSType x; POSType y; POSType z; - SPos() : faceIdx(0), x(0), y(0), z(0) {}; - SPos(int32_t f, POSType xIn, POSType yIn, POSType zIn ) : faceIdx(f), x(xIn), y(yIn), z(zIn) {}; + SPos() : faceIdx(0), x(0), y(0), z(0) {} + SPos(int32_t f, POSType xIn, POSType yIn, POSType zIn ) : faceIdx(f), x(xIn), y(yIn), z(zIn) {} }; struct GeometryRotation @@ -76,7 +76,7 @@ struct ViewPortSettings float vFOV; float fYaw; // float fPitch; - ViewPortSettings() : hFOV(0), vFOV(0), fYaw(0), fPitch(0) {}; + ViewPortSettings() : hFOV(0), vFOV(0), fYaw(0), fPitch(0) {} }; struct SVideoInfo @@ -88,6 +88,8 @@ struct SVideoInfo int32_t iFaceHeight; //native size int32_t iNumFaces; //geometry faces ViewPortSettings viewPort; + int32_t fullWidth; + int32_t fullHeight; }; class Geometry @@ -99,7 +101,7 @@ class Geometry bool m_bPadded; bool m_bGeometryMapping; bool m_bConvOutputPaddingNeeded; - inline int32_t round(POSType t) { return (int32_t)(t+ (t>=0? 0.5 :-0.5)); }; + inline int32_t round(POSType t) { return (int32_t)(t+ (t>=0? 
0.5 :-0.5)); } void rotate3D(SPos& sPos, int32_t rx, int32_t ry, int32_t rz); public: int32_t m_numFaces; @@ -109,7 +111,7 @@ class Geometry virtual ~Geometry(); void geoInit(SVideoInfo& sVideoInfo); void geoUnInit(); // just use in the viewport - GeometryType getType() { return (GeometryType)m_sVideoInfo.geoType; }; + GeometryType getType() { return (GeometryType)m_sVideoInfo.geoType; } void setPaddingFlag(bool bFlag) { m_bPadded = bFlag; } virtual void map2DTo3D(SPos& IPosIn, SPos *pSPosOut) = 0; virtual void map3DTo2D(SPos *pSPosIn, SPos *pSPosOut) = 0; diff --git a/src/360SCVP/360SCVPHevcEncHdr.cpp b/src/360SCVP/360SCVPHevcEncHdr.cpp index 60f12d27..a9814431 100644 --- a/src/360SCVP/360SCVPHevcEncHdr.cpp +++ b/src/360SCVP/360SCVPHevcEncHdr.cpp @@ -755,6 +755,10 @@ uint32_t writeCubeProjectionSEINal(GTS_BitStream *bs, CubemapProjectionSEI& proj } pRegion++; } + gts_bs_write_int(bs, packing.numHiRegions, 8); + gts_bs_write_int(bs, packing.lowResPicWidth, 32); + gts_bs_write_int(bs, packing.lowResPicHeight, 32); + gts_bs_write_int(bs, packing.timeStamp, 32); } hevc_bitstream_add_rbsp_trailing_bits(bs); @@ -853,7 +857,10 @@ uint32_t writeCubeProjectionSEINal(GTS_BitStream *bs, CubemapProjectionSEI& proj packing.projPictureHeight = pRegion->projPicHeight; packing.packedPictureWidth = pRegion->packedPicWidth; packing.packedPictureHeight = pRegion->packedPicHeight; - + packing.numHiRegions = pRegion->numHiRegions; + packing.lowResPicWidth = pRegion->lowResPicWidth; + packing.lowResPicHeight = pRegion->lowResPicHeight; + packing.timeStamp = pRegion->timeStamp; RectangularRegionWisePacking* inputRegion = pRegion->rectRegionPacking; packing.regionsSize = pRegion->numRegions; packing.pRegions = new RegionStruct[pRegion->numRegions]; @@ -882,11 +889,7 @@ uint32_t writeCubeProjectionSEINal(GTS_BitStream *bs, CubemapProjectionSEI& proj uint32_t ret = writeRwpkSEINal(stream, packing, temporalIdPlus1); - if (packing.pRegions != NULL) - { - delete []packing.pRegions; - packing.pRegions = NULL; - } + SAFE_DELETE_ARRAY(packing.pRegions); return ret; } @@ -952,10 +955,6 @@ uint32_t writeCubeProjectionSEINal(GTS_BitStream *bs, CubemapProjectionSEI& proj pViewportInput++; } uint32_t ret = writeRotationSEINal(stream, viewPort, temporalIdPlus1); - if (viewPort.pViewports) - { - delete []viewPort.pViewports; - viewPort.pViewports = NULL; - } + SAFE_DELETE_ARRAY(viewPort.pViewports); return ret; } diff --git a/src/360SCVP/360SCVPHevcParser.cpp b/src/360SCVP/360SCVPHevcParser.cpp index d963e1c4..2fdc07b9 100644 --- a/src/360SCVP/360SCVPHevcParser.cpp +++ b/src/360SCVP/360SCVPHevcParser.cpp @@ -82,39 +82,15 @@ uint32_t gts_media_nalu_remove_emulation_bytes(const int8_t *src_buffer, int8_t } -static uint8_t digits_of_agm[256] = { - 8, 7, 6, 6, 5, 5, 5, 5, - 4, 4, 4, 4, 4, 4, 4, 4, - 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, - 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, - 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 
0, 0, - 0, 0, 0, 0, 0, 0, 0, 0 +static uint8_t digits_of_agm[128] = { + 8, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; static uint32_t bs_get_ue(GTS_BitStream *gts_bitstream) @@ -132,7 +108,10 @@ static uint32_t bs_get_ue(GTS_BitStream *gts_bitstream) gts_bs_read_int(gts_bitstream, 8); data += 8; } - flag_c = digits_of_agm[flag_r]; + if (flag_r < 128) + flag_c = digits_of_agm[flag_r]; + else + flag_c = 0; gts_bs_read_int(gts_bitstream, flag_c); data += flag_c; return gts_bs_read_int(gts_bitstream, data + 1) - 1; @@ -265,34 +244,34 @@ static bool parse_short_term_ref_pic_set(GTS_BitStream *bs, HEVC_SPS *sps, HEVCS si->used_by_curr_pic_s0_flag[i] = (bool)gts_bs_read_int(bs, 1); } for (i=0; irps[idx_rps].num_positive_pics; i++) { - uint32_t delta_poc_s1_minus1 = bs_get_ue(bs); - poc = prev + delta_poc_s1_minus1 + 1; + uint32_t deltaPocS1 = 0; + deltaPocS1 = bs_get_ue(bs) + 1; + poc = prev + deltaPocS1; prev = poc; si->rps[idx_rps].delta_poc[i] = poc; - /*used_by_curr_pic_s1_flag[ i ] = */gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); } } return true; } -void hevc_pred_weight_table(GTS_BitStream *bs, HEVCState *hevc, HEVCSliceInfo *si, HEVC_PPS *pps, HEVC_SPS *sps, uint32_t num_ref_idx_l0_active, uint32_t num_ref_idx_l1_active) +void hevc_PredWeightTable(GTS_BitStream *bs, HEVCState *hevc, HEVCSliceInfo *si, HEVC_PPS *pps, HEVC_SPS *sps, uint32_t num_ref_idx_l0_active, uint32_t num_ref_idx_l1_active) { - uint32_t i, num_ref_idx; + uint32_t i = 0; bool first_pass=true; uint8_t luma_weights[20], chroma_weights[20]; - uint32_t ChromaArrayType = sps->separate_colour_plane_flag ? 
0 : sps->chroma_format_idc; - - num_ref_idx = num_ref_idx_l0_active; - - /*luma_log2_weight_denom=*/i=bs_get_ue(bs); + uint32_t ChromaArrayType = 0; + uint32_t num_ref_idx = num_ref_idx_l0_active; + if (!sps->separate_colour_plane_flag) + ChromaArrayType = sps->chroma_format_idc; + i=bs_get_ue(bs); if (ChromaArrayType != 0) - /*delta_chroma_log2_weight_denom=*/i=bs_get_se(bs); + i=bs_get_se(bs); parse_weights: for (i=0; islice_type == GF_HEVC_SLICE_TYPE_B) { if (!first_pass) return; first_pass=false; @@ -520,7 +498,7 @@ static int32_t hevc_parse_slice_segment(GTS_BitStream *gts_bitstream, HEVCState if ( (hevc_pps->weighted_pred_flag && slice_info->slice_type == GF_HEVC_SLICE_TYPE_P ) || ( hevc_pps->weighted_bipred_flag && slice_info->slice_type == GF_HEVC_SLICE_TYPE_B) ) { - hevc_pred_weight_table(gts_bitstream, state, slice_info, hevc_pps, hevc_sps, count_index_reference_0, count_index_reference_1); + hevc_PredWeightTable(gts_bitstream, state, slice_info, hevc_pps, hevc_sps, count_index_reference_0, count_index_reference_1); } /*five_minus_max_num_merge_cand=*/bs_get_ue(gts_bitstream); } @@ -706,250 +684,264 @@ static bool hevc_parse_nal_header(GTS_BitStream *bs, uint8_t *nal_unit_type, uin return true; } - -void profile_tier_level(GTS_BitStream *bs, bool ProfilePresentFlag, uint8_t MaxNumSubLayersMinus1, HEVC_ProfileTierLevel *ptl) +void sub_profile_flag(GTS_BitStream *bs, bool ProfilePresentFlag, HEVC_ProfileTierLevel *pPTLHEVC) { - uint32_t i; - if (ProfilePresentFlag) { - ptl->profile_space = gts_bs_read_int(bs, 2); - ptl->tier_flag = gts_bs_read_int(bs, 1); - ptl->profile_idc = gts_bs_read_int(bs, 5); + if (!ProfilePresentFlag) + { + pPTLHEVC->level_idc = gts_bs_read_int(bs, 8); + } + else + { + pPTLHEVC->profile_space = gts_bs_read_int(bs, 2); + pPTLHEVC->tier_flag = gts_bs_read_int(bs, 1); + pPTLHEVC->profile_idc = gts_bs_read_int(bs, 5); + pPTLHEVC->profile_compatibility_flag = gts_bs_read_int(bs, 32); + pPTLHEVC->general_progressive_source_flag = (bool)gts_bs_read_int(bs, 1); + pPTLHEVC->general_interlaced_source_flag = (bool)gts_bs_read_int(bs, 1); + pPTLHEVC->general_non_packed_constraint_flag = (bool)gts_bs_read_int(bs, 1); + pPTLHEVC->general_frame_only_constraint_flag = (bool)gts_bs_read_int(bs, 1); + pPTLHEVC->general_reserved_44bits = gts_bs_read_long_int(bs, 44); + pPTLHEVC->level_idc = gts_bs_read_int(bs, 8); + } +} - ptl->profile_compatibility_flag = gts_bs_read_int(bs, 32); +void HEVC_profiletierevel(GTS_BitStream *bs, bool ProfilePresentFlag, uint8_t MaxNumSubLayersMinus1, HEVC_ProfileTierLevel *pPTLHEVC) +{ + uint32_t i = 0; + uint8_t MaxNum = MaxNumSubLayersMinus1; + sub_profile_flag(bs, ProfilePresentFlag, pPTLHEVC); - ptl->general_progressive_source_flag = (bool)gts_bs_read_int(bs, 1); - ptl->general_interlaced_source_flag = (bool)gts_bs_read_int(bs, 1); - ptl->general_non_packed_constraint_flag = (bool)gts_bs_read_int(bs, 1); - ptl->general_frame_only_constraint_flag = (bool)gts_bs_read_int(bs, 1); - ptl->general_reserved_44bits = gts_bs_read_long_int(bs, 44); - } - ptl->level_idc = gts_bs_read_int(bs, 8); - for (i=0; isub_ptl[i].profile_present_flag = (bool)gts_bs_read_int(bs, 1); - ptl->sub_ptl[i].level_present_flag = (bool)gts_bs_read_int(bs, 1); + for (i=0; i< MaxNum; i++) { + pPTLHEVC->sub_ptl[i].profile_present_flag = (bool)gts_bs_read_int(bs, 1); + pPTLHEVC->sub_ptl[i].level_present_flag = (bool)gts_bs_read_int(bs, 1); } - if (MaxNumSubLayersMinus1>0) { - for (i=MaxNumSubLayersMinus1; i<8; i++) { - /*reserved_zero_2bits*/gts_bs_read_int(bs, 2); + if 
(MaxNum >0) { + for (i= MaxNum; i<8; i++) { + gts_bs_read_int(bs, 2); } } - for (i=0; isub_ptl[i].profile_present_flag) { - ptl->sub_ptl[i].profile_space = gts_bs_read_int(bs, 2); - ptl->sub_ptl[i].tier_flag = (bool)gts_bs_read_int(bs, 1); - ptl->sub_ptl[i].profile_idc = gts_bs_read_int(bs, 5); - ptl->sub_ptl[i].profile_compatibility_flag = gts_bs_read_int(bs, 32); - /*ptl->sub_ptl[i].progressive_source_flag =*/ gts_bs_read_int(bs, 1); - /*ptl->sub_ptl[i].interlaced_source_flag =*/ gts_bs_read_int(bs, 1); - /*ptl->sub_ptl[i].non_packed_constraint_flag =*/ gts_bs_read_int(bs, 1); - /*ptl->sub_ptl[i].frame_only_constraint_flag =*/ gts_bs_read_int(bs, 1); - /*ptl->sub_ptl[i].reserved_44bits =*/ gts_bs_read_long_int(bs, 44); + for (i=0; i< MaxNum; i++) { + if (pPTLHEVC->sub_ptl[i].profile_present_flag) { + pPTLHEVC->sub_ptl[i].profile_space = gts_bs_read_int(bs, 2); + pPTLHEVC->sub_ptl[i].tier_flag = (bool)gts_bs_read_int(bs, 1); + pPTLHEVC->sub_ptl[i].profile_idc = gts_bs_read_int(bs, 5); + pPTLHEVC->sub_ptl[i].profile_compatibility_flag = gts_bs_read_int(bs, 32); + gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); + gts_bs_read_long_int(bs, 44); } - if (ptl->sub_ptl[i].level_present_flag) - ptl->sub_ptl[i].level_idc = gts_bs_read_int(bs, 8); + if (pPTLHEVC->sub_ptl[i].level_present_flag) + pPTLHEVC->sub_ptl[i].level_idc = gts_bs_read_int(bs, 8); } } static uint32_t scalability_type_to_idx(HEVC_VPS *vps, uint32_t scalability_type) { - uint32_t idx = 0, type; - for (type=0; type < scalability_type; type++) { - idx += (vps->scalability_mask[type] ? 1 : 0 ); + uint32_t ret = 0; + uint32_t type = 0; + uint32_t value = 0; + for (; type < scalability_type; type++) { + if (vps->scalability_mask[type]) + value = 1; + else + value = 0; + ret += value; } - return idx; + return ret; } -#define LHVC_VIEW_ORDER_INDEX 1 -#define LHVC_SCALABILITY_INDEX 2 +#define VIEW_ORDER_INDEX 1 -static uint32_t lhvc_get_scalability_id(HEVC_VPS *vps, uint32_t layer_id_in_vps, uint32_t scalability_type ) +static uint32_t get_scalability_id(HEVC_VPS *vps, uint32_t layer_id_in_vps, uint32_t scalability_type ) { - uint32_t idx; - if (!vps->scalability_mask[scalability_type]) return 0; + uint32_t idx = 0; + uint32_t ret = 0; + if (!vps->scalability_mask[scalability_type]) + return ret; idx = scalability_type_to_idx(vps, scalability_type); - return vps->dimension_id[layer_id_in_vps][idx]; -} - -static uint32_t lhvc_get_view_index(HEVC_VPS *vps, uint32_t id) -{ - return lhvc_get_scalability_id(vps, vps->layer_id_in_vps[id], LHVC_VIEW_ORDER_INDEX); + ret = vps->dimension_id[layer_id_in_vps][idx]; + return ret; } -static uint32_t lhvc_get_num_views(HEVC_VPS *vps) +static uint32_t get_num_views(HEVC_VPS *vps) { uint32_t numViews = 1, i; for (i=0; imax_layers; i++ ) { uint32_t layer_id = vps->layer_id_in_nuh[i]; - if (i>0 && ( lhvc_get_view_index( vps, layer_id) != lhvc_get_scalability_id( vps, i-1, LHVC_VIEW_ORDER_INDEX) )) { + uint32_t viewIndex = get_scalability_id(vps, vps->layer_id_in_vps[layer_id], VIEW_ORDER_INDEX); + if (i>0 && (viewIndex != get_scalability_id( vps, i-1, VIEW_ORDER_INDEX) )) { numViews++; } } return numViews; } -static void lhvc_parse_rep_format(HEVC_RepFormat *fmt, GTS_BitStream *bs) +static void parse_rep_format(HEVC_RepFormat *pRepFormatHevc, GTS_BitStream *bs) { + if (!pRepFormatHevc || !bs) + return; uint8_t chroma_bitdepth_present_flag = 0; - fmt->pic_width_luma_samples = gts_bs_read_int(bs, 16); - fmt->pic_height_luma_samples = gts_bs_read_int(bs, 
16); + pRepFormatHevc->pic_width_luma_samples = gts_bs_read_int(bs, 16); + pRepFormatHevc->pic_height_luma_samples = gts_bs_read_int(bs, 16); chroma_bitdepth_present_flag = gts_bs_read_int(bs, 1); if (chroma_bitdepth_present_flag) { - fmt->chroma_format_idc = gts_bs_read_int(bs, 2); + pRepFormatHevc->chroma_format_idc = gts_bs_read_int(bs, 2); - if (fmt->chroma_format_idc == 3) - fmt->separate_colour_plane_flag = gts_bs_read_int(bs, 1); - fmt->bit_depth_luma = 8 + gts_bs_read_int(bs, 4); - fmt->bit_depth_chroma = 8 + gts_bs_read_int(bs, 4); + if (pRepFormatHevc->chroma_format_idc == 3) + pRepFormatHevc->separate_colour_plane_flag = gts_bs_read_int(bs, 1); + pRepFormatHevc->bit_depth_luma = 8 + gts_bs_read_int(bs, 4); + pRepFormatHevc->bit_depth_chroma = 8 + gts_bs_read_int(bs, 4); } - if (/*conformance_window_vps_flag*/ gts_bs_read_int(bs, 1)) { - /*conf_win_vps_left_offset*/bs_get_ue(bs); - /*conf_win_vps_right_offset*/bs_get_ue(bs); - /*conf_win_vps_top_offset*/bs_get_ue(bs); - /*conf_win_vps_bottom_offset*/bs_get_ue(bs); + if (gts_bs_read_int(bs, 1)) { + bs_get_ue(bs); + bs_get_ue(bs); + bs_get_ue(bs); + bs_get_ue(bs); } } -static bool hevc_parse_vps_extension(HEVC_VPS *vps, GTS_BitStream *bs) +static bool hevc_parseVPSExt(HEVC_VPS *pVPSHevc, GTS_BitStream *bs) { + bool OutputLayerFlag[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; uint8_t splitting_flag, vps_nuh_layer_id_present_flag, view_id_len; - uint32_t i, j, num_scalability_types, num_add_olss, num_add_layer_set, num_indepentdent_layers, nb_bits, default_output_layer_idc=0; uint8_t dimension_id_len[16], dim_bit_offset[16]; - uint8_t /*avc_base_layer_flag, */NumLayerSets, /*default_one_target_output_layer_flag, */rep_format_idx_present_flag, ols_ids_to_ls_idx; - uint8_t layer_set_idx_for_ols_minus1[MAX_LHVC_LAYERS]; + uint8_t rep_format_idx_present_flag; + uint8_t idxolsIds; + uint8_t layerSetsNum; + uint8_t layerSetIdxForOlsMinus1[MAX_LHVC_LAYERS]; uint8_t nb_output_layers_in_output_layer_set[MAX_LHVC_LAYERS+1]; uint8_t ols_highest_output_layer_id[MAX_LHVC_LAYERS+1]; - - uint32_t k,d, r, p, iNuhLId, jNuhLId; uint8_t num_direct_ref_layers[64], num_pred_layers[64], num_layers_in_tree_partition[MAX_LHVC_LAYERS]; - uint8_t dependency_flag[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS], id_pred_layers[64][MAX_LHVC_LAYERS]; -// uint8_t num_ref_layers[64]; -// uint8_t tree_partition_layer_id[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; -// uint8_t id_ref_layers[64][MAX_LHVC_LAYERS]; -// uint8_t id_direct_ref_layers[64][MAX_LHVC_LAYERS]; + uint8_t flagDepend[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS], id_pred_layers[64][MAX_LHVC_LAYERS]; uint8_t layer_id_in_list_flag[64]; - bool OutputLayerFlag[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; - - vps->vps_extension_found=(bool)1; - if ((vps->max_layers > 1) && vps->base_layer_internal_flag) - profile_tier_level(bs, (bool)0, vps->max_sub_layers-1, &vps->ext_ptl[0]); + uint32_t i, j, num_add_layer_set, num_indepentdent_layers, nb_bits; + uint32_t numScalTypes = 0;; + uint32_t maxLayers = pVPSHevc->max_layers; + uint32_t k, r, iNuhLId, jNuhLId; + uint32_t direct; + uint32_t pred; + uint32_t independ; + uint32_t addOlssNum = 0; + uint32_t outputLayerIDC = 0; + uint32_t loopCnt = 0; + + pVPSHevc->vps_extension_found=(bool)1; + if ((maxLayers > 1) && pVPSHevc->base_layer_internal_flag) + HEVC_profiletierevel(bs, (bool)0, pVPSHevc->max_sub_layers-1, &pVPSHevc->ext_ptl[0]); splitting_flag = gts_bs_read_int(bs, 1); - num_scalability_types = 0; for (i=0; i<16; i++) { - vps->scalability_mask[i] = gts_bs_read_int(bs, 1); - num_scalability_types += 
vps->scalability_mask[i]; + pVPSHevc->scalability_mask[i] = gts_bs_read_int(bs, 1); + numScalTypes += pVPSHevc->scalability_mask[i]; } - if (num_scalability_types>=16) { - num_scalability_types=16; + if (numScalTypes >=16) { + numScalTypes =16; } dimension_id_len[0] = 0; - for (i=0; i<(num_scalability_types - splitting_flag); i++) { + for (i=0; i<(numScalTypes - splitting_flag); i++) { dimension_id_len[i] = 1 + gts_bs_read_int(bs, 3); } if (splitting_flag) { - for (i = 0; i < num_scalability_types; i++) { + for (i = 0; i < numScalTypes; i++) { dim_bit_offset[i] = 0; for (j = 0; j < i; j++) dim_bit_offset[i] += dimension_id_len[j]; } - dimension_id_len[num_scalability_types-1] = 1 + (5 - dim_bit_offset[num_scalability_types-1]); - dim_bit_offset[num_scalability_types] = 6; + dimension_id_len[numScalTypes -1] = 1 + (5 - dim_bit_offset[numScalTypes -1]); + dim_bit_offset[numScalTypes] = 6; } vps_nuh_layer_id_present_flag = gts_bs_read_int(bs, 1); - vps->layer_id_in_nuh[0] = 0; - vps->layer_id_in_vps[0] = 0; - for (i=1; imax_layers; i++) { + pVPSHevc->layer_id_in_nuh[0] = 0; + pVPSHevc->layer_id_in_vps[0] = 0; + for (i=1; ilayer_id_in_nuh[i] = gts_bs_read_int(bs, 6); + pVPSHevc->layer_id_in_nuh[i] = gts_bs_read_int(bs, 6); } else { - vps->layer_id_in_nuh[i] = i; + pVPSHevc->layer_id_in_nuh[i] = i; } - vps->layer_id_in_vps[vps->layer_id_in_nuh[i]] = i; + pVPSHevc->layer_id_in_vps[pVPSHevc->layer_id_in_nuh[i]] = i; if (!splitting_flag) { - for (j=0; jdimension_id[i][j] = gts_bs_read_int(bs, dimension_id_len[j]); + for (j=0; j< numScalTypes; j++) { + pVPSHevc->dimension_id[i][j] = gts_bs_read_int(bs, dimension_id_len[j]); } } } if (splitting_flag) { - for (i = 0; imax_layers; i++) - for (j=0; jdimension_id[i][j] = ((vps->layer_id_in_nuh[i] & ((1 << dim_bit_offset[j+1]) -1)) >> dim_bit_offset[j]); + for (i = 0; idimension_id[i][j] = ((pVPSHevc->layer_id_in_nuh[i] & ((1 << dim_bit_offset[j+1]) -1)) >> dim_bit_offset[j]); } else { - for (j=0; jdimension_id[0][j] = 0; + for (j=0; j< numScalTypes; j++) + pVPSHevc->dimension_id[0][j] = 0; } view_id_len = gts_bs_read_int(bs, 4); if (view_id_len > 0) { - for( i = 0; i < lhvc_get_num_views(vps); i++ ) { - /*m_viewIdVal[i] = */ gts_bs_read_int(bs, view_id_len); + for( i = 0; i < get_num_views(pVPSHevc); i++ ) { + gts_bs_read_int(bs, view_id_len); } } - for (i=1; imax_layers; i++) { + for (i=1; idirect_dependency_flag[i][j] = gts_bs_read_int(bs, 1); + pVPSHevc->direct_dependency_flag[i][j] = gts_bs_read_int(bs, 1); } } - - //we do the test on MAX_LHVC_LAYERS and break in the loop to avoid a wrong GCC 4.8 warning on array bounds for (i = 0; i < MAX_LHVC_LAYERS; i++) { - if (i >= vps->max_layers) break; - for (j = 0; j < vps->max_layers; j++) { - dependency_flag[i][j] = vps->direct_dependency_flag[i][j]; + if (i >= maxLayers) break; + for (j = 0; j < maxLayers; j++) { + flagDepend[i][j] = pVPSHevc->direct_dependency_flag[i][j]; for (k = 0; k < i; k++) - if (vps->direct_dependency_flag[i][k] && vps->direct_dependency_flag[k][j]) - dependency_flag[i][j] = 1; + if (pVPSHevc->direct_dependency_flag[i][k] && pVPSHevc->direct_dependency_flag[k][j]) + flagDepend[i][j] = 1; } } - for (i = 0; i < vps->max_layers; i++) { - iNuhLId = vps->layer_id_in_nuh[i]; - d = r = p = 0; - for (j = 0; j < vps->max_layers; j++) { - jNuhLId = vps->layer_id_in_nuh[j]; - if (vps->direct_dependency_flag[i][j]) { -// id_direct_ref_layers[iNuhLId][d] = jNuhLId; - d++; + for (i = 0; i < maxLayers; i++) { + iNuhLId = pVPSHevc->layer_id_in_nuh[i]; + direct = 0; + pred = 0; + r = 0; + for (j = 
0; j < maxLayers; j++) { + jNuhLId = pVPSHevc->layer_id_in_nuh[j]; + if (pVPSHevc->direct_dependency_flag[i][j]) { + direct++; } - if (dependency_flag[i][j]) { -// id_ref_layers[iNuhLId][r] = jNuhLId; + if (flagDepend[i][j]) { r++; } - if (dependency_flag[j][i]) - id_pred_layers[iNuhLId][p++] = jNuhLId; + if (flagDepend[j][i]) + id_pred_layers[iNuhLId][pred++] = jNuhLId; } - num_direct_ref_layers[iNuhLId] = d; -// num_ref_layers[iNuhLId] = r; - num_pred_layers[iNuhLId] = p; + num_direct_ref_layers[iNuhLId] = direct; + num_pred_layers[iNuhLId] = pred; } - memset(layer_id_in_list_flag, 0, 64*sizeof(uint8_t)); - k = 0; //num_indepentdent_layers - for (i = 0; i < vps->max_layers; i++) { - iNuhLId = vps->layer_id_in_nuh[i]; + memset_s(layer_id_in_list_flag, 64*sizeof(uint8_t), 0); + independ = 0; + for (i = 0; i < maxLayers; i++) { + iNuhLId = pVPSHevc->layer_id_in_nuh[i]; if (!num_direct_ref_layers[iNuhLId]) { uint32_t h = 1; - //tree_partition_layer_id[k][0] = iNuhLId; for (j = 0; j < num_pred_layers[iNuhLId]; j++) { uint32_t predLId = id_pred_layers[iNuhLId][j]; if (!layer_id_in_list_flag[predLId]) { - //tree_partition_layer_id[k][h++] = predLId; layer_id_in_list_flag[predLId] = 1; } } - num_layers_in_tree_partition[k++] = h; + num_layers_in_tree_partition[independ++] = h; } } - num_indepentdent_layers = k; + num_indepentdent_layers = independ; num_add_layer_set = 0; if (num_indepentdent_layers > 1) @@ -960,234 +952,237 @@ static bool hevc_parse_vps_extension(HEVC_VPS *vps, GTS_BitStream *bs) nb_bits =1; while ((1 << nb_bits) < (num_layers_in_tree_partition[j] + 1)) nb_bits++; - /*highest_layer_idx_plus1[i][j]*/gts_bs_read_int(bs, nb_bits); + gts_bs_read_int(bs, nb_bits); } - if (/*vps_sub_layers_max_minus1_present_flag*/gts_bs_read_int(bs, 1)) { - for (i = 0; i < vps->max_layers; i++) { - /*sub_layers_vps_max_minus1[ i ]*/gts_bs_read_int(bs, 3); + if (gts_bs_read_int(bs, 1)) { + for (i = 0; i < maxLayers; i++) { + gts_bs_read_int(bs, 3); } } - if (/*max_tid_ref_present_flag = */gts_bs_read_int(bs, 1)) { - for (i=0; i<(vps->max_layers-1) ; i++) { - for (j= i+1; j < vps->max_layers; j++) { - if (vps->direct_dependency_flag[j][i]) - /*max_tid_il_ref_pics_plus1[ i ][ j ]*/gts_bs_read_int(bs, 3); + if (gts_bs_read_int(bs, 1)) { + for (i=0; i<(maxLayers-1) ; i++) { + for (j= i+1; j < maxLayers; j++) { + if (pVPSHevc->direct_dependency_flag[j][i]) + gts_bs_read_int(bs, 3); } } } - /*default_ref_layers_active_flag*/gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); - vps->num_profile_tier_level = 1+bs_get_ue(bs); - if (vps->num_profile_tier_level > MAX_LHVC_LAYERS) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] Wrong number of PTLs in VPS %d\n", vps->num_profile_tier_level)); - vps->num_profile_tier_level=1; + pVPSHevc->num_profile_tier_level = 1+bs_get_ue(bs); + if (pVPSHevc->num_profile_tier_level > MAX_LHVC_LAYERS) { + pVPSHevc->num_profile_tier_level=1; return false; } - for (i=vps->base_layer_internal_flag ? 
2 : 1; i < vps->num_profile_tier_level; i++) { - bool vps_profile_present_flag = (bool)gts_bs_read_int(bs, 1); - profile_tier_level(bs, vps_profile_present_flag, vps->max_sub_layers-1, &vps->ext_ptl[i-1] ); + i = 1; + if (pVPSHevc->base_layer_internal_flag) + i = 2; + for (; i < pVPSHevc->num_profile_tier_level; i++) { + bool flagProfileVPS = (bool)gts_bs_read_int(bs, 1); + HEVC_profiletierevel(bs, flagProfileVPS, pVPSHevc->max_sub_layers-1, &pVPSHevc->ext_ptl[i-1] ); } + addOlssNum = 0; + layerSetsNum = pVPSHevc->num_layer_sets + num_add_layer_set; - NumLayerSets = vps->num_layer_sets + num_add_layer_set; - num_add_olss = 0; - - if (NumLayerSets > 1) { - num_add_olss = bs_get_ue(bs); - default_output_layer_idc = gts_bs_read_int(bs,2); - default_output_layer_idc = default_output_layer_idc < 2 ? default_output_layer_idc : 2; + if (layerSetsNum > 1) { + addOlssNum = bs_get_ue(bs); + outputLayerIDC = gts_bs_read_int(bs,2); + if (outputLayerIDC > 2) + outputLayerIDC = 2; } - vps->num_output_layer_sets = num_add_olss + NumLayerSets; - - - layer_set_idx_for_ols_minus1[0] = 1; - vps->output_layer_flag[0][0] = (bool)1; + pVPSHevc->num_output_layer_sets = addOlssNum + layerSetsNum; + layerSetIdxForOlsMinus1[0] = 1; + pVPSHevc->output_layer_flag[0][0] = (bool)1; - for (i = 0; i < vps->num_output_layer_sets; i++) { - if ((NumLayerSets > 2) && (i >= NumLayerSets)) { + for (i = 0; i < pVPSHevc->num_output_layer_sets; i++) { + if ((layerSetsNum > 2) && (i >= layerSetsNum)) { nb_bits = 1; - while ((1 << nb_bits) < (NumLayerSets - 1)) + while ((1 << nb_bits) < (layerSetsNum - 1)) nb_bits++; - layer_set_idx_for_ols_minus1[i] = gts_bs_read_int(bs, nb_bits); + layerSetIdxForOlsMinus1[i] = gts_bs_read_int(bs, nb_bits); } else - layer_set_idx_for_ols_minus1[i] = 0; - ols_ids_to_ls_idx = i < NumLayerSets ? i : layer_set_idx_for_ols_minus1[i] + 1; - - if ((i > (vps->num_layer_sets - 1)) || (default_output_layer_idc == 2)) { - for (j = 0; j < vps->num_layers_in_id_list[ols_ids_to_ls_idx]; j++) - vps->output_layer_flag[i][j] = (bool)gts_bs_read_int(bs, 1); + layerSetIdxForOlsMinus1[i] = 0; + idxolsIds = i < layerSetsNum ? 
i : layerSetIdxForOlsMinus1[i] + 1; + loopCnt = pVPSHevc->num_layers_in_id_list[idxolsIds]; + if ((i > (pVPSHevc->num_layer_sets - 1)) || (outputLayerIDC == 2)) { + for (j = 0; j < loopCnt; j++) + pVPSHevc->output_layer_flag[i][j] = (bool)gts_bs_read_int(bs, 1); } - if ((default_output_layer_idc == 0) || (default_output_layer_idc == 1)) { - for (j = 0; j < vps->num_layers_in_id_list[ols_ids_to_ls_idx]; j++) { - if ((default_output_layer_idc == 0) || (vps->LayerSetLayerIdList[i][j] == vps->LayerSetLayerIdListMax[i])) + if ((outputLayerIDC == 0) || (outputLayerIDC == 1)) { + for (j = 0; j < loopCnt; j++) { + if ((outputLayerIDC == 0) || (pVPSHevc->LayerSetLayerIdList[i][j] == pVPSHevc->LayerSetLayerIdListMax[i])) OutputLayerFlag[i][j] = true; else OutputLayerFlag[i][j] = false; } } - - for (j = 0; j < vps->num_layers_in_id_list[ols_ids_to_ls_idx]; j++) { + for (j = 0; j < loopCnt; j++) { if (OutputLayerFlag[i][j]) { - uint32_t curLayerID, k; - vps->necessary_layers_flag[i][j] = true; - curLayerID = vps->LayerSetLayerIdList[i][j]; + uint32_t layeridCur; + uint32_t k; + pVPSHevc->necessary_layers_flag[i][j] = true; + layeridCur = pVPSHevc->LayerSetLayerIdList[i][j]; for (k = 0; k < j; k++) { - uint32_t refLayerId = vps->LayerSetLayerIdList[i][k]; - if (dependency_flag[vps->layer_id_in_vps[curLayerID]][vps->layer_id_in_vps[refLayerId]]) - vps->necessary_layers_flag[i][k] = true; + uint32_t LayerIdref = pVPSHevc->LayerSetLayerIdList[i][k]; + if (flagDepend[pVPSHevc->layer_id_in_vps[layeridCur]][pVPSHevc->layer_id_in_vps[LayerIdref]]) + pVPSHevc->necessary_layers_flag[i][k] = true; } } } - vps->num_necessary_layers[i] = 0; - for (j = 0; j < vps->num_layers_in_id_list[ols_ids_to_ls_idx]; j++) { - if (vps->necessary_layers_flag[i][j]) - vps->num_necessary_layers[i] += 1; + pVPSHevc->num_necessary_layers[i] = 0; + for (j = 0; j < loopCnt; j++) { + if (pVPSHevc->necessary_layers_flag[i][j]) + pVPSHevc->num_necessary_layers[i] += 1; } if (i == 0) { - if (vps->base_layer_internal_flag) { - if (vps->max_layers > 1) - vps->profile_tier_level_idx[0][0] = 1; + if (pVPSHevc->base_layer_internal_flag) { + if (maxLayers > 1) + pVPSHevc->profile_tier_level_idx[0][0] = 1; else - vps->profile_tier_level_idx[0][0] = 0; + pVPSHevc->profile_tier_level_idx[0][0] = 0; } continue; } nb_bits = 1; - while ((uint32_t)(1 << nb_bits) < vps->num_profile_tier_level) + while ((uint32_t)(1 << nb_bits) < pVPSHevc->num_profile_tier_level) nb_bits++; - for (j = 0; j < vps->num_layers_in_id_list[ols_ids_to_ls_idx]; j++) - if (vps->necessary_layers_flag[i][j] && vps->num_profile_tier_level) - vps->profile_tier_level_idx[i][j] = gts_bs_read_int(bs, nb_bits); + for (j = 0; j < loopCnt; j++) + if (pVPSHevc->necessary_layers_flag[i][j] && pVPSHevc->num_profile_tier_level) + pVPSHevc->profile_tier_level_idx[i][j] = gts_bs_read_int(bs, nb_bits); else - vps->profile_tier_level_idx[i][j] = 0; + pVPSHevc->profile_tier_level_idx[i][j] = 0; nb_output_layers_in_output_layer_set[i] = 0; ols_highest_output_layer_id[i] = 0; - for (j = 0; j < vps->num_layers_in_id_list[ols_ids_to_ls_idx]; j++) { + for (j = 0; j < loopCnt; j++) { nb_output_layers_in_output_layer_set[i] += OutputLayerFlag[i][j]; if (OutputLayerFlag[i][j]) { - ols_highest_output_layer_id[i] = vps->LayerSetLayerIdList[ols_ids_to_ls_idx][j]; + ols_highest_output_layer_id[i] = pVPSHevc->LayerSetLayerIdList[idxolsIds][j]; } } if (nb_output_layers_in_output_layer_set[i] == 1 && ols_highest_output_layer_id[i] > 0) - vps->alt_output_layer_flag[i] = (bool)gts_bs_read_int(bs, 1); + 
pVPSHevc->alt_output_layer_flag[i] = (bool)gts_bs_read_int(bs, 1); } - vps->num_rep_formats = 1 + bs_get_ue(bs); - if (vps->num_rep_formats > 16) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] Wrong number of rep formats in VPS %d\n", vps->num_rep_formats)); - vps->num_rep_formats = 0; + pVPSHevc->num_rep_formats = 1 + bs_get_ue(bs); + if (pVPSHevc->num_rep_formats > 16) { + pVPSHevc->num_rep_formats = 0; return false; } - - for (i = 0; i < vps->num_rep_formats; i++) { - lhvc_parse_rep_format(&vps->rep_formats[i], bs); + for (i = 0; i < pVPSHevc->num_rep_formats; i++) { + parse_rep_format(&pVPSHevc->rep_formats[i], bs); } - if (vps->num_rep_formats > 1) + if (pVPSHevc->num_rep_formats > 1) rep_format_idx_present_flag = gts_bs_read_int(bs, 1); else rep_format_idx_present_flag = 0; - vps->rep_format_idx[0] = 0; + pVPSHevc->rep_format_idx[0] = 0; nb_bits = 1; - while ((uint32_t)(1 << nb_bits) < vps->num_rep_formats) + while ((uint32_t)(1 << nb_bits) < pVPSHevc->num_rep_formats) nb_bits++; - for (i = vps->base_layer_internal_flag ? 1 : 0; i < vps->max_layers; i++) { + for (i = pVPSHevc->base_layer_internal_flag ? 1 : 0; i < maxLayers; i++) { if (rep_format_idx_present_flag) { - vps->rep_format_idx[i] = gts_bs_read_int(bs, nb_bits); + pVPSHevc->rep_format_idx[i] = gts_bs_read_int(bs, nb_bits); } else { - vps->rep_format_idx[i] = i < vps->num_rep_formats - 1 ? i : vps->num_rep_formats - 1; + pVPSHevc->rep_format_idx[i] = i < pVPSHevc->num_rep_formats - 1 ? i : pVPSHevc->num_rep_formats - 1; } } //TODO - we don't use the rest ... -return true; + return true; } -static void sub_layer_hrd_parameters(GTS_BitStream *bs, int32_t subLayerId, uint32_t cpb_cnt, bool sub_pic_hrd_params_present_flag) { +static void sub_layer_hrd_parameters(GTS_BitStream *bs, int32_t subLayerId, uint32_t cpb_cnt, bool flagHdrParaPresentSubpic) { uint32_t i; for (i = 0; i <= cpb_cnt; i++) { - /*bit_rate_value_minus1[i] = */bs_get_ue(bs); - /*cpb_size_value_minus1[i] = */bs_get_ue(bs); - if (sub_pic_hrd_params_present_flag) { - /*cpb_size_du_value_minus1[i] = */bs_get_ue(bs); - /*bit_rate_du_value_minus1[i] = */bs_get_ue(bs); + bs_get_ue(bs); + bs_get_ue(bs); + if (flagHdrParaPresentSubpic) { + bs_get_ue(bs); + bs_get_ue(bs); } - /*cbr_flag[i] = */gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); } } static void hevc_parse_hrd_parameters(GTS_BitStream *bs, bool commonInfPresentFlag, int32_t maxNumSubLayersMinus1) { int32_t i; - bool nal_hrd_parameters_present_flag = false; - bool vcl_hrd_parameters_present_flag = false; - bool sub_pic_hrd_params_present_flag = false; + uint32_t flagHdrParaPresent = 0; + uint32_t flagHdrParaPresentVcl = 0; + bool flagHdrParaPresentSubpic = false; if (commonInfPresentFlag) { - nal_hrd_parameters_present_flag = (bool)gts_bs_read_int(bs, 1); - vcl_hrd_parameters_present_flag = (bool)gts_bs_read_int(bs, 1); - if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) { - sub_pic_hrd_params_present_flag = (bool)gts_bs_read_int(bs, 1); - if (sub_pic_hrd_params_present_flag) { - /*tick_divisor_minus2 = */gts_bs_read_int(bs, 8); - /*du_cpb_removal_delay_increment_length_minus1 = */gts_bs_read_int(bs, 5); - /*sub_pic_cpb_params_in_pic_timing_sei_flag = */gts_bs_read_int(bs, 1); - /*dpb_output_delay_du_length_minus1 = */gts_bs_read_int(bs, 5); + flagHdrParaPresent = gts_bs_read_int(bs, 1); + flagHdrParaPresentVcl = gts_bs_read_int(bs, 1); + if (flagHdrParaPresent || flagHdrParaPresentVcl) { + flagHdrParaPresentSubpic = (bool)gts_bs_read_int(bs, 1); + if (flagHdrParaPresentSubpic) { + 
gts_bs_read_int(bs, 8); + gts_bs_read_int(bs, 5); + gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 5); } - /*bit_rate_scale = */gts_bs_read_int(bs, 4); - /*cpb_size_scale = */gts_bs_read_int(bs, 4); - if (sub_pic_hrd_params_present_flag) { - /*cpb_size_du_scale = */gts_bs_read_int(bs, 4); + gts_bs_read_int(bs, 4); + gts_bs_read_int(bs, 4); + if (flagHdrParaPresentSubpic) { + gts_bs_read_int(bs, 4); } - /*initial_cpb_removal_delay_length_minus1 = */gts_bs_read_int(bs, 5); - /*au_cpb_removal_delay_length_minus1 = */gts_bs_read_int(bs, 5); - /*dpb_output_delay_length_minus1 = */gts_bs_read_int(bs, 5); + gts_bs_read_int(bs, 5); + gts_bs_read_int(bs, 5); + gts_bs_read_int(bs, 5); } } for (i = 0; i <= maxNumSubLayersMinus1; i++) { - bool fixed_pic_rate_general_flag_i = (bool)gts_bs_read_int(bs, 1); - bool fixed_pic_rate_within_cvs_flag_i = true; - bool low_delay_hrd_flag_i = false; + uint32_t fixed_pic_rate_general_flag_i = 0; + uint32_t fixed_pic_rate_within_cvs_flag_i = 1; + uint32_t low_delay_hrd_flag_i = 0; uint32_t cpb_cnt_minus1_i = 0; + fixed_pic_rate_general_flag_i = gts_bs_read_int(bs, 1); if (!fixed_pic_rate_general_flag_i) { - fixed_pic_rate_within_cvs_flag_i = (bool)gts_bs_read_int(bs, 1); + fixed_pic_rate_within_cvs_flag_i = gts_bs_read_int(bs, 1); } if (fixed_pic_rate_within_cvs_flag_i) - /*elemental_duration_in_tc_minus1[i] = */bs_get_ue(bs); + bs_get_ue(bs); else - low_delay_hrd_flag_i = (bool)gts_bs_read_int(bs, 1); + low_delay_hrd_flag_i = gts_bs_read_int(bs, 1); if (!low_delay_hrd_flag_i) { cpb_cnt_minus1_i = bs_get_ue(bs); } - if (nal_hrd_parameters_present_flag) { - sub_layer_hrd_parameters(bs, i, cpb_cnt_minus1_i, sub_pic_hrd_params_present_flag); + if (flagHdrParaPresent) { + sub_layer_hrd_parameters(bs, i, cpb_cnt_minus1_i, flagHdrParaPresentSubpic); } - if (vcl_hrd_parameters_present_flag) { - sub_layer_hrd_parameters(bs, i, cpb_cnt_minus1_i, sub_pic_hrd_params_present_flag); + if (flagHdrParaPresentVcl) { + sub_layer_hrd_parameters(bs, i, cpb_cnt_minus1_i, flagHdrParaPresentSubpic); } } } static int32_t gts_media_hevc_read_vps_bs(GTS_BitStream *bs, HEVCState *hevc, bool stop_at_vps_ext) { - uint8_t vps_sub_layer_ordering_info_present_flag, vps_extension_flag; - uint32_t i, j; + uint8_t subLayerOrderInfoPresentFlag; + uint8_t vps_extension_flag; + uint8_t layer_id_included_flag[MAX_LHVC_LAYERS][64]; + uint32_t i = 0; + uint32_t j; int32_t vps_id = -1; HEVC_VPS *vps; - uint8_t layer_id_included_flag[MAX_LHVC_LAYERS][64]; - + uint32_t layerIDMax = 0; + uint32_t layerSetsNum = 0; //nalu header already parsed vps_id = gts_bs_read_int(bs, 4); - if (vps_id>=16) return -1; + if (vps_id>=16) + return -1; vps = &hevc->vps[vps_id]; vps->bit_pos_vps_extensions = -1; @@ -1199,42 +1194,53 @@ static int32_t gts_media_hevc_read_vps_bs(GTS_BitStream *bs, HEVCState *hevc, bo vps->base_layer_internal_flag = (bool)gts_bs_read_int(bs, 1); vps->base_layer_available_flag = (bool)gts_bs_read_int(bs, 1); vps->max_layers = 1 + gts_bs_read_int(bs, 6); - if (vps->max_layers>MAX_LHVC_LAYERS) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] sorry, %d layers in VPS but only %d supported\n", vps->max_layers, MAX_LHVC_LAYERS)); + if (vps->max_layers>MAX_LHVC_LAYERS) + { return -1; } vps->max_sub_layers = gts_bs_read_int(bs, 3) + 1; vps->temporal_id_nesting = (bool)gts_bs_read_int(bs, 1); - /* vps_reserved_ffff_16bits = */ gts_bs_read_int(bs, 16); - profile_tier_level(bs, (bool)1, vps->max_sub_layers-1, &vps->ptl); + gts_bs_read_int(bs, 16); + HEVC_profiletierevel(bs, (bool)1, vps->max_sub_layers-1, 
&vps->ptl); - vps_sub_layer_ordering_info_present_flag = gts_bs_read_int(bs, 1); - for (i=(vps_sub_layer_ordering_info_present_flag ? 0 : vps->max_sub_layers - 1); i < vps->max_sub_layers; i++) { + subLayerOrderInfoPresentFlag = gts_bs_read_int(bs, 1); + if (subLayerOrderInfoPresentFlag) + i = 0; + else + i = vps->max_sub_layers - 1; + for (; i < vps->max_sub_layers; i++) + { vps->vps_max_dec_pic_buffering_minus1 = bs_get_ue(bs); vps->vps_max_num_reorder_pics = bs_get_ue(bs); vps->vps_max_latency_increase_plus1 = bs_get_ue(bs); } - vps->max_layer_id = gts_bs_read_int(bs, 6); - if (vps->max_layer_id > MAX_LHVC_LAYERS) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] VPS max layer ID %u but GPAC only supports %u\n", vps->max_layer_id, MAX_LHVC_LAYERS)); + layerIDMax = gts_bs_read_int(bs, 6); + vps->max_layer_id = layerIDMax; + if (layerIDMax > MAX_LHVC_LAYERS) + { return -1; } - vps->num_layer_sets = bs_get_ue(bs) + 1; - if (vps->num_layer_sets > MAX_LHVC_LAYERS) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] Wrong number of layer sets in VPS %d\n", vps->num_layer_sets)); + layerSetsNum = bs_get_ue(bs) + 1; + vps->num_layer_sets = layerSetsNum; + if (layerSetsNum > MAX_LHVC_LAYERS) + { return -1; } - for (i=1; i < vps->num_layer_sets; i++) { - for (j=0; j <= vps->max_layer_id; j++) { + for (i=1; i < layerSetsNum; i++) + { + for (j=0; j <= layerIDMax; j++) + { layer_id_included_flag[ i ][ j ] = gts_bs_read_int(bs, 1); } } vps->num_layers_in_id_list[0] = 1; - for (i = 1; i < vps->num_layer_sets; i++) { - uint32_t n, m; - n = 0; - for (m = 0; m <= vps->max_layer_id; m++) - if (layer_id_included_flag[i][m]) { + for (i = 1; i < layerSetsNum; i++) + { + uint32_t n = 0; + uint32_t m = 0; + for (m = 0; m <= layerIDMax; m++) + if (layer_id_included_flag[i][m]) + { if(n < MAX_LHVC_LAYERS) vps->LayerSetLayerIdList[i][n++] = m; if (vps->LayerSetLayerIdListMax[i] < m) @@ -1242,23 +1248,25 @@ static int32_t gts_media_hevc_read_vps_bs(GTS_BitStream *bs, HEVCState *hevc, bo } vps->num_layers_in_id_list[i] = n; } - if (/*vps_timing_info_present_flag*/gts_bs_read_int(bs, 1)) { - uint32_t vps_num_hrd_parameters; - /*uint32_t vps_num_units_in_tick = */gts_bs_read_int(bs, 32); - /*uint32_t vps_time_scale = */gts_bs_read_int(bs, 32); - if (/*vps_poc_proportional_to_timing_flag*/gts_bs_read_int(bs, 1)) { - /*vps_num_ticks_poc_diff_one_minus1*/bs_get_ue(bs); - } - vps_num_hrd_parameters = bs_get_ue(bs); - for( i = 0; i < vps_num_hrd_parameters; i++ ) { - bool cprms_present_flag = true; - /*hrd_layer_set_idx[i] = */bs_get_ue(bs); - if (i>0) - cprms_present_flag = (bool)gts_bs_read_int(bs, 1) ; - hevc_parse_hrd_parameters(bs, cprms_present_flag, vps->max_sub_layers - 1); - } - } - if (stop_at_vps_ext) { + if (gts_bs_read_int(bs, 1)) + { + uint32_t numHrdParamVPS; + gts_bs_read_int(bs, 32); + gts_bs_read_int(bs, 32); + if (gts_bs_read_int(bs, 1)) { + bs_get_ue(bs); + } + numHrdParamVPS = bs_get_ue(bs); + for( i = 0; i < numHrdParamVPS; i++ ) { + bool flagPresentCPRMS = true; + bs_get_ue(bs); + if (i!=0) + flagPresentCPRMS = (bool)gts_bs_read_int(bs, 1) ; + hevc_parse_hrd_parameters(bs, flagPresentCPRMS, vps->max_sub_layers - 1); + } + } + if (stop_at_vps_ext) + { return vps_id; } @@ -1266,14 +1274,13 @@ static int32_t gts_media_hevc_read_vps_bs(GTS_BitStream *bs, HEVCState *hevc, bo if (vps_extension_flag ) { bool res; gts_bs_align(bs); - res = hevc_parse_vps_extension(vps, bs); + res = hevc_parseVPSExt(vps, bs); if (res!=true) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] Failed to parse VPS 
extensions\n")); return -1; } - if (/*vps_extension2_flag*/gts_bs_read_int(bs, 1)) { + if (gts_bs_read_int(bs, 1)) { while (gts_bs_available(bs)) { - /*vps_extension_data_flag */ gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); } } } @@ -1288,15 +1295,14 @@ static void hevc_scaling_list_data(GTS_BitStream *bs) for (matrixId=0; matrixId<6; matrixId += (sizeId == 3) ? 3:1 ) { uint32_t scaling_list_pred_mode_flag_sizeId_matrixId = gts_bs_read_int(bs, 1); if( ! scaling_list_pred_mode_flag_sizeId_matrixId ) { - /*scaling_list_pred_matrix_id_delta[ sizeId ][ matrixId ] =*/ bs_get_ue(bs); + bs_get_ue(bs); } else { - //uint32_t nextCoef = 8; uint32_t coefNum = MIN(64, (1 << (4+(sizeId << 1)))); if ( sizeId > 1 ) { - /*scaling_list_dc_coef_minuS8[ sizeId - 2 ][ matrixId ] = */bs_get_se(bs); + bs_get_se(bs); } for (i = 0; i=16) { return -1; } - memset(&ptl, 0, sizeof(ptl)); + memset_s(&ptl, sizeof(ptl), 0); max_sub_layers_minus1 = 0; sps_ext_or_max_sub_layers_minus1 = 0; if (layer_id == 0) @@ -1345,8 +1352,8 @@ static int32_t gts_media_hevc_read_sps_bs(GTS_BitStream *bs, HEVCState *hevc, ui sps_ext_or_max_sub_layers_minus1 = gts_bs_read_int(bs, 3); multiLayerExtSpsFlag = (bool)((layer_id != 0) && (sps_ext_or_max_sub_layers_minus1 == 7)); if (!multiLayerExtSpsFlag) { - /*temporal_id_nesting_flag = */gts_bs_read_int(bs, 1); - profile_tier_level(bs, (bool)1, max_sub_layers_minus1, &ptl); + gts_bs_read_int(bs, 1); + HEVC_profiletierevel(bs, (bool)1, max_sub_layers_minus1, &ptl); } sps_id = bs_get_ue(bs); @@ -1354,227 +1361,214 @@ static int32_t gts_media_hevc_read_sps_bs(GTS_BitStream *bs, HEVCState *hevc, ui return -1; } - hevc_sps = &hevc->sps[sps_id]; - if (!hevc_sps->state) { - hevc_sps->state = 1; - hevc_sps->id = sps_id; - hevc_sps->vps_id = vps_id; + spsInforHevc = &hevc->sps[sps_id]; + if (!spsInforHevc->state) { + spsInforHevc->state = 1; + spsInforHevc->id = sps_id; + spsInforHevc->vps_id = vps_id; } - hevc_sps->ptl = ptl; + spsInforHevc->ptl = ptl; vps = &hevc->vps[vps_id]; - hevc_sps->max_sub_layers_minus1 = max_sub_layers_minus1; + spsInforHevc->max_sub_layers_minus1 = max_sub_layers_minus1; - //sps_rep_format_idx = 0; if (multiLayerExtSpsFlag) { uint8_t update_rep_format_flag = gts_bs_read_int(bs, 1); if (update_rep_format_flag) { - hevc_sps->rep_format_idx = gts_bs_read_int(bs, 8); + spsInforHevc->rep_format_idx = gts_bs_read_int(bs, 8); } else { - hevc_sps->rep_format_idx = vps->rep_format_idx[layer_id]; + spsInforHevc->rep_format_idx = vps->rep_format_idx[layer_id]; } - hevc_sps->width = vps->rep_formats[hevc_sps->rep_format_idx].pic_width_luma_samples; - hevc_sps->height = vps->rep_formats[hevc_sps->rep_format_idx].pic_height_luma_samples; - hevc_sps->chroma_format_idc = vps->rep_formats[hevc_sps->rep_format_idx].chroma_format_idc; - hevc_sps->bit_depth_luma = vps->rep_formats[hevc_sps->rep_format_idx].bit_depth_luma; - hevc_sps->bit_depth_chroma = vps->rep_formats[hevc_sps->rep_format_idx].bit_depth_chroma; - hevc_sps->separate_colour_plane_flag = (bool)vps->rep_formats[hevc_sps->rep_format_idx].separate_colour_plane_flag; + spsInforHevc->width = vps->rep_formats[spsInforHevc->rep_format_idx].pic_width_luma_samples; + spsInforHevc->height = vps->rep_formats[spsInforHevc->rep_format_idx].pic_height_luma_samples; + spsInforHevc->chroma_format_idc = vps->rep_formats[spsInforHevc->rep_format_idx].chroma_format_idc; + spsInforHevc->bit_depth_luma = vps->rep_formats[spsInforHevc->rep_format_idx].bit_depth_luma; + spsInforHevc->bit_depth_chroma = 
vps->rep_formats[spsInforHevc->rep_format_idx].bit_depth_chroma; + spsInforHevc->separate_colour_plane_flag = (bool)vps->rep_formats[spsInforHevc->rep_format_idx].separate_colour_plane_flag; //TODO this is crude ... - hevc_sps->ptl = vps->ext_ptl[0]; + spsInforHevc->ptl = vps->ext_ptl[0]; } else { - hevc_sps->chroma_format_idc = bs_get_ue(bs); - if (hevc_sps->chroma_format_idc==3) - hevc_sps->separate_colour_plane_flag = (bool)gts_bs_read_int(bs, 1); - hevc_sps->width = bs_get_ue(bs); - hevc_sps->height = bs_get_ue(bs); - if (/*conformance_window_flag*/gts_bs_read_int(bs, 1)) { + spsInforHevc->chroma_format_idc = bs_get_ue(bs); + if (spsInforHevc->chroma_format_idc==3) + spsInforHevc->separate_colour_plane_flag = (bool)gts_bs_read_int(bs, 1); + spsInforHevc->width = bs_get_ue(bs); + spsInforHevc->height = bs_get_ue(bs); + if (gts_bs_read_int(bs, 1)) { uint32_t width_sub, height_sub; - if (hevc_sps->chroma_format_idc==1) { + if (spsInforHevc->chroma_format_idc==1) { width_sub = height_sub = 2; } - else if (hevc_sps->chroma_format_idc==2) { + else if (spsInforHevc->chroma_format_idc==2) { width_sub = 2; height_sub = 1; } else { width_sub = height_sub = 1; } - hevc_sps->cw_left = bs_get_ue(bs); - hevc_sps->cw_right = bs_get_ue(bs); - hevc_sps->cw_top = bs_get_ue(bs); - hevc_sps->cw_bottom = bs_get_ue(bs); + spsInforHevc->cw_left = bs_get_ue(bs); + spsInforHevc->cw_right = bs_get_ue(bs); + spsInforHevc->cw_top = bs_get_ue(bs); + spsInforHevc->cw_bottom = bs_get_ue(bs); - hevc_sps->width -= width_sub * (hevc_sps->cw_left + hevc_sps->cw_right); - hevc_sps->height -= height_sub * (hevc_sps->cw_top + hevc_sps->cw_bottom); + spsInforHevc->width -= width_sub * (spsInforHevc->cw_left + spsInforHevc->cw_right); + spsInforHevc->height -= height_sub * (spsInforHevc->cw_top + spsInforHevc->cw_bottom); } - hevc_sps->bit_depth_luma = 8 + bs_get_ue(bs); - hevc_sps->bit_depth_chroma = 8 + bs_get_ue(bs); + spsInforHevc->bit_depth_luma = 8 + bs_get_ue(bs); + spsInforHevc->bit_depth_chroma = 8 + bs_get_ue(bs); } - hevc_sps->log2_max_pic_order_cnt_lsb = 4 + bs_get_ue(bs); + spsInforHevc->log2_max_pic_order_cnt_lsb = 4 + bs_get_ue(bs); if (!multiLayerExtSpsFlag) { sps_sub_layer_ordering_info_present_flag = (bool)gts_bs_read_int(bs, 1); - for(i=sps_sub_layer_ordering_info_present_flag ? 0 : hevc_sps->max_sub_layers_minus1; i<=hevc_sps->max_sub_layers_minus1; i++) { - hevc_sps->max_dec_pic_buffering = bs_get_ue(bs); - hevc_sps->num_reorder_pics = bs_get_ue(bs); - hevc_sps->max_latency_increase = bs_get_ue(bs); + for(i=sps_sub_layer_ordering_info_present_flag ? 
0 : spsInforHevc->max_sub_layers_minus1; i<=spsInforHevc->max_sub_layers_minus1; i++) { + spsInforHevc->max_dec_pic_buffering = bs_get_ue(bs); + spsInforHevc->num_reorder_pics = bs_get_ue(bs); + spsInforHevc->max_latency_increase = bs_get_ue(bs); } } - log2_min_luma_coding_block_size = 3 + bs_get_ue(bs); - log2_diff_max_min_luma_coding_block_size = bs_get_ue(bs); - hevc_sps->max_CU_width = ( 1<<(log2_min_luma_coding_block_size + log2_diff_max_min_luma_coding_block_size) ); - hevc_sps->max_CU_height = ( 1<<(log2_min_luma_coding_block_size + log2_diff_max_min_luma_coding_block_size) ); + MinLumaCodingBlockSizeLog2 = bs_get_ue(bs) + 3; + DiffMaxMinLumaCodingBlockSizeLog2 = bs_get_ue(bs); + spsInforHevc->max_CU_width = ( 1<<(MinLumaCodingBlockSizeLog2 + DiffMaxMinLumaCodingBlockSizeLog2) ); + spsInforHevc->max_CU_height = ( 1<<(MinLumaCodingBlockSizeLog2 + DiffMaxMinLumaCodingBlockSizeLog2) ); - log2_min_transform_block_size = 2 + bs_get_ue(bs); - /*log2_max_transform_block_size = log2_min_transform_block_size + */bs_get_ue(bs); + MinTransforBlockSizeLog2 = 2 + bs_get_ue(bs); + bs_get_ue(bs); depth = 0; - hevc_sps->max_transform_hierarchy_depth_inter = bs_get_ue(bs); - hevc_sps->max_transform_hierarchy_depth_intra = bs_get_ue(bs); - while( (uint32_t) ( hevc_sps->max_CU_width >> log2_diff_max_min_luma_coding_block_size ) > (uint32_t) ( 1 << ( log2_min_transform_block_size + depth ) ) ) + spsInforHevc->max_transform_hierarchy_depth_inter = bs_get_ue(bs); + spsInforHevc->max_transform_hierarchy_depth_intra = bs_get_ue(bs); + while( (uint32_t) ( spsInforHevc->max_CU_width >> DiffMaxMinLumaCodingBlockSizeLog2 ) > (uint32_t) ( 1 << ( MinTransforBlockSizeLog2 + depth ) ) ) { depth++; } - hevc_sps->max_CU_depth = log2_diff_max_min_luma_coding_block_size + depth; + spsInforHevc->max_CU_depth = DiffMaxMinLumaCodingBlockSizeLog2 + depth; - nb_CTUs = ((hevc_sps->width + hevc_sps->max_CU_width -1) / hevc_sps->max_CU_width) * ((hevc_sps->height + hevc_sps->max_CU_height-1) / hevc_sps->max_CU_height); - hevc_sps->bitsSliceSegmentAddress = 0; - while (nb_CTUs > (uint32_t) (1 << hevc_sps->bitsSliceSegmentAddress)) { - hevc_sps->bitsSliceSegmentAddress++; + nb_CTUs = ((spsInforHevc->width + spsInforHevc->max_CU_width -1) / spsInforHevc->max_CU_width) * ((spsInforHevc->height + spsInforHevc->max_CU_height-1) / spsInforHevc->max_CU_height); + spsInforHevc->bitsSliceSegmentAddress = 0; + while (nb_CTUs > (uint32_t) (1 << spsInforHevc->bitsSliceSegmentAddress)) { + spsInforHevc->bitsSliceSegmentAddress++; } scaling_list_enable_flag = (bool)gts_bs_read_int(bs, 1); if (scaling_list_enable_flag) { bool sps_infer_scaling_list_flag = (bool)0; - /*uint8_t sps_scaling_list_ref_layer_id = 0;*/ if (multiLayerExtSpsFlag) { sps_infer_scaling_list_flag = (bool)gts_bs_read_int(bs, 1); } if (sps_infer_scaling_list_flag) { - /*sps_scaling_list_ref_layer_id = */gts_bs_read_int(bs, 6); + gts_bs_read_int(bs, 6); } else { - if (/*sps_scaling_list_data_present_flag=*/gts_bs_read_int(bs, 1) ) { + if (gts_bs_read_int(bs, 1) ) { hevc_scaling_list_data(bs); } } } - /*asymmetric_motion_partitions_enabled_flag= */ gts_bs_read_int(bs, 1); - hevc_sps->sample_adaptive_offset_enabled_flag = (bool)gts_bs_read_int(bs, 1); - if (/*pcm_enabled_flag= */ gts_bs_read_int(bs, 1) ) { - /*pcm_sample_bit_depth_luma_minus1=*/gts_bs_read_int(bs, 4); - /*pcm_sample_bit_depth_chroma_minus1=*/gts_bs_read_int(bs, 4); - /*log2_min_pcm_luma_coding_block_size_minus3= */ bs_get_ue(bs); - /*log2_diff_max_min_pcm_luma_coding_block_size = */ bs_get_ue(bs); - 
/*pcm_loop_filter_disable_flag=*/gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); + spsInforHevc->sample_adaptive_offset_enabled_flag = (bool)gts_bs_read_int(bs, 1); + if (gts_bs_read_int(bs, 1) ) { + gts_bs_read_int(bs, 4); + gts_bs_read_int(bs, 4); + bs_get_ue(bs); + bs_get_ue(bs); + gts_bs_read_int(bs, 1); } - hevc_sps->num_short_term_ref_pic_sets = bs_get_ue(bs); - if (hevc_sps->num_short_term_ref_pic_sets>64) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] Invalid number of short term reference picture sets %d\n", hevc_sps->num_short_term_ref_pic_sets)); + spsInforHevc->num_short_term_ref_pic_sets = bs_get_ue(bs); + if (spsInforHevc->num_short_term_ref_pic_sets>64) { return -1; } -/* - for (i=0; inum_short_term_ref_pic_sets; i++) { - bool ret = parse_short_term_ref_pic_set(bs, hevc_sps, &hevc->s_info, i); - //cannot parse short_term_ref_pic_set, skip VUI parsing - if (!ret) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] Invalid short_term_ref_pic_set\n")); - return -1; - } - } -*/ - hevc_sps->long_term_ref_pics_present_flag = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->long_term_ref_pics_present_flag) { - hevc_sps->num_long_term_ref_pic_sps = bs_get_ue(bs); - for (i=0; inum_long_term_ref_pic_sps; i++) { - /*lt_ref_pic_poc_lsb_sps=*/gts_bs_read_int(bs, hevc_sps->log2_max_pic_order_cnt_lsb); - /*used_by_curr_pic_lt_sps_flag*/gts_bs_read_int(bs, 1); + + spsInforHevc->long_term_ref_pics_present_flag = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->long_term_ref_pics_present_flag) { + spsInforHevc->num_long_term_ref_pic_sps = bs_get_ue(bs); + for (i=0; inum_long_term_ref_pic_sps; i++) { + gts_bs_read_int(bs, spsInforHevc->log2_max_pic_order_cnt_lsb); + gts_bs_read_int(bs, 1); } } - hevc_sps->temporal_mvp_enable_flag = (bool)gts_bs_read_int(bs, 1); - hevc_sps->strong_intra_smoothing_enable_flag = (bool)gts_bs_read_int(bs, 1); + spsInforHevc->temporal_mvp_enable_flag = (bool)gts_bs_read_int(bs, 1); + spsInforHevc->strong_intra_smoothing_enable_flag = (bool)gts_bs_read_int(bs, 1); if (vui_flag_pos) *vui_flag_pos = (uint32_t)gts_bs_get_bit_offset(bs); - hevc_sps->vui_parameters_present_flag = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui_parameters_present_flag) { - - hevc_sps->vui.aspect_ratio_info_present_flag = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui.aspect_ratio_info_present_flag) { - hevc_sps->vui.sar_idc = gts_bs_read_int(bs, 8); - if (hevc_sps->vui.sar_idc == 255) { - hevc_sps->vui.sar_width = gts_bs_read_int(bs, 16); - hevc_sps->vui.sar_height = gts_bs_read_int(bs, 16); - } else if (hevc_sps->vui.sar_idc<17) { - hevc_sps->vui.sar_width = hevc_sar[hevc_sps->vui.sar_idc].w; - hevc_sps->vui.sar_height = hevc_sar[hevc_sps->vui.sar_idc].h; + spsInforHevc->vui_parameters_present_flag = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui_parameters_present_flag) { + + spsInforHevc->vui.aspect_ratio_info_present_flag = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui.aspect_ratio_info_present_flag) { + spsInforHevc->vui.sar_idc = gts_bs_read_int(bs, 8); + if (spsInforHevc->vui.sar_idc == 255) { + spsInforHevc->vui.sar_width = gts_bs_read_int(bs, 16); + spsInforHevc->vui.sar_height = gts_bs_read_int(bs, 16); + } else if (spsInforHevc->vui.sar_idc<17) { + spsInforHevc->vui.sar_width = hevc_sar[spsInforHevc->vui.sar_idc].w; + spsInforHevc->vui.sar_height = hevc_sar[spsInforHevc->vui.sar_idc].h; } } - hevc_sps->vui.overscan_info_present = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui.overscan_info_present) - hevc_sps->vui.overscan_appropriate = 
(bool)gts_bs_read_int(bs, 1); - - hevc_sps->vui.video_signal_type_present_flag =(bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui.video_signal_type_present_flag) { - hevc_sps->vui.video_format = gts_bs_read_int(bs, 3); - hevc_sps->vui.video_full_range_flag =(bool)gts_bs_read_int(bs, 1); - hevc_sps->vui.colour_description_present_flag = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui.colour_description_present_flag) { - hevc_sps->vui.colour_primaries = gts_bs_read_int(bs, 8); - hevc_sps->vui.transfer_characteristic = gts_bs_read_int(bs, 8); - hevc_sps->vui.matrix_coeffs = gts_bs_read_int(bs, 8); + spsInforHevc->vui.overscan_info_present = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui.overscan_info_present) + spsInforHevc->vui.overscan_appropriate = (bool)gts_bs_read_int(bs, 1); + + spsInforHevc->vui.video_signal_type_present_flag =(bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui.video_signal_type_present_flag) { + spsInforHevc->vui.video_format = gts_bs_read_int(bs, 3); + spsInforHevc->vui.video_full_range_flag =(bool)gts_bs_read_int(bs, 1); + spsInforHevc->vui.colour_description_present_flag = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui.colour_description_present_flag) { + spsInforHevc->vui.colour_primaries = gts_bs_read_int(bs, 8); + spsInforHevc->vui.transfer_characteristic = gts_bs_read_int(bs, 8); + spsInforHevc->vui.matrix_coeffs = gts_bs_read_int(bs, 8); } } - hevc_sps->vui.chroma_loc_info_present_flag = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui.chroma_loc_info_present_flag) { - hevc_sps->vui.chroma_sample_loc_type_top_field = bs_get_ue(bs); - hevc_sps->vui.chroma_sample_loc_type_bottom_field = bs_get_ue(bs); - } - - hevc_sps->vui.neutra_chroma_indication_flag =(bool)gts_bs_read_int(bs, 1); - hevc_sps->vui.field_seq_flag =(bool)gts_bs_read_int(bs, 1); - hevc_sps->vui.frame_field_info_present_flag =(bool)gts_bs_read_int(bs, 1); - hevc_sps->vui.default_display_window_flag = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui.default_display_window_flag) { - hevc_sps->vui.left_offset = bs_get_ue(bs); - hevc_sps->vui.right_offset = bs_get_ue(bs); - hevc_sps->vui.top_offset = bs_get_ue(bs); - hevc_sps->vui.bottom_offset = bs_get_ue(bs); - } - - hevc_sps->vui.has_timing_info = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui.has_timing_info ) { - hevc_sps->vui.num_units_in_tick = gts_bs_read_int(bs, 32); - hevc_sps->vui.time_scale = gts_bs_read_int(bs, 32); - hevc_sps->vui.poc_proportional_to_timing_flag = (bool)gts_bs_read_int(bs, 1); - if (hevc_sps->vui.poc_proportional_to_timing_flag) - hevc_sps->vui.num_ticks_poc_diff_one_minus1 = bs_get_ue(bs); - if (/*hrd_parameters_present_flag=*/gts_bs_read_int(bs, 1) ) { -// GF_LOG(GF_LOG_INFO, GF_LOG_CODING, ("[HEVC] HRD param parsing not implemented\n")); + spsInforHevc->vui.chroma_loc_info_present_flag = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui.chroma_loc_info_present_flag) { + spsInforHevc->vui.chroma_sample_loc_type_top_field = bs_get_ue(bs); + spsInforHevc->vui.chroma_sample_loc_type_bottom_field = bs_get_ue(bs); + } + + spsInforHevc->vui.neutra_chroma_indication_flag =(bool)gts_bs_read_int(bs, 1); + spsInforHevc->vui.field_seq_flag =(bool)gts_bs_read_int(bs, 1); + spsInforHevc->vui.frame_field_info_present_flag =(bool)gts_bs_read_int(bs, 1); + spsInforHevc->vui.default_display_window_flag = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui.default_display_window_flag) { + spsInforHevc->vui.left_offset = bs_get_ue(bs); + spsInforHevc->vui.right_offset = bs_get_ue(bs); + 
spsInforHevc->vui.top_offset = bs_get_ue(bs); + spsInforHevc->vui.bottom_offset = bs_get_ue(bs); + } + + spsInforHevc->vui.has_timing_info = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui.has_timing_info ) { + spsInforHevc->vui.num_units_in_tick = gts_bs_read_int(bs, 32); + spsInforHevc->vui.time_scale = gts_bs_read_int(bs, 32); + spsInforHevc->vui.poc_proportional_to_timing_flag = (bool)gts_bs_read_int(bs, 1); + if (spsInforHevc->vui.poc_proportional_to_timing_flag) + spsInforHevc->vui.num_ticks_poc_diff_one_minus1 = bs_get_ue(bs); + if (gts_bs_read_int(bs, 1) ) { return sps_id; } } - if (/*bitstream_restriction_flag=*/gts_bs_read_int(bs, 1)) { - /*tiles_fixed_structure_flag = */gts_bs_read_int(bs, 1); - /*motion_vectors_over_pic_boundaries_flag = */gts_bs_read_int(bs, 1); - /*restricted_ref_pic_lists_flag = */gts_bs_read_int(bs, 1); - /*min_spatial_segmentation_idc = */bs_get_ue(bs); - /*max_bytes_per_pic_denom = */bs_get_ue(bs); - /*max_bits_per_min_cu_denom = */bs_get_ue(bs); - /*log2_max_mv_length_horizontal = */bs_get_ue(bs); - /*log2_max_mv_length_vertical = */bs_get_ue(bs); + if (gts_bs_read_int(bs, 1)) { + gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); + bs_get_ue(bs); + bs_get_ue(bs); + bs_get_ue(bs); + bs_get_ue(bs); + bs_get_ue(bs); } } - if (/*sps_extension_flag*/gts_bs_read_int(bs, 1)) { + if (gts_bs_read_int(bs, 1)) { while (gts_bs_available(bs)) { - /*sps_extension_data_flag */ gts_bs_read_int(bs, 1); + gts_bs_read_int(bs, 1); } } @@ -1591,7 +1585,6 @@ static int32_t gts_media_hevc_read_pps_bs(GTS_BitStream *gts_bitstream, HEVCStat index_hevc_pps = bs_get_ue(gts_bitstream); if ((index_hevc_pps<0) || (index_hevc_pps>=64)) { - //GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] wrong PPS ID %d in PPS\n", index_hevc_pps)); return -1; } hevc_pps = &state_info->pps[index_hevc_pps]; @@ -1610,23 +1603,23 @@ static int32_t gts_media_hevc_read_pps_bs(GTS_BitStream *gts_bitstream, HEVCStat hevc_pps->output_flag_present_flag = (bool)gts_bs_read_int(gts_bitstream, 1); hevc_pps->num_extra_slice_header_bits = gts_bs_read_int(gts_bitstream, 3); - /*sign_data_hiding_flag = */gts_bs_read_int(gts_bitstream, 1); + gts_bs_read_int(gts_bitstream, 1); hevc_pps->cabac_init_present_flag = (bool)gts_bs_read_int(gts_bitstream, 1); hevc_pps->num_ref_idx_l0_default_active = 1 + bs_get_ue(gts_bitstream); hevc_pps->num_ref_idx_l1_default_active = 1 + bs_get_ue(gts_bitstream); hevc_pps->pic_init_qp_minus26 = bs_get_se(gts_bitstream); - /*constrained_intra_pred_flag = */gts_bs_read_int(gts_bitstream, 1); - /*transform_skip_enabled_flag = */gts_bs_read_int(gts_bitstream, 1); + gts_bs_read_int(gts_bitstream, 1); + gts_bs_read_int(gts_bitstream, 1); hevc_pps->cu_qp_delta_enabled_flag = (bool)gts_bs_read_int(gts_bitstream, 1); if (hevc_pps->cu_qp_delta_enabled_flag ) hevc_pps->diff_cu_qp_delta_depth = bs_get_ue(gts_bitstream); - /*pic_cb_qp_offset = */bs_get_se(gts_bitstream); - /*pic_cr_qp_offset = */bs_get_se(gts_bitstream); + bs_get_se(gts_bitstream); + bs_get_se(gts_bitstream); hevc_pps->slice_chroma_qp_offsets_present_flag = (bool)gts_bs_read_int(gts_bitstream, 1); hevc_pps->weighted_pred_flag = (bool)gts_bs_read_int(gts_bitstream, 1); hevc_pps->weighted_bipred_flag = (bool)gts_bs_read_int(gts_bitstream, 1); - /*transquant_bypass_enable_flag = */gts_bs_read_int(gts_bitstream, 1); + gts_bs_read_int(gts_bitstream, 1); hevc_pps->tiles_enabled_flag = (bool)gts_bs_read_int(gts_bitstream, 1); hevc_pps->entropy_coding_sync_enabled_flag = (bool)gts_bs_read_int(gts_bitstream, 1); if 
(hevc_pps->tiles_enabled_flag) { @@ -1649,9 +1642,9 @@ static int32_t gts_media_hevc_read_pps_bs(GTS_BitStream *gts_bitstream, HEVCStat state_info->tile_slice_count = 1; } hevc_pps->loop_filter_across_slices_enabled_flag = (bool)gts_bs_read_int(gts_bitstream, 1); - if (/*hevc_pps->deblocking_filter_control_present_flag*/ gts_bs_read_int(gts_bitstream, 1)) { + if (gts_bs_read_int(gts_bitstream, 1)) { hevc_pps->deblocking_filter_override_enabled_flag = (bool)gts_bs_read_int(gts_bitstream, 1); - if (! /*pic_disable_deblocking_filter_flag= */gts_bs_read_int(gts_bitstream, 1) ) { + if (! gts_bs_read_int(gts_bitstream, 1) ) { /*beta_offset_div2 = */bs_get_se(gts_bitstream); /*tc_offset_div2 = */bs_get_se(gts_bitstream); } @@ -1677,16 +1670,16 @@ int32_t gts_media_hevc_parse_nalu(hevc_specialInfo* pSpecialInfo, int8_t *data, uint32_t data_without_emulation_bytes_size = 0; bool is_slice = false; int32_t ret = -1; - HEVCSliceInfo n_state; + HEVCSliceInfo SliceInfo; uint8_t *nal_unit_type = &pSpecialInfo->naluType; uint8_t *temporal_id = &pSpecialInfo->temporal_id; uint8_t *layer_id = &pSpecialInfo->layer_id; uint16_t* slicehdrlen = &pSpecialInfo->sliceHeaderLen; uint16_t* payloadType = &pSpecialInfo->seiPayloadType; + int32_t specialID = 0; - memcpy(&n_state, &hevc->s_info, sizeof(HEVCSliceInfo)); + memcpy_s(&SliceInfo, sizeof(HEVCSliceInfo), &hevc->s_info, sizeof(HEVCSliceInfo)); - //hevc->last_parsed_vps_id = hevc->last_parsed_sps_id = hevc->last_parsed_pps_id = -1; hevc->s_info.entry_point_start_bits = -1; hevc->s_info.payload_start_offset = -1; @@ -1704,9 +1697,9 @@ int32_t gts_media_hevc_parse_nalu(hevc_specialInfo* pSpecialInfo, int8_t *data, if (!bs) goto exit; if (! hevc_parse_nal_header(bs, nal_unit_type, temporal_id, layer_id, payloadType)) goto exit; - n_state.nal_unit_type = *nal_unit_type; + SliceInfo.nal_unit_type = *nal_unit_type; - switch (n_state.nal_unit_type) { + switch (SliceInfo.nal_unit_type) { case GTS_HEVC_NALU_ACCESS_UNIT: case GTS_HEVC_NALU_END_OF_SEQ: case GTS_HEVC_NALU_END_OF_STREAM: @@ -1731,54 +1724,59 @@ int32_t gts_media_hevc_parse_nalu(hevc_specialInfo* pSpecialInfo, int8_t *data, case GTS_HEVC_NALU_SLICE_RASL_N: case GTS_HEVC_NALU_SLICE_RASL_R: is_slice = true; - /* slice - read the info and compare.*/ - ret = hevc_parse_slice_segment(bs, hevc, &n_state); - if (ret<0) goto exit; - - *slicehdrlen = n_state.payload_start_offset; - hevc_compute_poc(&n_state); - + ret = hevc_parse_slice_segment(bs, hevc, &SliceInfo); + if (ret<0) + goto exit; ret = 0; - if (hevc->s_info.poc != n_state.poc) { - ret=1; + *slicehdrlen = SliceInfo.payload_start_offset; + hevc_compute_poc(&SliceInfo); + if (hevc->s_info.poc != SliceInfo.poc) + { + ret = 1; break; } - if (n_state.first_slice_segment_in_pic_flag) { - if (!(*layer_id) || (n_state.prev_layer_id_plus1 && ((*layer_id) <= n_state.prev_layer_id_plus1 - 1)) ) { + if (SliceInfo.first_slice_segment_in_pic_flag) + { + if (!(*layer_id) + || (SliceInfo.prev_layer_id_plus1 && ((*layer_id) <= SliceInfo.prev_layer_id_plus1 - 1)) ) + { ret = 1; break; } } break; case GTS_HEVC_NALU_SEQ_PARAM: - hevc->last_parsed_sps_id = gts_media_hevc_read_sps_bs(bs, hevc, *layer_id, NULL); + specialID = gts_media_hevc_read_sps_bs(bs, hevc, *layer_id, NULL); + hevc->last_parsed_sps_id = specialID; ret = 0; break; case GTS_HEVC_NALU_PIC_PARAM: - hevc->last_parsed_pps_id = gts_media_hevc_read_pps_bs(bs, hevc); + specialID = gts_media_hevc_read_pps_bs(bs, hevc); + hevc->last_parsed_pps_id = specialID; ret = 0; break; case GTS_HEVC_NALU_VID_PARAM: - 
hevc->last_parsed_vps_id = gts_media_hevc_read_vps_bs(bs, hevc, false); + specialID = gts_media_hevc_read_vps_bs(bs, hevc, false); + hevc->last_parsed_vps_id = specialID; ret = 0; break; default: ret = 0; break; } - - /* save _prev values */ + //save the previous values if (ret && hevc->s_info.sps) { - n_state.frame_num_offset_prev = hevc->s_info.frame_num_offset; - n_state.frame_num_prev = hevc->s_info.frame_num; + SliceInfo.frame_num_offset_prev = hevc->s_info.frame_num_offset; + SliceInfo.frame_num_prev = hevc->s_info.frame_num; - n_state.poc_lsb_prev = hevc->s_info.poc_lsb; - n_state.poc_msb_prev = hevc->s_info.poc_msb; - n_state.prev_layer_id_plus1 = *layer_id + 1; + SliceInfo.poc_lsb_prev = hevc->s_info.poc_lsb; + SliceInfo.poc_msb_prev = hevc->s_info.poc_msb; + SliceInfo.prev_layer_id_plus1 = *layer_id + 1; } - if (is_slice) hevc_compute_poc(&n_state); - memcpy(&hevc->s_info, &n_state, sizeof(HEVCSliceInfo)); + if (is_slice) + hevc_compute_poc(&SliceInfo); + memcpy_s(&hevc->s_info, sizeof(HEVCSliceInfo), &SliceInfo, sizeof(HEVCSliceInfo)); exit: if (bs) gts_bs_del(bs); @@ -1920,6 +1918,16 @@ int32_t hevc_read_RwpkSEI(int8_t *pRWPKBits, uint32_t RWPKBitsSize, RegionWisePa if (numRegions > DEFAULT_REGION_NUM) goto exit; pRWPK->numRegions = numRegions; + //If the caller sets pRWPK->rectRegionPacking to NULL (because the number of tiles is not known in advance), + //the following code allocates the array on the caller's behalf; + //the caller must free() this memory after use. + if (!pRWPK->rectRegionPacking) { + pRWPK->rectRegionPacking = + (RectangularRegionWisePacking*)malloc(numRegions * sizeof(RectangularRegionWisePacking)); + } + if (!pRWPK->rectRegionPacking) + goto exit; + pRWPK->projPicWidth = gts_bs_read_int(bs, 32); pRWPK->projPicHeight = gts_bs_read_int(bs, 32); pRWPK->packedPicWidth = gts_bs_read_int(bs, 16); //bitstr.read16Bits(); @@ -1929,7 +1937,7 @@ int32_t hevc_read_RwpkSEI(int8_t *pRWPKBits, uint32_t RWPKBitsSize, RegionWisePa { RectangularRegionWisePacking region; uint8_t packed8Bits = gts_bs_read_int(bs, 8); - memset(&region, 0, sizeof(RectangularRegionWisePacking)); + memset_s(&region, sizeof(RectangularRegionWisePacking), 0); region.guardBandFlag = (packed8Bits >> 4) & 0x01; // read RectRegionPacking region.projRegWidth = gts_bs_read_int(bs, 32); @@ -1956,8 +1964,13 @@ int32_t hevc_read_RwpkSEI(int8_t *pRWPKBits, uint32_t RWPKBitsSize, RegionWisePa region.gbType2 = (packed16Bits >> 6) & 0x07; region.gbType3 = (packed16Bits >> 3) & 0x07; } - memcpy(&pRWPK->rectRegionPacking[i], &region, sizeof(RectangularRegionWisePacking)); + memcpy_s(&pRWPK->rectRegionPacking[i], sizeof(RectangularRegionWisePacking), &region, sizeof(RectangularRegionWisePacking)); } + pRWPK->numHiRegions = gts_bs_read_int(bs, 8); + pRWPK->lowResPicWidth = gts_bs_read_int(bs, 32); + pRWPK->lowResPicHeight = gts_bs_read_int(bs, 32); + pRWPK->timeStamp = gts_bs_read_int(bs, 32); + exit: if (bs) gts_bs_del(bs); if (data_without_emulation_bytes) gts_free(data_without_emulation_bytes); diff --git a/src/360SCVP/360SCVPHevcParser.h b/src/360SCVP/360SCVPHevcParser.h index f0abca91..8903c9f7 100644 --- a/src/360SCVP/360SCVPHevcParser.h +++ b/src/360SCVP/360SCVPHevcParser.h @@ -184,6 +184,10 @@ struct RegionWisePackingSEI uint16_t packedPictureHeight; uint16_t regionsSize; RegionStruct* pRegions; + uint8_t numHiRegions; + uint32_t lowResPicWidth; + uint32_t lowResPicHeight; + uint32_t timeStamp; }; struct ViewportStruct @@ -320,45 +324,46 @@ typedef struct RepFormat #define MAX_LHVC_LAYERS 4 -#define MAX_NUM_LAYER_SETS 
1024 +#define NUM_LAYER_SETS_MAX 1024 typedef struct { int32_t id; - /*used to discard repeated SPSs - 0: not parsed, 1 parsed, 2 stored*/ - uint32_t state; - int32_t bit_pos_vps_extensions; - uint32_t crc; bool vps_extension_found; - uint32_t max_layers, max_sub_layers, max_layer_id, num_layer_sets; bool temporal_id_nesting; - HEVC_ProfileTierLevel ptl; - - HEVC_SublayerPTL sub_ptl[8]; - - + bool base_layer_internal_flag; + bool base_layer_available_flag; + uint8_t num_profile_tier_level; + uint8_t num_output_layer_sets; + int32_t bit_pos_vps_extensions; + uint32_t max_layers; + uint32_t max_sub_layers; + uint32_t max_layer_id; + uint32_t num_layer_sets; + uint32_t vps_max_dec_pic_buffering_minus1; + uint32_t vps_max_num_reorder_pics; + uint32_t vps_max_latency_increase_plus1; + uint32_t num_rep_formats; + uint32_t state; + uint32_t crc; + uint32_t rep_format_idx[16]; uint32_t scalability_mask[16]; uint32_t dimension_id[MAX_LHVC_LAYERS][16]; uint32_t layer_id_in_nuh[MAX_LHVC_LAYERS]; uint32_t layer_id_in_vps[MAX_LHVC_LAYERS]; - - uint8_t num_profile_tier_level, num_output_layer_sets; - uint32_t profile_level_tier_idx[MAX_LHVC_LAYERS]; - HEVC_ProfileTierLevel ext_ptl[MAX_LHVC_LAYERS]; - - uint32_t num_rep_formats; - HEVC_RepFormat rep_formats[16]; - uint32_t rep_format_idx[16]; - bool base_layer_internal_flag, base_layer_available_flag; - uint8_t num_layers_in_id_list[MAX_NUM_LAYER_SETS]; - uint8_t direct_dependency_flag[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; + HEVC_ProfileTierLevel ptl; bool output_layer_flag[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; - uint8_t profile_tier_level_idx[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; bool alt_output_layer_flag[MAX_LHVC_LAYERS]; - uint8_t num_necessary_layers[MAX_LHVC_LAYERS]; bool necessary_layers_flag[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; + uint8_t num_layers_in_id_list[NUM_LAYER_SETS_MAX]; + uint8_t direct_dependency_flag[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; + uint8_t profile_tier_level_idx[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; + uint8_t num_necessary_layers[MAX_LHVC_LAYERS]; uint8_t LayerSetLayerIdList[MAX_LHVC_LAYERS][MAX_LHVC_LAYERS]; - uint8_t LayerSetLayerIdListMax[MAX_LHVC_LAYERS]; //the highest value in LayerSetLayerIdList[i] - uint32_t vps_max_dec_pic_buffering_minus1, vps_max_num_reorder_pics, vps_max_latency_increase_plus1; + uint8_t LayerSetLayerIdListMax[MAX_LHVC_LAYERS]; + uint32_t profile_level_tier_idx[MAX_LHVC_LAYERS]; + HEVC_ProfileTierLevel ext_ptl[MAX_LHVC_LAYERS]; + HEVC_SublayerPTL sub_ptl[8]; + HEVC_RepFormat rep_formats[16]; } HEVC_VPS; diff --git a/src/360SCVP/360SCVPHevcTileMerge.cpp b/src/360SCVP/360SCVPHevcTileMerge.cpp index 6f0d18c1..4d12b9ec 100644 --- a/src/360SCVP/360SCVPHevcTileMerge.cpp +++ b/src/360SCVP/360SCVPHevcTileMerge.cpp @@ -32,6 +32,7 @@ #include "360SCVPHevcEncHdr.h" #include "360SCVPHevcParser.h" #include "360SCVPMergeStreamAPI.h" + #include "360SCVPLog.h" int32_t gcd(int32_t a, int32_t b) { @@ -59,11 +60,11 @@ int32_t init_one_bitstream(oneStream_info **pBs) pTiledBitstreams = (oneStream_info *)malloc(sizeof(oneStream_info)); if (!pTiledBitstreams) return -1; - memset(pTiledBitstreams, 0, sizeof(oneStream_info)); + memset_s(pTiledBitstreams, sizeof(oneStream_info), 0); pTiledBitstreams->hevcSlice = (HEVCState*)malloc(sizeof(HEVCState)); if(pTiledBitstreams->hevcSlice) { - memset(pTiledBitstreams->hevcSlice, 0, sizeof(HEVCState)); + memset_s(pTiledBitstreams->hevcSlice, sizeof(HEVCState), 0); pTiledBitstreams->hevcSlice->sps_active_idx = -1; } pTiledBitstreams->outputBufferLen = 0; @@ -158,10 +159,10 @@ int32_t 
merge_header(GTS_BitStream *bs, oneStream_info* pSlice, uint8_t **pBitst HEVCState *hevc = pSlice->hevcSlice; hevc_specialInfo specialInfo; - memset(&specialInfo, 0, sizeof(hevc_specialInfo)); + memset_s(&specialInfo, sizeof(hevc_specialInfo), 0); specialInfo.ptr = pBufferSliceCur; specialInfo.ptr_size = lenSlice; - memset(nalsize, 0, sizeof(nalsize)); + memset_s(nalsize, sizeof(nalsize), 0); uint64_t bs_position = bs->position; int32_t spsCnt; parse_hevc_specialinfo(&specialInfo, hevc, nalsize, &specialLen, &spsCnt, 0); @@ -174,7 +175,7 @@ int32_t merge_header(GTS_BitStream *bs, oneStream_info* pSlice, uint8_t **pBitst { if (orgHevc) { - memcpy(orgHevc, hevc, sizeof(HEVCState)); + memcpy_s(orgHevc, sizeof(HEVCState), hevc, sizeof(HEVCState)); } // Choose LR header if HR and LR exist @@ -198,7 +199,7 @@ int32_t merge_header(GTS_BitStream *bs, oneStream_info* pSlice, uint8_t **pBitst bs_position = bs->position; //copy slice data - memcpy(pBitstreamCur, pBufferSliceCur + specialLen, nalsize[SLICE_DATA]); + memcpy_s(pBitstreamCur, nalsize[SLICE_DATA], pBufferSliceCur + specialLen, nalsize[SLICE_DATA]); pBitstreamCur += nalsize[SLICE_DATA]; bs->position += nalsize[SLICE_DATA]; pSlice->outputBufferLen += nalsize[SLICE_DATA]; @@ -265,7 +266,7 @@ int32_t get_merge_solution(hevc_mergeStream *mergeStream) // Check if the input tile number is legitimate if(height == 0 || HR_hc == 0 || height % LR_tile_h || HR_ntile % HR_hc || LR_ntile % (height / LR_tile_h)) { - printf("The input tile number is not legitimate!\n"); + SCVP_LOG(LOG_ERROR, "The input tile number is not legitimate!\n"); return -1; } @@ -388,7 +389,7 @@ void* tile_merge_Init(param_mergeStream *mergeStreamParams) hevc_mergeStream *mergeStream = (hevc_mergeStream *)malloc(sizeof(hevc_mergeStream)); if(!mergeStream) return NULL; - memset(mergeStream, 0, sizeof(hevc_mergeStream)); + memset_s(mergeStream, sizeof(hevc_mergeStream), 0); int32_t HR_ntile = mergeStreamParams->highRes.selectedTilesCount; int32_t LR_ntile = mergeStreamParams->lowRes.selectedTilesCount; @@ -396,7 +397,7 @@ void* tile_merge_Init(param_mergeStream *mergeStreamParams) copy_tile_params(&(mergeStream->lowRes), &(mergeStreamParams->lowRes)); mergeStream->slice_segment_address = (int32_t *)malloc((HR_ntile+LR_ntile) * sizeof(int32_t)); - memset(mergeStream->slice_segment_address, 0, (HR_ntile+LR_ntile) * sizeof(int32_t)); + memset_s(mergeStream->slice_segment_address, (HR_ntile+LR_ntile) * sizeof(int32_t), 0); mergeStream->inputBistreamsLen = 0; diff --git a/src/360SCVP/360SCVPHevcTilestream.cpp b/src/360SCVP/360SCVPHevcTilestream.cpp index c89389f6..eadebf53 100644 --- a/src/360SCVP/360SCVPHevcTilestream.cpp +++ b/src/360SCVP/360SCVPHevcTilestream.cpp @@ -30,6 +30,7 @@ #include "360SCVPHevcParser.h" #include "360SCVPHevcEncHdr.h" #include "360SCVPTiledstreamAPI.h" +#include "360SCVPLog.h" int32_t hevc_import_ffextradata(hevc_specialInfo* pSpecialInfo, HEVCState* hevc, uint32_t *pSize, int32_t *spsCnt, int32_t bParse) { @@ -202,7 +203,7 @@ int32_t parse_hevc_specialinfo(hevc_specialInfo* pSpecialInfo, HEVCState* hevc, if (byteCnt > 0) { - memcpy(pSpecialInfo->ptr, pSpecialInfo->ptr + byteCnt, ptr_size); + memmove_s(pSpecialInfo->ptr, ptr_size, pSpecialInfo->ptr + byteCnt, ptr_size); pSpecialInfo->ptr_size = ptr_size; } int32_t nalCnt = hevc_import_ffextradata(pSpecialInfo, hevc, nalsize, spsCnt, bParse); @@ -236,12 +237,12 @@ int32_t parse_tiles_info(hevc_gen_tiledstream* pGenTilesStream) uint8_t * pBufferSliceCur = pSliceCur->pTiledBitstreamBuffer; int32_t lenSlice = 
pSliceCur->inputBufferLen; uint32_t nalsize[200]; - memset(nalsize, 0, sizeof(nalsize)); + memset_s(nalsize, sizeof(nalsize), 0); pSpecialInfo->ptr = pBufferSliceCur; pSpecialInfo->ptr_size = lenSlice; - int32_t spsCnt; + int32_t spsCnt = 0; int32_t nalCnt = parse_hevc_specialinfo(pSpecialInfo, pSliceCur->hevcSlice, nalsize, &specialLen, &spsCnt, pGenTilesStream->parseType); if (spsCnt > 1) pBufferSliceCur = pBufferSliceCur + (spsCnt - 1) * specialLen; @@ -288,8 +289,8 @@ int32_t parse_tiles_info(hevc_gen_tiledstream* pGenTilesStream) HEVC_PPS *pps = &pSliceCur->hevcSlice->pps[pSliceCur->hevcSlice->last_parsed_pps_id]; - memset(pSliceCur->columnWidth, 0, sizeof(pSliceCur->columnWidth)); - memset(pSliceCur->rowHeight, 0, sizeof(pSliceCur->rowHeight)); + memset_s(pSliceCur->columnWidth, sizeof(pSliceCur->columnWidth), 0); + memset_s(pSliceCur->rowHeight, sizeof(pSliceCur->rowHeight), 0); if(!pps->tiles_enabled_flag) { pSliceCur->tilesWidthCount = 1; @@ -400,7 +401,7 @@ void* genTiledStream_Init(param_gen_tiledStream* pParamGenTiledStream) hevc_gen_tiledstream *pGen = (hevc_gen_tiledstream *)malloc(sizeof(hevc_gen_tiledstream)); if (!pGen) return NULL; - memset(pGen, 0, sizeof(hevc_gen_tiledstream)); + memset_s(pGen, sizeof(hevc_gen_tiledstream), 0); pGen->pTiledBitstreams = (oneStream_info**)malloc(pParamGenTiledStream->tilesHeightCount * pParamGenTiledStream->tilesWidthCount * sizeof(oneStream_info *)); @@ -427,7 +428,7 @@ void* genTiledStream_Init(param_gen_tiledStream* pParamGenTiledStream) pGen->pTiledBitstreams[i*pGen->tilesWidthCount + j]->hevcSlice = (HEVCState*)malloc(sizeof(HEVCState)); if (pGen->pTiledBitstreams[i*pGen->tilesWidthCount + j]->hevcSlice) { - memset(pGen->pTiledBitstreams[i*pGen->tilesWidthCount + j]->hevcSlice, 0, sizeof(HEVCState)); + memset_s(pGen->pTiledBitstreams[i*pGen->tilesWidthCount + j]->hevcSlice, sizeof(HEVCState), 0); pGen->pTiledBitstreams[i*pGen->tilesWidthCount + j]->hevcSlice->sps_active_idx = -1; } @@ -512,7 +513,7 @@ int32_t genTiledStream_parseNals(param_gen_tiledStream* pParamGenTiledStream, v if (!pParamGenTiledStream || !pGenHandle) { ret = 1; - printf("the pointer of input paramter is NULL\n"); + SCVP_LOG(LOG_ERROR, "the pointer of input paramter is NULL\n"); return ret; } hevc_gen_tiledstream* pGenTilesStream = (hevc_gen_tiledstream*)pGenHandle; @@ -560,7 +561,7 @@ int32_t genTiledStream_getParam(void* pGenHandle, uint32_t id, uint8_t** pValu if (!pGenHandle || !pValue) { - printf("the input pointer is null\n"); + SCVP_LOG(LOG_ERROR, "the input pointer is null\n"); return -1; } hevc_gen_tiledstream* pGenTilesStream = (hevc_gen_tiledstream*)pGenHandle; @@ -588,7 +589,7 @@ int32_t genTiledStream_setParam(void* pGenHandle, uint32_t id, uint8_t* pValue { if (!pGenHandle || !pValue) { - printf("the input pointer is null\n"); + SCVP_LOG(LOG_WARNING, "the input pointer is null\n"); return 1; } diff --git a/src/360SCVP/360SCVPImpl.cpp b/src/360SCVP/360SCVPImpl.cpp index a0436eac..66fc4bce 100644 --- a/src/360SCVP/360SCVPImpl.cpp +++ b/src/360SCVP/360SCVPImpl.cpp @@ -27,15 +27,17 @@ #include "stdlib.h" #include "string.h" #include "assert.h" +#include #include "360SCVPTiledstreamAPI.h" #include "360SCVPViewportAPI.h" #include "360SCVPMergeStreamAPI.h" #include "360SCVPCommonDef.h" #include "360SCVPHevcEncHdr.h" +#include "360SCVPLog.h" +#include "TileSelectionPlugins_API.h" #include "360SCVPImpl.h" #include "360SCVPHevcTileMerge.h" - TstitchStream::TstitchStream() { m_pOutTile = new TileDef[1000]; @@ -46,12 +48,12 @@ TstitchStream::TstitchStream() 
m_hevcState = new HEVCState; if (m_hevcState) { - memset(m_hevcState, 0, sizeof(HEVCState)); + memset_s(m_hevcState, sizeof(HEVCState), 0); m_hevcState->sps_active_idx = -1; } - memset(&m_pViewportParam, 0, sizeof(generateViewPortParam)); - memset(&m_mergeStreamParam, 0, sizeof(param_mergeStream)); - memset(&m_streamStitch, 0, sizeof(param_gen_tiledStream)); + memset_s(&m_pViewportParam, sizeof(generateViewPortParam), 0); + memset_s(&m_mergeStreamParam, sizeof(param_mergeStream), 0); + memset_s(&m_streamStitch, sizeof(param_gen_tiledStream), 0); m_pViewport = NULL; m_pMergeStream = NULL; m_pSteamStitch = NULL; @@ -89,34 +91,39 @@ TstitchStream::TstitchStream() m_startCodesSize = 0; m_nalType = 0; m_projType = 0; - memset(&m_sliceType, 0, sizeof(SliceType)); + memset_s(&m_sliceType, sizeof(SliceType), 0); m_usedType = 0; m_xTopLeftNet = 0; m_yTopLeftNet = 0; m_dstRwpk = RegionWisePacking(); + m_pTileSelection = NULL; + m_pluginLibHdl = NULL; + m_createPlugin = NULL; + m_destroyPlugin = NULL; + m_bNeedPlugin = false; } TstitchStream::TstitchStream(TstitchStream& other) { m_pOutTile = new TileDef[1000]; - memcpy(m_pOutTile, other.m_pOutTile, 1000 * sizeof(TileDef)); + memcpy_s(m_pOutTile, 1000 * sizeof(TileDef), other.m_pOutTile, 1000 * sizeof(TileDef)); m_pUpLeft = new point[6]; - memcpy(m_pUpLeft, other.m_pUpLeft, 6 * sizeof(point)); + memcpy_s(m_pUpLeft, 6 * sizeof(point), other.m_pUpLeft, 6 * sizeof(point)); m_pDownRight = new point[6]; - memcpy(m_pDownRight, other.m_pDownRight, 6 * sizeof(point)); + memcpy_s(m_pDownRight, 6 * sizeof(point), other.m_pDownRight, 6 * sizeof(point)); m_pNalInfo[0] = new nal_info[1000]; - memcpy(m_pNalInfo[0], other.m_pNalInfo[0], 1000 * sizeof(nal_info)); + memcpy_s(m_pNalInfo[0], 1000 * sizeof(nal_info), other.m_pNalInfo[0], 1000 * sizeof(nal_info)); m_pNalInfo[1] = new nal_info[1000]; - memcpy(m_pNalInfo[1], other.m_pNalInfo[1], 1000 * sizeof(nal_info)); + memcpy_s(m_pNalInfo[1], 1000 * sizeof(nal_info), other.m_pNalInfo[1], 1000 * sizeof(nal_info)); m_hevcState = new HEVCState; if (m_hevcState) { - memcpy(m_hevcState, other.m_hevcState, sizeof(HEVCState)); + memcpy_s(m_hevcState, sizeof(HEVCState), other.m_hevcState, sizeof(HEVCState)); } - memcpy(&m_pViewportParam, &(other.m_pViewportParam), sizeof(generateViewPortParam)); - memcpy(&m_mergeStreamParam, &(other.m_mergeStreamParam), sizeof(param_mergeStream)); - memcpy(&m_streamStitch, &(other.m_streamStitch), sizeof(param_gen_tiledStream)); + memcpy_s(&m_pViewportParam, sizeof(generateViewPortParam), &(other.m_pViewportParam), sizeof(generateViewPortParam)); + memcpy_s(&m_mergeStreamParam, sizeof(param_mergeStream), &(other.m_mergeStreamParam), sizeof(param_mergeStream)); + memcpy_s(&m_streamStitch, sizeof(param_gen_tiledStream), &(other.m_streamStitch), sizeof(param_gen_tiledStream)); m_pViewport = NULL; m_pMergeStream = NULL; m_pSteamStitch = NULL; @@ -144,9 +151,9 @@ TstitchStream::TstitchStream(TstitchStream& other) m_tileHeightCountOri[0] = other.m_tileHeightCountOri[0]; m_tileHeightCountOri[1] = other.m_tileHeightCountOri[1]; m_specialInfo[0] = new unsigned char[200]; - memcpy(m_specialInfo[0], other.m_specialInfo[0], 200 * sizeof(unsigned char)); + memcpy_s(m_specialInfo[0], 200 * sizeof(unsigned char), other.m_specialInfo[0], 200 * sizeof(unsigned char)); m_specialInfo[1] = new unsigned char[200]; - memcpy(m_specialInfo[1], other.m_specialInfo[1], 200 * sizeof(unsigned char)); + memcpy_s(m_specialInfo[1], 200 * sizeof(unsigned char), other.m_specialInfo[1], 200 * sizeof(unsigned char)); 
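Throughout this patch the raw memset/memcpy calls are swapped for the bounded _s variants, which take the destination capacity as an explicit argument so an oversized copy is rejected instead of overflowing the buffer. Below is a minimal sketch of that convention, assuming the safestringlib-style signatures used in the hunks above (memset_s(dst, dmax, value) and memcpy_s(dst, dmax, src, count)); the copy_nal_info helper is purely illustrative and not part of the patch:

```cpp
#include "safe_mem_lib.h"  // assumed safestringlib header providing memset_s/memcpy_s

// Illustrative helper mirroring the pattern used for the m_pNalInfo arrays above.
static void copy_nal_info(nal_info *dst, const nal_info *src, size_t count)
{
    // The destination capacity is stated up front; the _s calls fail
    // instead of silently writing past the end of dst.
    memset_s(dst, count * sizeof(nal_info), 0);
    memcpy_s(dst, count * sizeof(nal_info), src, count * sizeof(nal_info));
}
```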
m_sliceHeaderLen = other.m_sliceHeaderLen; m_dstWidthNet = other.m_dstWidthNet; m_dstHeightNet = other.m_dstHeightNet; @@ -162,48 +169,121 @@ TstitchStream::TstitchStream(TstitchStream& other) m_data = NULL; m_startCodesSize = other.m_startCodesSize; m_nalType = other.m_nalType; - memcpy(&m_sliceType, &(other.m_sliceType), sizeof(SliceType)); + memcpy_s(&m_sliceType, sizeof(SliceType), &(other.m_sliceType), sizeof(SliceType)); m_usedType = other.m_usedType; m_xTopLeftNet = other.m_xTopLeftNet; m_yTopLeftNet = other.m_yTopLeftNet; m_dstRwpk = RegionWisePacking(); m_dstRwpk = other.m_dstRwpk; + m_pTileSelection = NULL; + m_pluginLibHdl = NULL; + m_createPlugin = NULL; + m_destroyPlugin = NULL; + m_bNeedPlugin = false; } -TstitchStream::~TstitchStream() +TstitchStream& TstitchStream::operator=(const TstitchStream& other) { - if (m_pOutTile) { - delete []m_pOutTile; - m_pOutTile = nullptr; - } - if (m_pUpLeft) { - delete []m_pUpLeft; - m_pUpLeft = nullptr; - } - if (m_pDownRight) { - delete []m_pDownRight; - m_pDownRight = nullptr; - } - if (m_pNalInfo[0]) { - delete []m_pNalInfo[0]; - m_pNalInfo[0] = nullptr; - } - if (m_pNalInfo[1]) { - delete []m_pNalInfo[1]; - m_pNalInfo[1] = nullptr; - } - if (m_hevcState) { - delete m_hevcState; - m_hevcState = nullptr; - } - if (m_specialInfo[0]) { - delete []m_specialInfo[0]; - m_specialInfo[0] = nullptr; - } - if (m_specialInfo[1]) { - delete []m_specialInfo[1]; - m_specialInfo[1] = nullptr; + if (&other == this) + return *this; + SAFE_DELETE_ARRAY(m_pOutTile); + m_pOutTile = new TileDef[1000]; + memcpy_s(m_pOutTile, 1000 * sizeof(TileDef), other.m_pOutTile, 1000 * sizeof(TileDef)); + SAFE_DELETE_ARRAY(m_pUpLeft); + m_pUpLeft = new point[6]; + memcpy_s(m_pUpLeft, 6 * sizeof(point), other.m_pUpLeft, 6 * sizeof(point)); + SAFE_DELETE_ARRAY(m_pDownRight); + m_pDownRight = new point[6]; + memcpy_s(m_pDownRight, 6 * sizeof(point), other.m_pDownRight, 6 * sizeof(point)); + SAFE_DELETE_ARRAY(m_pNalInfo[0]); + m_pNalInfo[0] = new nal_info[1000]; + memcpy_s(m_pNalInfo[0], 1000 * sizeof(nal_info), other.m_pNalInfo[0], 1000 * sizeof(nal_info)); + SAFE_DELETE_ARRAY(m_pNalInfo[1]); + m_pNalInfo[1] = new nal_info[1000]; + memcpy_s(m_pNalInfo[1], 1000 * sizeof(nal_info), other.m_pNalInfo[1], 1000 * sizeof(nal_info)); + SAFE_DELETE(m_hevcState); + m_hevcState = new HEVCState; + if (m_hevcState) + { + memcpy_s(m_hevcState, sizeof(HEVCState), other.m_hevcState, sizeof(HEVCState)); } + + memcpy_s(&m_pViewportParam, sizeof(generateViewPortParam), &(other.m_pViewportParam), sizeof(generateViewPortParam)); + memcpy_s(&m_mergeStreamParam, sizeof(param_mergeStream), &(other.m_mergeStreamParam), sizeof(param_mergeStream)); + memcpy_s(&m_streamStitch, sizeof(param_gen_tiledStream), &(other.m_streamStitch), sizeof(param_gen_tiledStream)); + m_pViewport = NULL; + m_pMergeStream = NULL; + m_pSteamStitch = NULL; + m_seiFramePacking_enable = other.m_seiFramePacking_enable; + m_seiPayloadType = other.m_seiPayloadType; + m_seiProj_enable = other.m_seiProj_enable; + m_seiSphereRot_enable = other.m_seiSphereRot_enable; + m_seiRWPK_enable = other.m_seiRWPK_enable; + m_seiViewport_enable = other.m_seiViewport_enable; + m_projType = other.m_projType; + m_specialDataLen[0] = other.m_specialDataLen[0]; + m_specialDataLen[1] = other.m_specialDataLen[1]; + m_hrTilesInRow = other.m_hrTilesInRow; + m_hrTilesInCol = other.m_hrTilesInCol; + m_lrTilesInRow = other.m_lrTilesInRow; + m_lrTilesInCol = other.m_lrTilesInCol; + m_bSPSReady = other.m_bSPSReady; + m_bPPSReady = other.m_bPPSReady; + 
m_tileWidthCountSel[0] = other.m_tileWidthCountSel[0]; + m_tileWidthCountSel[1] = other.m_tileWidthCountSel[1]; + m_tileHeightCountSel[0] = other.m_tileHeightCountSel[0]; + m_tileHeightCountSel[1] = other.m_tileHeightCountSel[1]; + m_tileWidthCountOri[0] = other.m_tileWidthCountOri[0]; + m_tileWidthCountOri[1] = other.m_tileWidthCountOri[1]; + m_tileHeightCountOri[0] = other.m_tileHeightCountOri[0]; + m_tileHeightCountOri[1] = other.m_tileHeightCountOri[1]; + SAFE_DELETE_ARRAY(m_specialInfo[0]); + m_specialInfo[0] = new unsigned char[200]; + memcpy_s(m_specialInfo[0], 200 * sizeof(unsigned char), other.m_specialInfo[0], 200 * sizeof(unsigned char)); + SAFE_DELETE_ARRAY(m_specialInfo[1]); + m_specialInfo[1] = new unsigned char[200]; + memcpy_s(m_specialInfo[1], 200 * sizeof(unsigned char), other.m_specialInfo[1], 200 * sizeof(unsigned char)); + m_sliceHeaderLen = other.m_sliceHeaderLen; + m_dstWidthNet = other.m_dstWidthNet; + m_dstHeightNet = other.m_dstHeightNet; + m_maxSelTiles = other.m_maxSelTiles; + m_pRWPK = NULL; + m_pSeiViewport = other.m_pSeiViewport; + m_pFramePacking = other.m_pFramePacking; + m_pSphereRot = other.m_pSphereRot; + m_pSeiViewport = other.m_pSeiViewport; + m_viewportDestWidth = other.m_viewportDestWidth; + m_viewportDestHeight = other.m_viewportDestHeight; + m_dataSize = 0; + m_data = NULL; + m_startCodesSize = other.m_startCodesSize; + m_nalType = other.m_nalType; + memcpy_s(&m_sliceType, sizeof(SliceType), &(other.m_sliceType), sizeof(SliceType)); + m_usedType = other.m_usedType; + m_xTopLeftNet = other.m_xTopLeftNet; + m_yTopLeftNet = other.m_yTopLeftNet; + m_dstRwpk = RegionWisePacking(); + m_dstRwpk = other.m_dstRwpk; + m_pTileSelection = NULL; + m_pluginLibHdl = NULL; + m_createPlugin = NULL; + m_destroyPlugin = NULL; + m_bNeedPlugin = false; + + return *this; +} + +TstitchStream::~TstitchStream() +{ + SAFE_DELETE_ARRAY(m_pOutTile); + SAFE_DELETE_ARRAY(m_pUpLeft); + SAFE_DELETE_ARRAY(m_pDownRight); + SAFE_DELETE_ARRAY(m_pNalInfo[0]); + SAFE_DELETE_ARRAY(m_pNalInfo[1]); + + SAFE_DELETE(m_hevcState); + SAFE_DELETE_ARRAY(m_specialInfo[0]); + SAFE_DELETE_ARRAY(m_specialInfo[1]); } int32_t TstitchStream::initViewport(Param_ViewPortInfo* pViewPortInfo, int32_t tilecolCount, int32_t tilerowCount) @@ -224,8 +304,67 @@ int32_t TstitchStream::initViewport(Param_ViewPortInfo* pViewPortInfo, int32_t t m_pViewportParam.m_viewPort_fYaw = pViewPortInfo->viewPortYaw; m_pViewportParam.m_viewPort_hFOV = pViewPortInfo->viewPortFOVH; m_pViewportParam.m_viewPort_vFOV = pViewPortInfo->viewPortFOVV; + m_pViewportParam.m_usageType = pViewPortInfo->usageType; + if (m_pViewportParam.m_input_geoType == E_SVIDEO_EQUIRECT) + { + if (pViewPortInfo->paramVideoFP.rows != 1 || pViewPortInfo->paramVideoFP.cols != 1) + SCVP_LOG(LOG_WARNING, "Viewport rows and cols number is illegal!!! 
Set them to default 1x1 !!!\n"); + + m_pViewportParam.m_paramVideoFP.cols = 1; + m_pViewportParam.m_paramVideoFP.rows = 1; + m_pViewportParam.m_paramVideoFP.faces[0][0].faceHeight = pViewPortInfo->faceHeight; + m_pViewportParam.m_paramVideoFP.faces[0][0].faceWidth = pViewPortInfo->faceWidth; + m_pViewportParam.m_paramVideoFP.faces[0][0].idFace = 1; + m_pViewportParam.m_paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + } + else if (m_pViewportParam.m_input_geoType == E_SVIDEO_CUBEMAP) + { + /* Check whether the paramVideoFP rows / cols exceed the maximum array size for Cubemap projections */ + if (pViewPortInfo->paramVideoFP.rows > 6 || pViewPortInfo->paramVideoFP.cols > 6 + || pViewPortInfo->paramVideoFP.rows <= 0 || pViewPortInfo->paramVideoFP.cols <= 0 ) { + SCVP_LOG(LOG_ERROR, "Viewport rows and cols are not suitable for Cubemap: rows is %d, cols is %d\n", pViewPortInfo->paramVideoFP.rows, pViewPortInfo->paramVideoFP.cols); + return ERROR_BAD_PARAM; + } + else { + m_pViewportParam.m_paramVideoFP.cols = pViewPortInfo->paramVideoFP.cols; + m_pViewportParam.m_paramVideoFP.rows = pViewPortInfo->paramVideoFP.rows; + for (int i = 0; i < pViewPortInfo->paramVideoFP.rows; i++) { + for (int j = 0; j < pViewPortInfo->paramVideoFP.cols; j++) { + m_pViewportParam.m_paramVideoFP.faces[i][j].faceHeight = pViewPortInfo->paramVideoFP.faces[i][j].faceHeight; + m_pViewportParam.m_paramVideoFP.faces[i][j].faceWidth = pViewPortInfo->paramVideoFP.faces[i][j].faceWidth; + m_pViewportParam.m_paramVideoFP.faces[i][j].idFace = pViewPortInfo->paramVideoFP.faces[i][j].idFace; + m_pViewportParam.m_paramVideoFP.faces[i][j].rotFace = pViewPortInfo->paramVideoFP.faces[i][j].rotFace; + } + } + } + } + else if (m_pViewportParam.m_input_geoType == E_SVIDEO_PLANAR) + { + if (pViewPortInfo->paramVideoFP.rows != 1 || pViewPortInfo->paramVideoFP.cols != 1) + SCVP_LOG(LOG_WARNING, "Viewport rows and cols number is illegal!!! 
Set them to default 1x1 !!!\n"); + m_pViewportParam.m_paramVideoFP.cols = 1; + m_pViewportParam.m_paramVideoFP.rows = 1; + m_pViewportParam.m_paramVideoFP.faces[0][0].faceHeight = pViewPortInfo->paramVideoFP.faces[0][0].faceHeight; + m_pViewportParam.m_paramVideoFP.faces[0][0].faceWidth = pViewPortInfo->paramVideoFP.faces[0][0].faceWidth; + m_pViewportParam.m_paramVideoFP.faces[0][0].idFace = pViewPortInfo->paramVideoFP.faces[0][0].idFace; + m_pViewportParam.m_paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + } + else { + SCVP_LOG(LOG_ERROR, "The Input GeoType %d is not supported by viewport implementation!\n", m_pViewportParam.m_input_geoType); + return ERROR_BAD_PARAM; + } + m_pViewport = genViewport_Init(&m_pViewportParam); - return 0; + return ERROR_NONE; +} + +int32_t TstitchStream::SetLogCallBack(LogFunction logFunction) +{ + if (!logFunction) + return OMAF_ERROR_NULL_PTR; + + logCallBack = logFunction; + return ERROR_NONE; } int32_t TstitchStream::initMerge(param_360SCVP* pParamStitchStream, int32_t sliceSize) @@ -277,35 +416,186 @@ int32_t TstitchStream::initMerge(param_360SCVP* pParamStitchStream, int32_t slic m_mergeStreamParam.highRes.pHeader = (param_oneStream_info *)malloc(sizeof(param_oneStream_info)); m_mergeStreamParam.lowRes.pHeader = (param_oneStream_info *)malloc(sizeof(param_oneStream_info)); - m_mergeStreamParam.highRes.pTiledBitstreams = (param_oneStream_info **)malloc(HR_ntile * sizeof(param_oneStream_info *)); - if (m_mergeStreamParam.highRes.pHeader) - { - m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer = (uint8_t *)malloc(sliceSize); + + if (!m_mergeStreamParam.highRes.pHeader || !m_mergeStreamParam.lowRes.pHeader) { + SCVP_LOG(LOG_ERROR, "Init Merge Failed: pHeader of highRes or lowRes is NULL\n"); + if (m_mergeStreamParam.highRes.pHeader) { + free(m_mergeStreamParam.highRes.pHeader); + m_mergeStreamParam.highRes.pHeader = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader) { + free(m_mergeStreamParam.lowRes.pHeader); + m_mergeStreamParam.lowRes.pHeader = NULL; + } + + return -1; } - if (m_mergeStreamParam.lowRes.pHeader) - { - m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer = (uint8_t *)malloc(100); + + m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer = (uint8_t *)malloc(sliceSize); + m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer = (uint8_t *)malloc(100); + + if (!m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer || !m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer) { + SCVP_LOG(LOG_ERROR, "Init Merge Failed: Tiled Bitstream Buffer of highRes or lowRes is not allocated\n"); + + if (m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.highRes.pHeader) { + free(m_mergeStreamParam.highRes.pHeader); + m_mergeStreamParam.highRes.pHeader = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader) { + free(m_mergeStreamParam.lowRes.pHeader); + m_mergeStreamParam.lowRes.pHeader = NULL; + } + return -1; } - if (!m_mergeStreamParam.highRes.pHeader || !m_mergeStreamParam.lowRes.pHeader - || !m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer - || !m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer - || 
!m_mergeStreamParam.highRes.pTiledBitstreams) + m_mergeStreamParam.highRes.pTiledBitstreams = (param_oneStream_info **)malloc(HR_ntile * sizeof(param_oneStream_info *)); + if (!m_mergeStreamParam.highRes.pTiledBitstreams) { + SCVP_LOG(LOG_ERROR, "Init Merge Failed: Tiled Bitstreams of highRes is not allocated\n"); + + if (m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.highRes.pHeader) { + free(m_mergeStreamParam.highRes.pHeader); + m_mergeStreamParam.highRes.pHeader = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader) { + free(m_mergeStreamParam.lowRes.pHeader); + m_mergeStreamParam.lowRes.pHeader = NULL; + } return -1; + } + for (int32_t i = 0; i < HR_ntile; i++) { m_mergeStreamParam.highRes.pTiledBitstreams[i] = (param_oneStream_info *)malloc(sizeof(param_oneStream_info)); - if (!m_mergeStreamParam.highRes.pTiledBitstreams[i]) + if (!m_mergeStreamParam.highRes.pTiledBitstreams[i]) { + for (int32_t j = 0; j < i; j++) { + if (m_mergeStreamParam.highRes.pTiledBitstreams[j]) { + free(m_mergeStreamParam.highRes.pTiledBitstreams[j]); + m_mergeStreamParam.highRes.pTiledBitstreams[j] = NULL; + } + } + + free(m_mergeStreamParam.highRes.pTiledBitstreams); + m_mergeStreamParam.highRes.pTiledBitstreams = NULL; + + if (m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.highRes.pHeader) { + free(m_mergeStreamParam.highRes.pHeader); + m_mergeStreamParam.highRes.pHeader = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader) { + free(m_mergeStreamParam.lowRes.pHeader); + m_mergeStreamParam.lowRes.pHeader = NULL; + } return -1; + } } m_mergeStreamParam.lowRes.pTiledBitstreams = (param_oneStream_info **)malloc(LR_ntile * sizeof(param_oneStream_info *)); if (!m_mergeStreamParam.lowRes.pTiledBitstreams) + { + if (m_mergeStreamParam.highRes.pTiledBitstreams) + { + for (int32_t i = 0; i < HR_ntile; i++) + { + if (m_mergeStreamParam.highRes.pTiledBitstreams[i]) + { + free(m_mergeStreamParam.highRes.pTiledBitstreams[i]); + m_mergeStreamParam.highRes.pTiledBitstreams[i] = NULL; + } + } + free(m_mergeStreamParam.highRes.pTiledBitstreams); + m_mergeStreamParam.highRes.pTiledBitstreams = NULL; + } + + if (m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.highRes.pHeader) { + free(m_mergeStreamParam.highRes.pHeader); + m_mergeStreamParam.highRes.pHeader = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader) { + free(m_mergeStreamParam.lowRes.pHeader); + m_mergeStreamParam.lowRes.pHeader = NULL; + } return 
-1; + } for (int32_t i = 0; i < LR_ntile; i++) { m_mergeStreamParam.lowRes.pTiledBitstreams[i] = (param_oneStream_info *)malloc(sizeof(param_oneStream_info)); if (!m_mergeStreamParam.lowRes.pTiledBitstreams[i]) + { + for (int32_t j = 0; j < i; j++) + { + if (m_mergeStreamParam.lowRes.pTiledBitstreams[j]) + { + free(m_mergeStreamParam.lowRes.pTiledBitstreams[j]); + m_mergeStreamParam.lowRes.pTiledBitstreams[j] = NULL; + } + } + + free(m_mergeStreamParam.lowRes.pTiledBitstreams); + m_mergeStreamParam.lowRes.pTiledBitstreams = NULL; + + if (m_mergeStreamParam.highRes.pTiledBitstreams) + { + for (int32_t i = 0; i < HR_ntile; i++) + { + if (m_mergeStreamParam.highRes.pTiledBitstreams[i]) + { + free(m_mergeStreamParam.highRes.pTiledBitstreams[i]); + m_mergeStreamParam.highRes.pTiledBitstreams[i] = NULL; + } + } + free(m_mergeStreamParam.highRes.pTiledBitstreams); + m_mergeStreamParam.highRes.pTiledBitstreams = NULL; + } + + if (m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.highRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer) { + free(m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer); + m_mergeStreamParam.lowRes.pHeader->pTiledBitstreamBuffer = NULL; + } + if (m_mergeStreamParam.highRes.pHeader) { + free(m_mergeStreamParam.highRes.pHeader); + m_mergeStreamParam.highRes.pHeader = NULL; + } + if (m_mergeStreamParam.lowRes.pHeader) { + free(m_mergeStreamParam.lowRes.pHeader); + m_mergeStreamParam.lowRes.pHeader = NULL; + } return -1; + } } m_pMergeStream = tile_merge_Init(&m_mergeStreamParam); @@ -321,9 +611,96 @@ int32_t TstitchStream::init(param_360SCVP* pParamStitchStream) m_specialDataLen[1] = 0; m_usedType = pParamStitchStream->usedType; m_dstRwpk.rectRegionPacking = NULL; + pParamStitchStream->paramViewPort.usageType = (UsageType)(pParamStitchStream->usedType); + m_pViewportParam.m_input_geoType = pParamStitchStream->paramViewPort.geoTypeInput; + + if (pParamStitchStream->logFunction) + logCallBack = (LogFunction)(pParamStitchStream->logFunction); + else + logCallBack = GlogFunction; + if (m_usedType == E_PARSER_FOR_CLIENT) { return ret; + } + if (pParamStitchStream->paramViewPort.geoTypeInput == E_SVIDEO_EQUIRECT) + { + pParamStitchStream->paramViewPort.paramVideoFP.cols = 1; + pParamStitchStream->paramViewPort.paramVideoFP.rows = 1; + pParamStitchStream->paramViewPort.paramVideoFP.faces[0][0].faceHeight = pParamStitchStream->paramViewPort.faceHeight; + pParamStitchStream->paramViewPort.paramVideoFP.faces[0][0].faceWidth = pParamStitchStream->paramViewPort.faceWidth; + pParamStitchStream->paramViewPort.paramVideoFP.faces[0][0].idFace = 1; + pParamStitchStream->paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + } + else if (pParamStitchStream->paramViewPort.geoTypeInput == E_SVIDEO_PLANAR) + { + char* pluginLibPath = pParamStitchStream->pluginDef.pluginLibPath; + m_bNeedPlugin = true; + if (!pluginLibPath) { + SCVP_LOG(LOG_ERROR, "The plugin library file path is NULL!\n"); + return OMAF_INVALID_PLUGIN_PARAM; + } + void* libHandler = dlopen(pluginLibPath, RTLD_LAZY); + const char *dlsymErr = dlerror(); + if (!libHandler) + { + SCVP_LOG(LOG_ERROR,"failed to open tile selection library path!\n"); + return OMAF_ERROR_DLOPEN; + } + + if (dlsymErr) { + SCVP_LOG(LOG_ERROR, "Get error msg when load the plugin lib file: %s\n", dlsymErr); + return OMAF_ERROR_DLSYM; + } + + CreateTileSelection *createTS = NULL; + 
createTS = (CreateTileSelection*)dlsym(libHandler, "Create"); + dlsymErr = dlerror(); + if (dlsymErr) { + SCVP_LOG(LOG_ERROR, "Failed to load symbol Create: %s\n", dlsymErr); + return OMAF_ERROR_DLSYM; + } + + if (!createTS) { + SCVP_LOG(LOG_ERROR, "NULL Tile Selection Creator !\n"); + return ERROR_NULL_PTR; + } + TileSelection *tileSelection = createTS(); + if (!tileSelection) { + SCVP_LOG(LOG_ERROR,"failed to Create TileSelection Handler!\n"); + dlclose(libHandler); + libHandler = NULL; + return ERROR_NULL_PTR; + } + + DestroyTileSelection *destroyTS = NULL; + destroyTS = (DestroyTileSelection*)dlsym(libHandler, "Destroy"); + dlsymErr = dlerror(); + if (dlsymErr) + { + SCVP_LOG(LOG_ERROR, "Failed to load symbol Destroy for TileSelection!\n"); + return OMAF_ERROR_DLSYM; + } + if (!destroyTS) { + SCVP_LOG(LOG_ERROR, "NULL Destroy TileSelection!\n"); + return ERROR_NULL_PTR; + } + m_pTileSelection = tileSelection; + m_pluginLibHdl = libHandler; + m_createPlugin = (void*)createTS; + m_destroyPlugin = (void*)destroyTS; + ret = m_pTileSelection->Initialize(pParamStitchStream); + if (ret) { + SCVP_LOG(LOG_ERROR, "Failed to Initialize Tile Selection Plugin with error code %d\n", ret); + } + } + + if (m_usedType == E_VIEWPORT_ONLY) + { + // Init the viewport library + ret = initViewport(&pParamStitchStream->paramViewPort, pParamStitchStream->paramViewPort.tileNumCol * pParamStitchStream->paramViewPort.paramVideoFP.cols, + pParamStitchStream->paramViewPort.tileNumRow * pParamStitchStream->paramViewPort.paramVideoFP.rows); + return ret; } if (m_usedType == E_STREAM_STITCH_ONLY) { @@ -350,7 +727,7 @@ int32_t TstitchStream::init(param_360SCVP* pParamStitchStream) int32_t tilerowCount = m_tileHeightCountOri[0]; // Init the viewport library - ret = initViewport(&pParamStitchStream->paramViewPort, tilecolCount, tilerowCount); + ret = initViewport(&pParamStitchStream->paramViewPort, tilecolCount* pParamStitchStream->paramViewPort.paramVideoFP.cols, tilerowCount* pParamStitchStream->paramViewPort.paramVideoFP.rows); int32_t sliceHeight = pParamStitchStream->paramViewPort.faceHeight / tilerowCount; int32_t sliceWidth = pParamStitchStream->paramViewPort.faceWidth / tilecolCount; @@ -386,57 +763,74 @@ int32_t TstitchStream::uninit() m_mergeStreamParam.lowRes.pHeader = NULL; } - for (int32_t i = 0; i < HR_ntile; i++) + if (m_mergeStreamParam.highRes.pTiledBitstreams) { - if (m_mergeStreamParam.highRes.pTiledBitstreams[i]) + for (int32_t i = 0; i < HR_ntile; i++) { - free(m_mergeStreamParam.highRes.pTiledBitstreams[i]); - m_mergeStreamParam.highRes.pTiledBitstreams[i] = NULL; + if (m_mergeStreamParam.highRes.pTiledBitstreams[i]) + { + free(m_mergeStreamParam.highRes.pTiledBitstreams[i]); + m_mergeStreamParam.highRes.pTiledBitstreams[i] = NULL; + } } + free(m_mergeStreamParam.highRes.pTiledBitstreams); + m_mergeStreamParam.highRes.pTiledBitstreams = NULL; } - for (int32_t i = 0; i < LR_ntile; i++) + if (m_mergeStreamParam.lowRes.pTiledBitstreams) { - if (m_mergeStreamParam.lowRes.pTiledBitstreams[i]) + for (int32_t i = 0; i < LR_ntile; i++) { - free(m_mergeStreamParam.lowRes.pTiledBitstreams[i]); - m_mergeStreamParam.lowRes.pTiledBitstreams[i] = NULL; + if (m_mergeStreamParam.lowRes.pTiledBitstreams[i]) + { + free(m_mergeStreamParam.lowRes.pTiledBitstreams[i]); + m_mergeStreamParam.lowRes.pTiledBitstreams[i] = NULL; + } } + free(m_mergeStreamParam.lowRes.pTiledBitstreams); + m_mergeStreamParam.lowRes.pTiledBitstreams = NULL; } + ret = tile_merge_Close(m_pMergeStream); } if(m_pViewport) ret |= 
genViewport_unInit(m_pViewport); if (m_pSteamStitch) ret |= genTiledStream_unInit(m_pSteamStitch); - if (m_pOutTile) - delete[]m_pOutTile; - m_pOutTile = NULL; - if (m_pUpLeft) - delete[]m_pUpLeft; - m_pUpLeft = NULL; - if (m_pDownRight) - delete[]m_pDownRight; - m_pDownRight = NULL; - if(m_pNalInfo[0]) - delete[]m_pNalInfo[0]; - m_pNalInfo[0] = NULL; - if(m_pNalInfo[1]) - delete[]m_pNalInfo[1]; - m_pNalInfo[1] = NULL; + SAFE_DELETE_ARRAY(m_pOutTile); + SAFE_DELETE_ARRAY(m_pUpLeft); + SAFE_DELETE_ARRAY(m_pDownRight); + SAFE_DELETE_ARRAY(m_pNalInfo[0]); + SAFE_DELETE_ARRAY(m_pNalInfo[1]); + + SAFE_DELETE(m_hevcState); + SAFE_DELETE_ARRAY(m_specialInfo[0]); + SAFE_DELETE_ARRAY(m_specialInfo[1]); + + SAFE_DELETE_ARRAY(m_dstRwpk.rectRegionPacking); + + if (m_pTileSelection) { + ret = m_pTileSelection->UnInit(); + if (ret != ERROR_NONE) { + SCVP_LOG(LOG_ERROR, "Tile Selection Uninitialization is Failed with error code %d\n", ret); + return ret; + } + DestroyTileSelection *destroyTS = NULL; - if (m_hevcState) - delete m_hevcState; - m_hevcState = NULL; - if (m_specialInfo[0]) - delete[]m_specialInfo[0]; - m_specialInfo[0] = NULL; - if (m_specialInfo[1]) - delete[]m_specialInfo[1]; - m_specialInfo[1] = NULL; - - if (m_dstRwpk.rectRegionPacking) - delete[]m_dstRwpk.rectRegionPacking; - m_dstRwpk.rectRegionPacking = NULL; + destroyTS = (DestroyTileSelection*)m_destroyPlugin; + if (!destroyTS) + { + SCVP_LOG(LOG_ERROR, "NULL Destroy TileSelection!\n"); + return ERROR_NULL_PTR; + } + void* pluginHdl = m_pluginLibHdl; + (*destroyTS)(m_pTileSelection); + if (pluginHdl != NULL) + dlclose(pluginHdl); + m_createPlugin = NULL; + m_destroyPlugin = NULL; + m_pluginLibHdl = NULL; + ret = ERROR_NONE; + } return ret; } @@ -450,7 +844,7 @@ int32_t TstitchStream::parseNals(param_360SCVP* pParamStitchStream, int32_t pars param_oneStream_info TiledBitstream; void* pGenStream; - memset(&GenStreamParam, 0, sizeof(param_gen_tiledStream)); + memset_s(&GenStreamParam, sizeof(param_gen_tiledStream), 0); GenStreamParam.pts = 0; GenStreamParam.tilesHeightCount = 1; GenStreamParam.tilesWidthCount = 1; @@ -471,8 +865,8 @@ int32_t TstitchStream::parseNals(param_360SCVP* pParamStitchStream, int32_t pars } if (m_specialDataLen[streamIdx] > 0) { - memmove(TiledBitstream.pTiledBitstreamBuffer + m_specialDataLen[streamIdx], TiledBitstream.pTiledBitstreamBuffer, pParamStitchStream->inputBitstreamLen); - memcpy(TiledBitstream.pTiledBitstreamBuffer, m_specialInfo[streamIdx], m_specialDataLen[streamIdx]); + memmove_s(TiledBitstream.pTiledBitstreamBuffer + m_specialDataLen[streamIdx], pParamStitchStream->inputBitstreamLen, TiledBitstream.pTiledBitstreamBuffer, pParamStitchStream->inputBitstreamLen); + memcpy_s(TiledBitstream.pTiledBitstreamBuffer, m_specialDataLen[streamIdx], m_specialInfo[streamIdx], m_specialDataLen[streamIdx]); TiledBitstream.inputBufferLen += m_specialDataLen[streamIdx]; } } @@ -499,27 +893,27 @@ int32_t TstitchStream::parseNals(param_360SCVP* pParamStitchStream, int32_t pars oneStream_info * pSlice = pGenTilesStream->pTiledBitstreams[0]; if (((pGenTilesStream->parseType == E_PARSER_ONENAL)) && m_bSPSReady && m_bPPSReady) { - memcpy(pSlice->hevcSlice->sps, m_hevcState->sps, 6 * sizeof(HEVC_SPS)); + memcpy_s(pSlice->hevcSlice->sps, 6 * sizeof(HEVC_SPS), m_hevcState->sps, 6 * sizeof(HEVC_SPS)); pSlice->hevcSlice->last_parsed_sps_id = m_hevcState->last_parsed_sps_id; - memcpy(pSlice->hevcSlice->pps, m_hevcState->pps, 16 * sizeof(HEVC_PPS)); + memcpy_s(pSlice->hevcSlice->pps, 16 * sizeof(HEVC_PPS), m_hevcState->pps, 16 
* sizeof(HEVC_PPS)); pSlice->hevcSlice->last_parsed_pps_id = m_hevcState->last_parsed_pps_id; } genTiledStream_parseNals(&GenStreamParam, pGenStream); if(pGenTilesStream->parseType != E_PARSER_ONENAL) - memcpy(m_hevcState, pSlice->hevcSlice, sizeof(HEVCState)); + memcpy_s(m_hevcState, sizeof(HEVCState), pSlice->hevcSlice, sizeof(HEVCState)); else { if (GenStreamParam.nalType == GTS_HEVC_NALU_SEQ_PARAM) { - memcpy(m_hevcState->sps, pSlice->hevcSlice->sps, 16 * sizeof(HEVC_SPS)); + memcpy_s(m_hevcState->sps, 16 * sizeof(HEVC_SPS), pSlice->hevcSlice->sps, 16 * sizeof(HEVC_SPS)); m_hevcState->last_parsed_sps_id = pSlice->hevcSlice->last_parsed_sps_id; m_bSPSReady = 1; } if (GenStreamParam.nalType == GTS_HEVC_NALU_PIC_PARAM) { - memcpy(m_hevcState->pps, pSlice->hevcSlice->pps, 16 * sizeof(HEVC_PPS)); + memcpy_s(m_hevcState->pps, 16 * sizeof(HEVC_PPS), pSlice->hevcSlice->pps, 16 * sizeof(HEVC_PPS)); m_hevcState->last_parsed_pps_id = pSlice->hevcSlice->last_parsed_pps_id; m_bPPSReady = 1; } @@ -531,7 +925,7 @@ int32_t TstitchStream::parseNals(param_360SCVP* pParamStitchStream, int32_t pars if (GenStreamParam.specialLen) { m_specialDataLen[streamIdx] = GenStreamParam.specialLen; - memcpy(m_specialInfo[streamIdx], TiledBitstream.pTiledBitstreamBuffer, m_specialDataLen[streamIdx]); + memcpy_s(m_specialInfo[streamIdx], m_specialDataLen[streamIdx], TiledBitstream.pTiledBitstreamBuffer, m_specialDataLen[streamIdx]); } m_specialDataLen[streamIdx] = GenStreamParam.specialLen; m_nalType = GenStreamParam.nalType; @@ -561,7 +955,6 @@ int32_t TstitchStream::feedParamToGenStream(param_360SCVP* pParamStitchStream) param_oneStream_info *pTmpLowHdr = m_mergeStreamParam.lowRes.pHeader; pTmpLowHdr->pTiledBitstreamBuffer = pParamStitchStream->pInputLowBitstream; pTmpLowHdr->inputBufferLen = m_specialDataLen[1]; - printf("the tiled idx="); tile_merge_reset(m_pMergeStream); @@ -579,12 +972,11 @@ int32_t TstitchStream::feedParamToGenStream(param_360SCVP* pParamStitchStream) pTmpHigh[idx]->inputBufferLen = (pTmpTile->idx!=0) ? m_pNalInfo[0][pTmpTile->idx].nalLen : m_pNalInfo[0][pTmpTile->idx].nalLen- m_specialDataLen[0]; pTmpHigh[idx]->pTiledBitstreamBuffer = (pTmpTile->idx != 0) ? m_pNalInfo[0][pTmpTile->idx].pNalStream : m_pNalInfo[0][pTmpTile->idx].pNalStream + m_specialDataLen[0]; - printf(" %d ", pTmpTile->idx); + SCVP_LOG(LOG_INFO, "Get the %d-th tile\n", pTmpTile->idx); pTmpTile++; idx++; } } - idx = 0; for (int32_t i = 0; i < m_tileHeightCountSel[1]; i++) { @@ -595,11 +987,10 @@ pTmpLow[idx]->inputBufferLen = (idx != 0) ? m_pNalInfo[1][idx].nalLen : m_pNalInfo[1][idx].nalLen - m_specialDataLen[1]; pTmpLow[idx]->pTiledBitstreamBuffer = (idx != 0) ? 
m_pNalInfo[1][idx].pNalStream : m_pNalInfo[1][idx].pNalStream + m_specialDataLen[1]; - printf(" %d ", idx); + SCVP_LOG(LOG_INFO, "Get the %d th tile \n", idx); idx++; } } - printf("\n"); return 0; } @@ -608,13 +999,24 @@ int32_t TstitchStream::getViewPortTiles() if (!m_pViewport) return -1; int32_t ret = 0; - ret = genViewport_process(&m_pViewportParam, m_pViewport); + + for (int i = 0; i < 6; i++) + { + m_pUpLeft[i].faceId = -1; + m_pDownRight[i].faceId = -1; + } + + if(m_pViewportParam.m_input_geoType == E_SVIDEO_EQUIRECT) + ret = genViewport_postprocess(&m_pViewportParam, m_pViewport); + else + ret = 0; if (ret) { - printf("gen viewport process error!\n"); + SCVP_LOG(LOG_ERROR, "gen viewport process error!\n"); return -1; } - ret = genViewport_getFixedNumTiles(m_pViewport, m_pOutTile); + if(m_usedType == E_MERGE_AND_VIEWPORT) + ret = genViewport_getFixedNumTiles(m_pViewport, m_pOutTile); if (m_pViewportParam.m_input_geoType == SVIDEO_EQUIRECT) { int32_t widthViewport = 0; @@ -641,11 +1043,36 @@ TileDef* TstitchStream::getSelectedTile() { return m_pOutTile; } -int32_t TstitchStream::setViewPort(float yaw, float pitch) + +int32_t TstitchStream::getTilesByLegacyWay(TileDef* pOutTile) { - return genViewport_setViewPort(m_pViewport, yaw, pitch); + if (!m_pViewport) { + LOG(WARNING) << "Viewport is not allocated"; + return -1; + } + int32_t ret = 0; + + ret = genViewport_process(&m_pViewportParam, m_pViewport); + if (ret == -1) { + LOG(WARNING) << "Error returned when using the traditional way to calculate tiles!"; + return ret; + } + ret = genViewport_getTilesInViewportByLegacyWay(m_pViewport, pOutTile); + return ret; } +int32_t TstitchStream::setViewPort(HeadPose *pose) +{ + int32_t ret = 0; + if (m_pTileSelection) { + ret = m_pTileSelection->SetViewportInfo(pose); + return ret; + } + else if (m_bNeedPlugin) + return SCVP_ERROR_PLUGIN_NOEXIST; + else + return genViewport_setViewPort(m_pViewport, pose->yaw, pose->pitch); +} int32_t TstitchStream::doMerge(param_360SCVP* pParamStitchStream) { @@ -663,6 +1090,10 @@ int32_t TstitchStream::doMerge(param_360SCVP* pParamStitchStream) pParamStitchStream->outputSEILen = 0; m_dstRwpk.numRegions = m_tileWidthCountSel[0] * m_tileHeightCountSel[0] + m_tileWidthCountSel[1] * m_tileHeightCountSel[1]; + m_dstRwpk.numHiRegions = m_tileWidthCountSel[0] * m_tileHeightCountSel[0]; + m_dstRwpk.lowResPicWidth = mergeStream->lowRes.width; + m_dstRwpk.lowResPicHeight = mergeStream->lowRes.height; + m_dstRwpk.timeStamp = pParamStitchStream->timeStamp; if (!m_dstRwpk.rectRegionPacking) { @@ -675,7 +1106,7 @@ int32_t TstitchStream::doMerge(param_360SCVP* pParamStitchStream) ret = EncRWPKSEI(&m_dstRwpk, pParamStitchStream->pOutputSEI, &pParamStitchStream->outputSEILen); pParamStitchStream->outputBitstreamLen = m_mergeStreamParam.outputiledbistreamlen; - memcpy(pParamStitchStream->pOutputBitstream, m_mergeStreamParam.pOutputBitstream, m_mergeStreamParam.outputiledbistreamlen); + memcpy_s(pParamStitchStream->pOutputBitstream, m_mergeStreamParam.outputiledbistreamlen, m_mergeStreamParam.pOutputBitstream, m_mergeStreamParam.outputiledbistreamlen); return ret; } @@ -744,10 +1175,9 @@ int TstitchStream::GenerateRwpkInfo(RegionWisePacking *dstRwpk) int lowRes_tile_height = m_mergeStreamParam.lowRes.height / m_tileHeightCountOri[1]; dstRwpk->constituentPicMatching = 0; - dstRwpk->packedPicWidth = highRes_tile_width * m_tileWidthCountSel[0] + lowRes_tile_width * m_tileHeightCountSel[1]; - dstRwpk->packedPicHeight = highRes_tile_height * m_tileHeightCountSel[0]; - uint8_t 
highTilesNum = m_tileWidthCountSel[0] * m_tileHeightCountSel[0]; + dstRwpk->packedPicWidth = highRes_tile_width * m_tileWidthCountSel[0] + lowRes_tile_width * ((dstRwpk->numRegions- highTilesNum) / m_lrTilesInCol); + dstRwpk->packedPicHeight = highRes_tile_height * m_tileHeightCountSel[0]; for (uint8_t regionIdx = 0; regionIdx < dstRwpk->numRegions; regionIdx++) { @@ -780,13 +1210,13 @@ int TstitchStream::GenerateRwpkInfo(RegionWisePacking *dstRwpk) else { int lowIdx = regionIdx - highTilesNum; - rwpk->projRegWidth = lowRes_tile_width; - rwpk->projRegHeight = lowRes_tile_height; - rwpk->projRegTop = (lowIdx / m_tileWidthCountOri[1]) * lowRes_tile_height; - rwpk->projRegLeft = (lowIdx % m_tileWidthCountOri[1]) * lowRes_tile_width; - rwpk->packedRegWidth = rwpk->projRegWidth; - rwpk->packedRegHeight = rwpk->projRegHeight; + rwpk->packedRegWidth = lowRes_tile_width; + rwpk->packedRegHeight = lowRes_tile_height; + rwpk->projRegWidth = lowRes_tile_width * m_mergeStreamParam.highRes.width / m_mergeStreamParam.lowRes.width; + rwpk->projRegHeight = lowRes_tile_height * m_mergeStreamParam.highRes.height / m_mergeStreamParam.lowRes.height; + rwpk->projRegTop = (lowIdx / m_tileWidthCountOri[1] * rwpk->projRegHeight); + rwpk->projRegLeft = (lowIdx % m_tileWidthCountOri[1] * rwpk->projRegWidth); rwpk->packedRegTop = (lowIdx % m_lrTilesInCol) * lowRes_tile_height; rwpk->packedRegLeft = (lowIdx / m_lrTilesInCol) * lowRes_tile_width + highRes_tile_width * m_hrTilesInRow; @@ -817,6 +1247,27 @@ int32_t TstitchStream::getFixedNumTiles(TileDef* pOutTile) return ret; } +int32_t TstitchStream::getTilesInViewport(TileDef* pOutTile) +{ + int32_t ret = 0; + if (pOutTile == NULL) + return -1; + + if (m_pTileSelection) { + ret = m_pTileSelection->GetTilesInViewport(pOutTile); + return ret; + } + else if (m_bNeedPlugin) + return SCVP_ERROR_PLUGIN_NOEXIST; + else + { + ret = genViewport_getTilesInViewport(m_pViewport, pOutTile); + m_viewportDestWidth = m_pViewportParam.m_viewportDestWidth; + m_viewportDestHeight = m_pViewportParam.m_viewportDestHeight; + } + return ret; +} + int32_t TstitchStream::doStreamStitch(param_360SCVP* pParamStitchStream) { int32_t ret = 0; @@ -896,7 +1347,7 @@ int32_t TstitchStream::merge_one_tile(uint8_t **pBitstream, oneStream_info* pSli return GTS_BAD_PARAM; hevc_specialInfo specialInfo; - memset(&specialInfo, 0, sizeof(hevc_specialInfo)); + memset_s(&specialInfo, sizeof(hevc_specialInfo), 0); specialInfo.ptr = pBufferSliceCur; specialInfo.ptr_size = lenSlice; @@ -905,7 +1356,7 @@ int32_t TstitchStream::merge_one_tile(uint8_t **pBitstream, oneStream_info* pSli hevc->pps[hevc->last_parsed_pps_id].tiles_enabled_flag = hevc->pps[hevc->last_parsed_pps_id].org_tiles_enabled_flag; - memset(nalsize, 0, sizeof(nalsize)); + memset_s(nalsize, sizeof(nalsize), 0); uint64_t bs_position = bs->position; int32_t spsCnt; parse_hevc_specialinfo(&specialInfo, hevc, nalsize, &specialLen, &spsCnt, 0); @@ -975,7 +1426,7 @@ int32_t TstitchStream::merge_one_tile(uint8_t **pBitstream, oneStream_info* pSli bs_position = bs->position; //copy slice data - memcpy(pBitstreamCur, pBufferSliceCur + specialLen, nalsize[SLICE_DATA]); + memcpy_s(pBitstreamCur, nalsize[SLICE_DATA], pBufferSliceCur + specialLen, nalsize[SLICE_DATA]); pBitstreamCur += nalsize[SLICE_DATA]; bs->position += nalsize[SLICE_DATA]; pBufferSliceCur += specialLen + nalsize[SLICE_DATA]; @@ -1082,7 +1533,7 @@ int32_t TstitchStream::getRWPKInfo(RegionWisePacking *pRWPK) int32_t ret = 0; if (!pRWPK) return -1; - memcpy(pRWPK, &m_dstRwpk, 
sizeof(RegionWisePacking)); + memcpy_s(pRWPK, sizeof(RegionWisePacking), &m_dstRwpk, sizeof(RegionWisePacking)); return ret; } @@ -1150,6 +1601,14 @@ int32_t TstitchStream::setViewportSEI(OMNIViewPort* pSeiViewport) return ret; } +int32_t TstitchStream::getContentCoverage(CCDef* pOutCC) +{ + int32_t ret = 0; + if (pOutCC == NULL) + return -1; + ret = genViewport_getContentCoverage(m_pViewport, pOutCC); + return ret; +} int32_t TstitchStream::GeneratePPS(param_360SCVP* pParamStitchStream, TileArrangement* pTileArrange) { @@ -1169,11 +1628,11 @@ int32_t TstitchStream::GeneratePPS(param_360SCVP* pParamStitchStream, TileArran { // parsing the origin pps hevc_specialInfo specialInfo; - memset(&specialInfo, 0, sizeof(hevc_specialInfo)); + memset_s(&specialInfo, sizeof(hevc_specialInfo), 0); specialInfo.ptr = pParamStitchStream->pInputBitstream; specialInfo.ptr_size = pParamStitchStream->inputBitstreamLen; uint32_t nalsize[20]; - memset(nalsize, 0, sizeof(nalsize)); + memset_s(nalsize, sizeof(nalsize), 0); int32_t spsCnt; ret = hevc_import_ffextradata(&specialInfo, m_hevcState, nalsize, &spsCnt, 0); if (ret < 0) @@ -1184,7 +1643,7 @@ int32_t TstitchStream::GeneratePPS(param_360SCVP* pParamStitchStream, TileArran bsWrite = NULL; return ret; } - memcpy(&hevcTmp, m_hevcState, sizeof(HEVCState)); + memcpy_s(&hevcTmp, sizeof(HEVCState), m_hevcState, sizeof(HEVCState)); if (hevcTmp.last_parsed_pps_id > 63) { gts_bs_del(bs); @@ -1224,7 +1683,7 @@ int32_t TstitchStream::GeneratePPS(param_360SCVP* pParamStitchStream, TileArran gts_bs_del(bs); bs = NULL; } - if (bsWrite) + if (bsWrite) { gts_bs_del(bsWrite); bsWrite = NULL; @@ -1237,7 +1696,7 @@ int32_t TstitchStream::GeneratePPS(param_360SCVP* pParamStitchStream, TileArran gts_bs_del(bs); bs = NULL; } - if (bsWrite) + if (bsWrite) { gts_bs_del(bsWrite); bsWrite = NULL; @@ -1263,11 +1722,11 @@ int32_t TstitchStream::GenerateSPS(param_360SCVP* pParamStitchStream) { // parsing the origin sps hevc_specialInfo specialInfo; - memset(&specialInfo, 0, sizeof(hevc_specialInfo)); + memset_s(&specialInfo, sizeof(hevc_specialInfo), 0); specialInfo.ptr = pParamStitchStream->pInputBitstream; specialInfo.ptr_size = pParamStitchStream->inputBitstreamLen; uint32_t nalsize[20]; - memset(nalsize, 0, sizeof(nalsize)); + memset_s(nalsize, sizeof(nalsize), 0); int32_t spsCnt; ret = hevc_import_ffextradata(&specialInfo, m_hevcState, nalsize, &spsCnt, 0); if (ret < 0) @@ -1287,7 +1746,7 @@ int32_t TstitchStream::GenerateSPS(param_360SCVP* pParamStitchStream) return ret; } // modify the sps - memcpy(&hevcTmp, m_hevcState, sizeof(HEVCState)); + memcpy_s(&hevcTmp, sizeof(HEVCState), m_hevcState, sizeof(HEVCState)); HEVC_SPS *sps = &hevcTmp.sps[0]; /* if (!sps) @@ -1355,10 +1814,10 @@ int32_t TstitchStream::GenerateSliceHdr(param_360SCVP* pParam360SCVP, int32_t n uint32_t nalsize[20]; hevc_specialInfo specialInfo; int32_t spsCnt; - memset(&specialInfo, 0, sizeof(hevc_specialInfo)); + memset_s(&specialInfo, sizeof(hevc_specialInfo), 0); specialInfo.ptr = pParam360SCVP->pInputBitstream; specialInfo.ptr_size = pParam360SCVP->inputBitstreamLen; - memset(nalsize, 0, sizeof(nalsize)); + memset_s(nalsize, sizeof(nalsize), 0); ret = hevc_import_ffextradata(&specialInfo, m_hevcState, nalsize, &spsCnt, 0); if (ret < 0) { @@ -1369,7 +1828,7 @@ int32_t TstitchStream::GenerateSliceHdr(param_360SCVP* pParam360SCVP, int32_t n return ret; } // modify the sliceheader - memcpy(&hevcTmp, m_hevcState, sizeof(HEVCState)); + memcpy_s(&hevcTmp, sizeof(HEVCState), m_hevcState, sizeof(HEVCState)); HEVC_SPS 
*sps = &(hevcTmp.sps[0]); sps->width = pParam360SCVP->destWidth; diff --git a/src/360SCVP/360SCVPImpl.h b/src/360SCVP/360SCVPImpl.h index 0afc0b83..455e21e6 100644 --- a/src/360SCVP/360SCVPImpl.h +++ b/src/360SCVP/360SCVPImpl.h @@ -26,6 +26,8 @@ #ifndef _360SCVP_IMPL_H_ #define _360SCVP_IMPL_H_ #include "360SCVPHevcTilestream.h" +#include "../utils/data_type.h" +#include "TileSelectionPlugins_API.h" class TstitchStream { @@ -74,7 +76,8 @@ class TstitchStream int32_t m_lrTilesInRow; int32_t m_hrTilesInRow; int32_t m_hrTilesInCol; - RegionWisePacking m_dstRwpk; + RegionWisePacking m_dstRwpk; + TileSelection *m_pTileSelection; public: uint16_t m_nalType; @@ -93,14 +96,16 @@ class TstitchStream TstitchStream(); TstitchStream(TstitchStream& other); + TstitchStream& operator=(const TstitchStream& other); virtual ~TstitchStream(); int32_t init(param_360SCVP* pParamStitchStream); int32_t uninit(); int32_t getViewPortTiles(); int32_t feedParamToGenStream(param_360SCVP* pParamStitchStream); - int32_t setViewPort(float yaw, float pitch); + int32_t setViewPort(HeadPose *pose); int32_t doMerge(param_360SCVP* pParamStitchStream); int32_t getFixedNumTiles(TileDef* pOutTile); + int32_t getTilesInViewport(TileDef* pOutTile); int32_t parseNals(param_360SCVP* pParamStitchStream, int32_t parseType, Nalu* pNALU, int32_t streamIdx); int32_t GenerateRWPK(RegionWisePacking* pRWPK, uint8_t *pRWPKBits, int32_t* pRWPKBitsSize); int32_t GenerateProj(int32_t projType, uint8_t *pProjBits, int32_t* pProjBitsSize); @@ -121,12 +126,21 @@ class TstitchStream int32_t GenerateRwpkInfo(RegionWisePacking *dstRwpk); int32_t EncRWPKSEI(RegionWisePacking* pRWPK, uint8_t *pRWPKBits, uint32_t* pRWPKBitsSize); int32_t DecRWPKSEI(RegionWisePacking* pRWPK, uint8_t *pRWPKBits, uint32_t RWPKBitsSize); + int32_t getContentCoverage(CCDef* pOutCC); TileDef* getSelectedTile(); + int32_t getTilesByLegacyWay(TileDef* pOutTile); + int32_t SetLogCallBack(LogFunction logFunction); protected: int32_t initMerge(param_360SCVP* pParamStitchStream, int32_t sliceSize); int32_t initViewport(Param_ViewPortInfo* pViewPortInfo, int32_t tilecolCount, int32_t tilerowCount); int32_t merge_partstream_into1bitstream(int32_t totalInputLen); + +private: + void* m_pluginLibHdl; + void* m_createPlugin; + void* m_destroyPlugin; + bool m_bNeedPlugin; };// END CLASS DEFINITION #endif // _360SCVP_IMPL_H_ diff --git a/src/player/SurfaceRender.cpp b/src/360SCVP/360SCVPLog.cpp similarity index 79% rename from src/player/SurfaceRender.cpp rename to src/360SCVP/360SCVPLog.cpp index ce3b9b93..02bc609b 100644 --- a/src/player/SurfaceRender.cpp +++ b/src/360SCVP/360SCVPLog.cpp @@ -22,28 +22,16 @@ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. - - * */ //! -//! \file SurfaceRender.cpp -//! \brief Implement class for SurfaceRender. +//! \file: 360SCVPLog.cpp +//! \brief: Include the log function implementation +//! +//! Created on Sept. 27, 2020, 6:04 AM //! 
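The hunks above consistently swap unbounded `memset`/`memcpy` calls for bounded variants that also take the destination capacity. A minimal sketch of that pattern, assuming a safestring-style library (three-argument `memset_s`, four-argument `memcpy_s` that returns a nonzero status on failure) rather than the C11 Annex K prototypes; the exact header and prototypes come from the project's safe-string dependency, which is not part of this patch:

```cpp
#include <cstdint>
// #include the project's safe-string header here; it provides memset_s/memcpy_s
// with the argument order used throughout this patch (assumption).

// Hypothetical struct standing in for RegionWisePacking/HEVCState; only the call
// pattern matters for this sketch.
struct Payload { uint8_t data[64]; };

static int32_t copyPayload(Payload* dst, const Payload* src)
{
    // memset_s(dest, destCapacity, value): never writes past destCapacity bytes.
    memset_s(dst, sizeof(Payload), 0);

    // memcpy_s(dest, destCapacity, src, count): the copy is refused (nonzero
    // status assumed) when count exceeds destCapacity, instead of overflowing.
    if (memcpy_s(dst, sizeof(Payload), src, sizeof(Payload)) != 0)
        return -1;
    return 0;
}
```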
-#include "SurfaceRender.h" -#include "ShaderString.h" - -VCD_NS_BEGIN - -SurfaceRender::SurfaceRender() : m_videoShaderOfOnScreen(shader_screen_vs, shader_screen_fs) -{ - m_meshOfOnScreen = NULL; - m_renderType = 0; -} +#include "360SCVPLog.h" -SurfaceRender::~SurfaceRender() -{ -} -VCD_NS_END \ No newline at end of file +LogFunction logCallBack = GlogFunction; diff --git a/src/360SCVP/360SCVPLog.h b/src/360SCVP/360SCVPLog.h new file mode 100644 index 00000000..6097d5bf --- /dev/null +++ b/src/360SCVP/360SCVPLog.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: 360SCVPLog.h +//! \brief: Include the log function declaration +//! +//! Created on Sept. 27, 2020, 6:04 AM +//! + +#ifndef _360SCVPLOG_H_ +#define _360SCVPLOG_H_ + +#include "../utils/Log.h" +#include "../utils/error.h" + +//global logging callback function +extern LogFunction logCallBack; + +#define FILE_NAME(x) (strrchr(x, '/') ? strrchr(x, '/')+1:x) + +#define PRINT_LOG(logLevel, source, line, fmt, args...) \ + logCallBack(logLevel, source, line, fmt, ##args); \ + +#define SCVP_LOG(logLevel, fmt, args...) 
\ + PRINT_LOG(logLevel, FILE_NAME(__FILE__), __LINE__, fmt, ##args) \ + +#endif /* _PACKINGLOG_H_ */ diff --git a/src/360SCVP/360SCVPViewPort.cpp b/src/360SCVP/360SCVPViewPort.cpp index b6ca7bae..abc40287 100644 --- a/src/360SCVP/360SCVPViewPort.cpp +++ b/src/360SCVP/360SCVPViewPort.cpp @@ -112,6 +112,6 @@ void ViewPort::matInv(POSType K[3][3]) } void ViewPort::map3DTo2D(SPos *,SPos *) { - assert(!"Viewport 3D to 2D is not supported "); + assert(0 && "Viewport 3D to 2D is not supported "); } diff --git a/src/360SCVP/360SCVPViewPort.h b/src/360SCVP/360SCVPViewPort.h index 30ac9733..76926e1d 100644 --- a/src/360SCVP/360SCVPViewPort.h +++ b/src/360SCVP/360SCVPViewPort.h @@ -29,6 +29,15 @@ #include "360SCVPGeometry.h" #define FACE_NUMBER 6 +#define ERP_HORZ_ANGLE 360 +#define ERP_VERT_ANGLE 180 +#define ERP_HORZ_START -180 +#define ERP_VERT_START 90 +#define PI_IN_DEGREE 180 +#define HALF_PI_IN_DEGREE 90 +#define DEG2RAD_FACTOR (S_PI/PI_IN_DEGREE) +#define RAD2DEG_FACTOR (PI_IN_DEGREE/S_PI) +#define HORZ_BOUNDING_STEP 5 // ==================================================================================================================== // Class definition diff --git a/src/360SCVP/360SCVPViewportAPI.h b/src/360SCVP/360SCVPViewportAPI.h index a07d13df..a1a959cf 100644 --- a/src/360SCVP/360SCVPViewportAPI.h +++ b/src/360SCVP/360SCVPViewportAPI.h @@ -47,8 +47,8 @@ typedef struct POINTDEF //! //! \param m_iViewportWidth, input, the width for the viewport //! \param m_iViewportHeight, input, the height for the viewport -//! \param m_viewPort_fPitch, input, the angle rotated aroud z -//! \param m_viewPort_fYaw, input, the angle rotated aroud x +//! \param m_viewPort_fYaw, input, the angle rotated aroud z(-180 ~ 180) +//! \param m_viewPort_fPitch, input, the angle rotated aroud x(-90 ~ 90) //! \param m_viewPort_hFOV, input, the horizontal FOV angle //! \param m_viewPort_vFOV, input, the vertical FOV angle //! \param m_output_geoType, input, the type for the output projection(viewport) @@ -81,7 +81,8 @@ typedef struct GENERATE_VIEWPORT_PARAM point* m_pDownRight; int32_t m_viewportDestWidth; int32_t m_viewportDestHeight; - + UsageType m_usageType; + Param_VideoFPStruct m_paramVideoFP; } generateViewPortParam; //! @@ -107,6 +108,18 @@ void* genViewport_Init(generateViewPortParam* pParamGenViewport); //! int32_t genViewport_process(generateViewPortParam* pParamGenViewport, void* pGenHandle); +//! +//! \brief This function completes the viewport selection by look up table , according to the FOV information. +//! +//! \param generateViewPortParam* pParamGenViewport, output, refer to the structure generateViewPortParam +//! \param void* pGenHandle, input, which is created by the genTiledStream_Init function +//! +//! \return s32, the status of the function. +//! 0, if succeed +//! not 0, if fail +//! +int32_t genViewport_postprocess(generateViewPortParam* pParamGenViewport, void* pGenHandle); + //! //! \brief This function sets the parameter of the viewPort. //! @@ -160,6 +173,18 @@ bool genViewport_isInside(void* pGenHandle, int32_t x, int32_t y, int32_t width, //! int32_t genViewport_getFixedNumTiles(void* pGenHandle, TileDef* pOutTile); +//! +//! \brief This function output the selected tiles according to the timely changed viewPort information, +//! especially these tiles are put in the original picture order. +//! for cube map source, currently support FOV range 100 ~70 degree +//! +//! \param void* pGenHandle, input, which is created by the genTiledStream_Init function +//! 
\param TileDef* pOutTile, output, the list for the tiles inside the viewport +//! +//! \return int32_t, the number of the tiles inside the viewport. +//! +int32_t genViewport_getTilesInViewport(void* pGenHandle, TileDef* pOutTile); + //! //! \brief This function output the fixed number tiles according to the viewPort information in the initialization phase, //! especially these tiles are put in the original picture order. @@ -174,6 +199,18 @@ int32_t genViewport_getViewportTiles(void* pGenHandle, TileDef* pOutTile); int32_t genViewport_getContentCoverage(void* pGenHandle, CCDef* pOutCC); +//! +//! \brief This function output the selected tiles according to the timely changed viewPort information, +//! By the legacy tile selection algorithm. +//! for cube map source, currently support FOV range 100 ~70 degree +//! +//! \param void* pGenHandle, input, which is created by the genTiledStream_Init function +//! \param TileDef* pOutTile, output, the list for the tiles inside the viewport +//! +//! \return int32_t, the number of the tiles inside the viewport. +//! +int32_t genViewport_getTilesInViewportByLegacyWay(void* pGenHandle, TileDef* pOutTile); + //! //! \brief This function completes the un-initialization, free the memory //! diff --git a/src/360SCVP/360SCVPViewportImpl.cpp b/src/360SCVP/360SCVPViewportImpl.cpp index 7b657310..249c875d 100644 --- a/src/360SCVP/360SCVPViewportImpl.cpp +++ b/src/360SCVP/360SCVPViewportImpl.cpp @@ -32,24 +32,57 @@ #include #include #include +#include +#include #include "360SCVPViewPort.h" #include "360SCVPViewportImpl.h" #include "360SCVPViewportAPI.h" +#include "360SCVPLog.h" + #ifdef WIN32 #define strdup _strdup #endif using namespace std; +int32_t cubeMapFaceMap[6] = {0, 1, 4, 5, 2, 3}; void* genViewport_Init(generateViewPortParam* pParamGenViewport) { + uint32_t maxTileNumCol = 0; + uint32_t maxTileNumRow = 0; + uint32_t maxTileNum = 0; + int32_t viewPortWidth = 0; + int32_t viewPortHeight = 0; + int32_t viewPortHeightmax = 0; + if (!pParamGenViewport) return NULL; TgenViewport* cTAppConvCfg = new TgenViewport; if (!cTAppConvCfg) return NULL; - memset(&cTAppConvCfg->m_sourceSVideoInfo, 0, sizeof(struct SVideoInfo)); - memset(&cTAppConvCfg->m_codingSVideoInfo, 0, sizeof(struct SVideoInfo)); + + cTAppConvCfg->m_usageType = pParamGenViewport->m_usageType; + cTAppConvCfg->m_paramVideoFP.cols = pParamGenViewport->m_paramVideoFP.cols; + cTAppConvCfg->m_paramVideoFP.rows = pParamGenViewport->m_paramVideoFP.rows; + + /* Check the paramVideoFP rows / cols exceeds the maximum array size */ + if (pParamGenViewport->m_paramVideoFP.rows > 6 || pParamGenViewport->m_paramVideoFP.cols > 6) { + SAFE_DELETE(cTAppConvCfg); + return NULL; + } + + for (int i = 0; i < pParamGenViewport->m_paramVideoFP.rows; i++) + { + for (int j = 0; j < pParamGenViewport->m_paramVideoFP.cols; j++) + { + cTAppConvCfg->m_paramVideoFP.faces[i][j].faceHeight = pParamGenViewport->m_paramVideoFP.faces[i][j].faceHeight; + cTAppConvCfg->m_paramVideoFP.faces[i][j].faceWidth = pParamGenViewport->m_paramVideoFP.faces[i][j].faceWidth; + cTAppConvCfg->m_paramVideoFP.faces[i][j].idFace = pParamGenViewport->m_paramVideoFP.faces[i][j].idFace; + cTAppConvCfg->m_paramVideoFP.faces[i][j].rotFace = pParamGenViewport->m_paramVideoFP.faces[i][j].rotFace; + } + } + memset_s(&cTAppConvCfg->m_sourceSVideoInfo, sizeof(struct SVideoInfo), 0); + memset_s(&cTAppConvCfg->m_codingSVideoInfo, sizeof(struct SVideoInfo), 0); cTAppConvCfg->m_iCodingFaceWidth = pParamGenViewport->m_iViewportWidth; 
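The new declarations above (genViewport_postprocess, genViewport_getTilesInViewport, genViewport_getTilesInViewportByLegacyWay) extend the existing init/set/process/uninit flow of the viewport library. A rough usage sketch for an ERP source follows, based only on the fields and calls visible in this patch; the example field values, the (yaw, pitch) argument order of genViewport_setViewPort, and the 1x1 m_paramVideoFP for ERP are assumptions, not a verified configuration:

```cpp
// #include "360SCVPViewportAPI.h"   // declares generateViewPortParam, TileDef, genViewport_*

generateViewPortParam param = {};
param.m_iInputWidth     = 7680;             // illustrative ERP source resolution
param.m_iInputHeight    = 3840;
param.m_input_geoType   = SVIDEO_EQUIRECT;
param.m_output_geoType  = SVIDEO_VIEWPORT;
param.m_iViewportWidth  = 1920;
param.m_iViewportHeight = 1080;
param.m_viewPort_hFOV   = 80.f;
param.m_viewPort_vFOV   = 80.f;
param.m_viewPort_fYaw   = 30.f;
param.m_viewPort_fPitch = 0.f;
const int32_t kRows = 6, kCols = 12;
param.m_tileNumRow = kRows;
param.m_tileNumCol = kCols;
param.m_usageType  = E_STREAM_STITCH_ONLY;  // default used by TgenViewport's constructor
param.m_paramVideoFP.rows = 1;              // assumed: single face pack for ERP
param.m_paramVideoFP.cols = 1;

void* handle = genViewport_Init(&param);
if (handle)
{
    genViewport_setViewPort(handle, 30.f, 0.f);   // (yaw, pitch) order assumed from the Init code above
    genViewport_process(&param, handle);

    TileDef tiles[6 * kRows * kCols];             // worst case: every tile on every face
    int32_t n = genViewport_getTilesInViewport(handle, tiles);
    // tiles[0..n-1] now carry faceId / x / y / idx for the selected tiles.

    genViewport_unInit(handle);
}
```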
cTAppConvCfg->m_iCodingFaceHeight = pParamGenViewport->m_iViewportHeight; cTAppConvCfg->m_codingSVideoInfo.viewPort.fPitch = pParamGenViewport->m_viewPort_fPitch; @@ -60,50 +93,60 @@ void* genViewport_Init(generateViewPortParam* pParamGenViewport) cTAppConvCfg->m_sourceSVideoInfo.geoType = pParamGenViewport->m_input_geoType; cTAppConvCfg->m_iInputWidth = pParamGenViewport->m_iInputWidth; cTAppConvCfg->m_iInputHeight = pParamGenViewport->m_iInputHeight; - if (cTAppConvCfg->create(pParamGenViewport->m_tileNumRow, pParamGenViewport->m_tileNumCol) < 0) + if (cTAppConvCfg->create(pParamGenViewport->m_tileNumRow / cTAppConvCfg->m_paramVideoFP.rows, pParamGenViewport->m_tileNumCol / cTAppConvCfg->m_paramVideoFP.cols) < 0) { - delete cTAppConvCfg; - cTAppConvCfg = NULL; + SAFE_DELETE(cTAppConvCfg); return NULL; } //calculate the max tile num if the source project is cube map cTAppConvCfg->m_maxTileNum = 0; - if (pParamGenViewport->m_tileNumCol == 0 || pParamGenViewport->m_tileNumRow == 0) + if (pParamGenViewport->m_tileNumCol == 0 || pParamGenViewport->m_tileNumRow == 0) { + SAFE_DELETE(cTAppConvCfg); return NULL; + } + if (cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) { - int32_t tilewidth = cTAppConvCfg->m_iInputWidth / pParamGenViewport->m_tileNumCol; - int32_t tileheight = cTAppConvCfg->m_iInputHeight / pParamGenViewport->m_tileNumRow; - uint32_t tiles_num_row = 0; - uint32_t tiles_num_col = 0; - uint32_t tiles_num = 0; - int32_t idx = 0; - float fovh = MAX_FOV_ANGLE; - for (int32_t j = 0; j < FOV_Angle_NUM; j++) + SPos *pUpLeft = cTAppConvCfg->m_pUpLeft; + SPos *pDownRight = cTAppConvCfg->m_pDownRight; + genViewport_setViewPort((void*)cTAppConvCfg, 45, -90); + genViewport_process(pParamGenViewport, (void*)cTAppConvCfg); + + cTAppConvCfg->m_codingSVideoInfo.viewPort.fPitch = pParamGenViewport->m_viewPort_fPitch; + cTAppConvCfg->m_codingSVideoInfo.viewPort.fYaw = pParamGenViewport->m_viewPort_fYaw; + + for (int32_t i = 0; i < 6; i++) // the max count is 6 { - if (cTAppConvCfg->m_codingSVideoInfo.viewPort.hFOV <= fovh && cTAppConvCfg->m_codingSVideoInfo.viewPort.hFOV > fovh-10) + if (pDownRight->faceIdx == -1) { - idx = j; - break; + pUpLeft++; + pDownRight++; + continue; } - fovh -= 10; - } - if (tilewidth == 0 || tileheight == 0) - return NULL; - for (int32_t i = 0; i < 4; i++) - { - tiles_num_row = (Max_Viewport_Size[idx][i].x / tilewidth + ((Max_Viewport_Size[idx][i].x !=0) ? 2 : 0)); - tiles_num_col = (Max_Viewport_Size[idx][i].y / tileheight + ((Max_Viewport_Size[idx][i].y!=0) ? 
2 : 0)); - if (tiles_num_row > pParamGenViewport->m_tileNumRow) - tiles_num_row = pParamGenViewport->m_tileNumRow; - if (tiles_num_col > pParamGenViewport->m_tileNumCol) - tiles_num_col = pParamGenViewport->m_tileNumCol; - tiles_num += tiles_num_row * tiles_num_col; + viewPortWidth = int32_t(pDownRight->x - pUpLeft->x); + viewPortHeight = int32_t(pDownRight->y - pUpLeft->y); + + viewPortWidth = floor((float)(viewPortWidth) / (float)(cTAppConvCfg->m_srd[0].tilewidth) + 0.499) * cTAppConvCfg->m_srd[0].tilewidth; + viewPortHeightmax = floor((float)(viewPortHeight) / (float)(cTAppConvCfg->m_srd[0].tileheight) + 0.499) * cTAppConvCfg->m_srd[0].tileheight; + SCVP_LOG(LOG_INFO, "viewPortWidthMax is %d, viewPortHeightMax is %d\n", viewPortWidth, viewPortHeightmax); + SCVP_LOG(LOG_INFO, "tilewidth is %d , tileheight is %d\n", cTAppConvCfg->m_srd[0].tilewidth, cTAppConvCfg->m_srd[0].tileheight); + + maxTileNumCol = (viewPortWidth / cTAppConvCfg->m_srd[0].tilewidth + 1); + if (maxTileNumCol > cTAppConvCfg->m_tileNumCol) + maxTileNumCol = cTAppConvCfg->m_tileNumCol; + + maxTileNumRow = (viewPortHeightmax / cTAppConvCfg->m_srd[0].tileheight + 1); + if (maxTileNumRow > cTAppConvCfg->m_tileNumRow) + maxTileNumRow = cTAppConvCfg->m_tileNumRow; + + maxTileNum += maxTileNumCol * maxTileNumRow; + pUpLeft++; + pDownRight++; } - cTAppConvCfg->m_maxTileNum = tiles_num; - pParamGenViewport->m_viewportDestWidth = tiles_num_row * cTAppConvCfg->m_srd[0].tilewidth; - pParamGenViewport->m_viewportDestHeight = tiles_num_row * cTAppConvCfg->m_srd[0].tileheight; + //cTAppConvCfg->m_maxTileNum = tiles_num; + //pParamGenViewport->m_viewportDestWidth = tiles_num_row * cTAppConvCfg->m_srd[0].tilewidth; + //pParamGenViewport->m_viewportDestHeight = tiles_num_row * cTAppConvCfg->m_srd[0].tileheight; } else if (cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_EQUIRECT) { @@ -112,12 +155,6 @@ void* genViewport_Init(generateViewPortParam* pParamGenViewport) cTAppConvCfg->m_codingSVideoInfo.viewPort.fPitch = pParamGenViewport->m_viewPort_fPitch; cTAppConvCfg->m_codingSVideoInfo.viewPort.fYaw = pParamGenViewport->m_viewPort_fYaw; - uint32_t maxTileNumCol = 0; - uint32_t maxTileNumRow = 0; - uint32_t maxTileNum = 0; - int32_t viewPortWidth = 0; - int32_t viewPortHeight = 0; - int32_t viewPortHeightmax = 0; SPos *pUpLeft = cTAppConvCfg->m_pUpLeft; SPos *pDownRight = cTAppConvCfg->m_pDownRight; for (int32_t i = 0; i < 2; i++) // the max count is 2 @@ -132,24 +169,42 @@ void* genViewport_Init(generateViewPortParam* pParamGenViewport) viewPortHeight = int32_t(pDownRight->y - pUpLeft->y); if (viewPortHeightmax < viewPortHeight) viewPortHeightmax = viewPortHeight; - printf("viewPortWidthMax = %d viewPortHeightMax = %d\n", viewPortWidth, viewPortHeightmax); + SCVP_LOG(LOG_INFO, "viewPortWidthMax = %d, viewPortHeightMax = %d\n", viewPortWidth, viewPortHeightmax); pUpLeft++; pDownRight++; } - maxTileNumCol = (viewPortWidth / cTAppConvCfg->m_srd[0].tilewidth + 2); - if (maxTileNumCol > cTAppConvCfg->m_tileNumCol) - maxTileNumCol = cTAppConvCfg->m_tileNumCol; - maxTileNumRow = (viewPortHeightmax / cTAppConvCfg->m_srd[0].tileheight + 2); - if (maxTileNumRow > cTAppConvCfg->m_tileNumRow) - maxTileNumRow = cTAppConvCfg->m_tileNumRow; + // if (pParamGenViewport->m_usageType == E_PARSER_ONENAL + // || pParamGenViewport->m_usageType == E_VIEWPORT_ONLY) + // { + // viewPortWidth = floor((float)(viewPortWidth) / (float)(cTAppConvCfg->m_srd[0].tilewidth) + 0.499) * cTAppConvCfg->m_srd[0].tilewidth; + // viewPortHeightmax = 
floor((float)(viewPortHeightmax) / (float)(cTAppConvCfg->m_srd[0].tileheight) + 0.499) * cTAppConvCfg->m_srd[0].tileheight; + // printf("viewPortWidthMax = %d viewPortHeightMax = %d, tile_width %d, tile_height %d\n", viewPortWidth, viewPortHeightmax, cTAppConvCfg->m_srd[0].tilewidth, cTAppConvCfg->m_srd[0].tileheight); + + // maxTileNumCol = (viewPortWidth / cTAppConvCfg->m_srd[0].tilewidth + 1); + // if (maxTileNumCol > cTAppConvCfg->m_tileNumCol) + // maxTileNumCol = cTAppConvCfg->m_tileNumCol; + + // maxTileNumRow = (viewPortHeightmax / cTAppConvCfg->m_srd[0].tileheight + 1); + // if (maxTileNumRow > cTAppConvCfg->m_tileNumRow) + // maxTileNumRow = cTAppConvCfg->m_tileNumRow; + // } + // else + // { + maxTileNumCol = (viewPortWidth / cTAppConvCfg->m_srd[0].tilewidth + 2); + if (maxTileNumCol > cTAppConvCfg->m_tileNumCol) + maxTileNumCol = cTAppConvCfg->m_tileNumCol; + maxTileNumRow = (viewPortHeightmax / cTAppConvCfg->m_srd[0].tileheight + 2); + if (maxTileNumRow > cTAppConvCfg->m_tileNumRow) + maxTileNumRow = cTAppConvCfg->m_tileNumRow; + // } maxTileNum = maxTileNumCol * maxTileNumRow; - cTAppConvCfg->m_maxTileNum = maxTileNum; pParamGenViewport->m_viewportDestWidth = maxTileNumCol * cTAppConvCfg->m_srd[0].tilewidth; pParamGenViewport->m_viewportDestHeight = maxTileNumRow * cTAppConvCfg->m_srd[0].tileheight; } + cTAppConvCfg->m_maxTileNum = maxTileNum; return (void*)cTAppConvCfg; } @@ -194,6 +249,41 @@ int32_t genViewport_process(generateViewPortParam* pParamGenViewport, void* pG } +int32_t genViewport_postprocess(generateViewPortParam* pParamGenViewport, void* pGenHandle) +{ + TgenViewport* cTAppConvCfg = (TgenViewport*)(pGenHandle); + if (!cTAppConvCfg || !pParamGenViewport) + return -1; + + cTAppConvCfg->ERPselectregion(pParamGenViewport->m_iInputWidth, pParamGenViewport->m_iInputHeight, pParamGenViewport->m_viewportDestWidth, pParamGenViewport->m_viewportDestHeight); + + pParamGenViewport->m_numFaces = cTAppConvCfg->m_numFaces; + point* pTmpUpleftDst = pParamGenViewport->m_pUpLeft; + point* pTmpDownRightDst = pParamGenViewport->m_pDownRight; + SPos * pTmpUpleftSrc = cTAppConvCfg->m_pUpLeft; + SPos * pTmpDownRightSrc = cTAppConvCfg->m_pDownRight; + + for (int32_t i = 0; i < FACE_NUMBER; i++) + { + if (pTmpUpleftSrc->faceIdx >= 0) + { + pTmpUpleftDst->faceId = (int32_t)pTmpUpleftSrc->faceIdx; + pTmpUpleftDst->x = (int32_t)pTmpUpleftSrc->x; + pTmpUpleftDst->y = (int32_t)pTmpUpleftSrc->y; + pTmpDownRightDst->faceId = (int32_t)pTmpDownRightSrc->faceIdx; + pTmpDownRightDst->x = (int32_t)pTmpDownRightSrc->x; + pTmpDownRightDst->y = (int32_t)pTmpDownRightSrc->y; + pTmpUpleftDst++; + pTmpDownRightDst++; + } + + pTmpUpleftSrc++; + pTmpDownRightSrc++; + } + return 0; +} + + int32_t genViewport_setMaxSelTiles(void* pGenHandle, int32_t maxSelTiles) { TgenViewport* cTAppConvCfg = (TgenViewport*)(pGenHandle); @@ -228,6 +318,8 @@ bool genViewport_isInside(void* pGenHandle, int32_t x, int32_t y, int32_t width, int32_t genViewport_getContentCoverage(void* pGenHandle, CCDef* pOutCC) { TgenViewport* cTAppConvCfg = (TgenViewport*)(pGenHandle); + int32_t coverageShapeType; + if (!cTAppConvCfg) return -1; @@ -235,39 +327,43 @@ int32_t genViewport_getContentCoverage(void* pGenHandle, CCDef* pOutCC) if(!pOutCC) return -1; - SPos *pUpLeft = cTAppConvCfg->m_pUpLeft; - SPos *pDownRight = cTAppConvCfg->m_pDownRight; - - int32_t videoWidth = cTAppConvCfg->m_iInputWidth; - int32_t videoHeight = cTAppConvCfg->m_iInputHeight; - - int32_t faceNum = (cTAppConvCfg->m_sourceSVideoInfo.geoType==SVIDEO_CUBEMAP) ? 
6 : 2; // ERP mode may have 2 faces for boundary case if(cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_EQUIRECT) { + /* ERP used shape type 0 too. Utilize the FOV to generate CC info */ + coverageShapeType = 0; + cTAppConvCfg->getContentCoverage(pOutCC, coverageShapeType); +#if 0 + SPos *pUpLeft = cTAppConvCfg->m_pUpLeft; + SPos *pDownRight = cTAppConvCfg->m_pDownRight; + + int32_t videoWidth = cTAppConvCfg->m_iInputWidth; + int32_t videoHeight = cTAppConvCfg->m_iInputHeight; + int32_t w = 0, h = 0, x = 0, y = 0; bool coverBoundary = pUpLeft[1].faceIdx == 0 ? true : false; - x = pUpLeft[0].x; + x = coverBoundary ? max(pUpLeft[0].x, pUpLeft[1].x) : pUpLeft[0].x; y = pUpLeft[0].y; w = pDownRight[0].x - pUpLeft[0].x + coverBoundary * (pDownRight[1].x - pUpLeft[1].x); - h = pDownRight[0].y - pUpLeft[0].y + coverBoundary * (pDownRight[1].y - pUpLeft[1].y); + h = pDownRight[0].y - pUpLeft[0].y; pOutCC->centreAzimuth = (int32_t)((((videoWidth / 2) - (float)(x + w / 2)) * 360 * 65536) / videoWidth); pOutCC->centreElevation = (int32_t)((((videoHeight / 2) - (float)(y + h / 2)) * 180 * 65536) / videoHeight); pOutCC->azimuthRange = (uint32_t)((w * 360.f * 65536) / videoWidth); pOutCC->elevationRange = (uint32_t)((h * 180.f * 65536) / videoHeight); +#endif } - else //if(cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) + else if(cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) { - for (int32_t faceid = 0; faceid < faceNum; faceid++) - { - - pUpLeft++; - pDownRight++; - } + coverageShapeType = 0; + cTAppConvCfg->getContentCoverage(pOutCC, coverageShapeType); + } + else + { + SCVP_LOG(LOG_WARNING, "Only Support GeoType ERP and Cubemap for Content Coverage\n"); + return -1; } - return 0; } @@ -284,12 +380,18 @@ int32_t genViewport_getFixedNumTiles(void* pGenHandle, TileDef* pOutTile) tileNum = cTAppConvCfg->calcTilesInViewport(cTAppConvCfg->m_srd, cTAppConvCfg->m_tileNumCol, cTAppConvCfg->m_tileNumRow); maxTileNum = cTAppConvCfg->m_maxTileNum; - //select the additional tiles randomly - additionalTilesNum = maxTileNum - tileNum; - printf("the max tile count = %d additionalTilesNum = %d\n", maxTileNum, additionalTilesNum); + if (cTAppConvCfg->m_usageType == E_PARSER_ONENAL) + { + additionalTilesNum = 0; + } + else + { + additionalTilesNum = maxTileNum - tileNum; + } + SCVP_LOG(LOG_INFO, "the max tile count = %d additionalTilesNum = %d\n", maxTileNum, additionalTilesNum); if (additionalTilesNum < 0) - printf("there is an error in the judgement\n"); + SCVP_LOG(LOG_WARNING, "there is an error in the judgement!\n"); int32_t pos = 0; for (int32_t i = 0; i < additionalTilesNum; i++) { @@ -307,25 +409,123 @@ int32_t genViewport_getFixedNumTiles(void* pGenHandle, TileDef* pOutTile) //set the occupy tile into the output parameter int32_t idx = 0; TileDef* pOutTileTmp = pOutTile; - tileNum = tileNum + additionalTilesNum; - for (uint32_t col = 0; col < cTAppConvCfg->m_tileNumCol; col++) + int32_t faceNum = (cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) ? 
6 : 2; + if (cTAppConvCfg->m_usageType == E_PARSER_ONENAL) { - for (uint32_t row = 0; row < cTAppConvCfg->m_tileNumRow; row++) + tileNum = maxTileNum; + } + else + { + tileNum = tileNum + additionalTilesNum; + } + if (cTAppConvCfg->m_srd[cTAppConvCfg->m_tileNumCol*cTAppConvCfg->m_tileNumRow-1].isOccupy == 1) + { + for (int32_t idFace = 0; idFace < faceNum; idFace++) { - if (cTAppConvCfg->m_srd[idx].isOccupy == 1) + idx = idFace* cTAppConvCfg->m_tileNumCol*cTAppConvCfg->m_tileNumRow + cTAppConvCfg->m_tileNumCol*cTAppConvCfg->m_tileNumRow -1; + for (uint32_t col = cTAppConvCfg->m_tileNumCol; col > 0 ; col--) { - pOutTileTmp->faceId = cTAppConvCfg->m_srd[idx].faceId; - pOutTileTmp->x = cTAppConvCfg->m_srd[idx].x; - pOutTileTmp->y = cTAppConvCfg->m_srd[idx].y; - pOutTileTmp->idx = idx; - pOutTileTmp++; + for (uint32_t row = cTAppConvCfg->m_tileNumRow; row > 0 ; row--) + { + if (cTAppConvCfg->m_srd[idx].isOccupy == 1) + { + pOutTileTmp->faceId = cTAppConvCfg->m_srd[idx].faceId; + pOutTileTmp->x = cTAppConvCfg->m_srd[idx].x; + pOutTileTmp->y = cTAppConvCfg->m_srd[idx].y; + pOutTileTmp->idx = idx; + pOutTileTmp++; + } + idx--; + } + } + } + } + else + { + for (int32_t idFace = 0; idFace < faceNum; idFace++) + { + for (uint32_t col = 0; col < cTAppConvCfg->m_tileNumCol; col++) + { + for (uint32_t row = 0; row < cTAppConvCfg->m_tileNumRow; row++) + { + if (cTAppConvCfg->m_srd[idx].isOccupy == 1) + { + pOutTileTmp->faceId = cTAppConvCfg->m_srd[idx].faceId; + pOutTileTmp->x = cTAppConvCfg->m_srd[idx].x; + pOutTileTmp->y = cTAppConvCfg->m_srd[idx].y; + pOutTileTmp->idx = idx; + pOutTileTmp++; + } + idx++; + } } - idx++; } } return tileNum; } +int32_t genViewport_getTilesInViewport(void* pGenHandle, TileDef* pOutTile) +{ + TgenViewport* cTAppConvCfg = (TgenViewport*)(pGenHandle); + if (!cTAppConvCfg || !pOutTile) + return -1; + + int32_t tileNum = 0; + + tileNum = cTAppConvCfg->calcTilesInViewport(cTAppConvCfg->m_srd, cTAppConvCfg->m_tileNumCol, cTAppConvCfg->m_tileNumRow); + int32_t idx = 0; + TileDef* pOutTileTmp = pOutTile; + int32_t faceNum = (cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) ? 6 : 2; + // correct accurate needed tile number in ERP. 
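genViewport_getTilesInViewport reports each selected tile's flat index (TileDef::idx) in the per-face grid, walks the grid backwards when the viewport wraps around the picture edge so the wrapped tiles come out contiguous, and remaps cube map face ids through cubeMapFaceMap before returning them. A small sketch of turning the returned idx back into face/row/column coordinates; the row-major decomposition is an assumption read off the way m_srd is filled in TgenViewport::parseCfg:

```cpp
#include <cstdint>

// Hypothetical helper: recover (face, row, col) from TileDef::idx, assuming the
// per-face row-major layout used when m_srd is populated in parseCfg. Note that
// idx follows the internal face order, while TileDef::faceId is already remapped
// through cubeMapFaceMap for cube map sources.
struct TilePos { int32_t face, row, col; };

static TilePos decodeTileIdx(int32_t idx, int32_t tileNumRow, int32_t tileNumCol)
{
    const int32_t tilesPerFace = tileNumRow * tileNumCol;
    TilePos p;
    p.face = idx / tilesPerFace;
    const int32_t rem = idx % tilesPerFace;
    p.row = rem / tileNumCol;
    p.col = rem % tileNumCol;
    return p;
}
```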
+ + if (cTAppConvCfg->m_srd[cTAppConvCfg->m_tileNumCol*cTAppConvCfg->m_tileNumRow-1].isOccupy == 1) + { + for (int32_t idFace = 0; idFace < faceNum; idFace++) + { + idx = idFace * cTAppConvCfg->m_tileNumCol * cTAppConvCfg->m_tileNumRow + cTAppConvCfg->m_tileNumCol * cTAppConvCfg->m_tileNumRow - 1; + for (uint32_t col = cTAppConvCfg->m_tileNumCol; col > 0 ; col--) + { + for (uint32_t row = cTAppConvCfg->m_tileNumRow; row > 0 ; row--) + { + if (cTAppConvCfg->m_srd[idx].isOccupy == 1) + { + pOutTileTmp->faceId = cubeMapFaceMap[cTAppConvCfg->m_srd[idx].faceId]; + pOutTileTmp->x = cTAppConvCfg->m_srd[idx].x; + pOutTileTmp->y = cTAppConvCfg->m_srd[idx].y; + pOutTileTmp->idx = idx; + SCVP_LOG(LOG_INFO, "final decision is idx %d and face_id %d\n", idx, pOutTileTmp->faceId); + pOutTileTmp++; + } + idx--; + } + } + } + } + else + { + for (int32_t idFace = 0; idFace < faceNum; idFace++) + { + for (uint32_t col = 0; col < cTAppConvCfg->m_tileNumCol; col++) + { + for (uint32_t row = 0; row < cTAppConvCfg->m_tileNumRow; row++) + { + if (cTAppConvCfg->m_srd[idx].isOccupy == 1) + { + pOutTileTmp->faceId = cubeMapFaceMap[cTAppConvCfg->m_srd[idx].faceId]; + pOutTileTmp->x = cTAppConvCfg->m_srd[idx].x; + pOutTileTmp->y = cTAppConvCfg->m_srd[idx].y; + pOutTileTmp->idx = idx; + SCVP_LOG(LOG_INFO, "final decision is idx %d and face_id %d\n", idx, pOutTileTmp->faceId); + pOutTileTmp++; + } + idx++; + } + } + } + } + SCVP_LOG(LOG_INFO, "Tile Selection Final Resulted %d Tiles in Total!!!\n", tileNum); + return tileNum; +} int32_t genViewport_getViewportTiles(void* pGenHandle, TileDef* pOutTile) { @@ -405,6 +605,81 @@ int32_t genViewport_getViewportTiles(void* pGenHandle, TileDef* pOutTile) return tileNum; } +int32_t genViewport_getTilesInViewportByLegacyWay(void* pGenHandle, TileDef* pOutTile) +{ + TgenViewport* cTAppConvCfg = (TgenViewport*)(pGenHandle); + if (!cTAppConvCfg || !pOutTile) + return -1; + + ITileInfo* pTileInfoTmp = cTAppConvCfg->m_srd; + int32_t faceNum = (cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) ? 6 : 1; + for (int32_t faceid = 0; faceid < faceNum; faceid++) + { + for (uint32_t row = 0; row < cTAppConvCfg->m_tileNumRow; row++) + { + for (uint32_t col = 0; col < cTAppConvCfg->m_tileNumCol; col++) + { + pTileInfoTmp->isOccupy = cTAppConvCfg->isInside(pTileInfoTmp->x, pTileInfoTmp->y, pTileInfoTmp->tilewidth, pTileInfoTmp->tileheight, faceid); + pTileInfoTmp++; + } + } + } + + int32_t idx = 0; + TileDef* pOutTileTmp = pOutTile; + faceNum = (cTAppConvCfg->m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) ? 
6 : 2; + + uint32_t occupancyNum = 0; + if (cTAppConvCfg->m_srd[cTAppConvCfg->m_tileNumCol * cTAppConvCfg->m_tileNumRow - 1].isOccupy == 1) + { + for (int32_t idFace = 0; idFace < faceNum; idFace++) + { + idx = idFace * cTAppConvCfg->m_tileNumCol * cTAppConvCfg->m_tileNumRow + cTAppConvCfg->m_tileNumCol * cTAppConvCfg->m_tileNumRow - 1; + for (uint32_t col = cTAppConvCfg->m_tileNumCol; col > 0; col--) + { + for (uint32_t row = cTAppConvCfg->m_tileNumRow; row > 0; row--) + { + if (cTAppConvCfg->m_srd[idx].isOccupy == 1) + { + pOutTileTmp->faceId = cubeMapFaceMap[cTAppConvCfg->m_srd[idx].faceId]; + pOutTileTmp->x = cTAppConvCfg->m_srd[idx].x; + pOutTileTmp->y = cTAppConvCfg->m_srd[idx].y; + pOutTileTmp->idx = idx; + SCVP_LOG(LOG_INFO, "final decision is idx %d and face_id %d\n", idx, pOutTileTmp->faceId); + pOutTileTmp++; + occupancyNum++; + } + idx--; + } + } + } + } + else + { + for (int32_t idFace = 0; idFace < faceNum; idFace++) + { + for (uint32_t col = 0; col < cTAppConvCfg->m_tileNumCol; col++) + { + for (uint32_t row = 0; row < cTAppConvCfg->m_tileNumRow; row++) + { + if (cTAppConvCfg->m_srd[idx].isOccupy == 1) + { + pOutTileTmp->faceId = cubeMapFaceMap[cTAppConvCfg->m_srd[idx].faceId]; + pOutTileTmp->x = cTAppConvCfg->m_srd[idx].x; + pOutTileTmp->y = cTAppConvCfg->m_srd[idx].y; + pOutTileTmp->idx = idx; + SCVP_LOG(LOG_INFO, "final decision is idx %d and face_id %d\n", idx, pOutTileTmp->faceId); + pOutTileTmp++; + occupancyNum++; + } + idx++; + } + } + } + } + return occupancyNum; +} + int32_t genViewport_unInit(void* pGenHandle) { TgenViewport* cTAppConvCfg = (TgenViewport*)(pGenHandle); @@ -415,20 +690,19 @@ int32_t genViewport_unInit(void* pGenHandle) if(pGenHandle) { free(pGenHandle); - pGenHandle = NULL; + pGenHandle = NULL; } return 0; } - TgenViewport::TgenViewport() { m_faceSizeAlignment = 8; m_pUpLeft = new SPos[FACE_NUMBER];// m_pDownRight = new SPos[FACE_NUMBER];// - memset(&m_codingSVideoInfo, 0, sizeof(SVideoInfo)); - memset(&m_sourceSVideoInfo, 0, sizeof(SVideoInfo)); + memset_s(&m_codingSVideoInfo, sizeof(SVideoInfo), 0); + memset_s(&m_sourceSVideoInfo, sizeof(SVideoInfo), 0); m_iCodingFaceWidth = 0; m_iCodingFaceHeight = 0; m_iSourceWidth = 0; @@ -439,27 +713,47 @@ TgenViewport::TgenViewport() m_iInputWidth = 0; m_iInputHeight = 0; m_maxTileNum = 0; + m_usageType = E_STREAM_STITCH_ONLY; m_numFaces = 0; - m_srd = new ITileInfo; + m_srd = NULL; + m_pViewportHorizontalBoudaryPoints = NULL; + m_paramVideoFP.cols = 0; + m_paramVideoFP.rows = 0; +} + +TgenViewport::TgenViewport(TgenViewport& src) +{ + m_faceSizeAlignment = src.m_faceSizeAlignment; + m_pUpLeft = new SPos[FACE_NUMBER]; + m_pDownRight = new SPos[FACE_NUMBER]; + memcpy_s(m_pUpLeft, sizeof(SPos), src.m_pUpLeft, sizeof(SPos)); + memcpy_s(m_pDownRight, sizeof(SPos), src.m_pDownRight, sizeof(SPos)); + m_codingSVideoInfo = src.m_codingSVideoInfo; + m_sourceSVideoInfo = src.m_sourceSVideoInfo; + m_iCodingFaceWidth = src.m_iCodingFaceHeight; + m_iCodingFaceHeight = src.m_iCodingFaceHeight; + m_iSourceWidth = src.m_iSourceWidth; + m_iSourceHeight = src.m_iSourceHeight; + m_tileNumCol = src.m_tileNumCol; + m_tileNumRow = src.m_tileNumRow; + m_iFrameRate = src.m_iFrameRate; + m_iInputWidth = src.m_iInputWidth; + m_iInputHeight = src.m_iInputHeight; + m_maxTileNum = src.m_maxTileNum; + m_usageType = src.m_usageType; + m_numFaces = src.m_numFaces; + m_srd = NULL; + m_pViewportHorizontalBoudaryPoints = NULL; + m_paramVideoFP.cols = src.m_paramVideoFP.cols; + m_paramVideoFP.rows = src.m_paramVideoFP.rows; } 
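The selection routines above report their decisions through SCVP_LOG, which forwards to the global logCallBack (GlogFunction by default) and can be overridden via the new TstitchStream::SetLogCallBack. A sketch of a client-side sink; the parameter shape is an assumption read off the PRINT_LOG expansion (level, source file, line, printf-style format, variadic arguments), and the real LogFunction typedef lives in src/utils/Log.h, which is outside this patch:

```cpp
#include <cstdarg>
#include <cstdio>

// Hypothetical log sink matching the shape implied by PRINT_LOG; the concrete
// level type and the LogFunction typedef come from src/utils/Log.h.
static void clientScvpLogSink(int level, const char* sourceFile, int line,
                              const char* fmt, ...)
{
    va_list args;
    va_start(args, fmt);
    std::fprintf(stderr, "[360SCVP][lvl %d] %s:%d ", level, sourceFile, line);
    std::vfprintf(stderr, fmt, args);
    va_end(args);
}

// Registration (illustrative, assuming LogFunction has this signature):
//   stitcher.SetLogCallBack(clientScvpLogSink);
```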
TgenViewport::~TgenViewport() { - if(m_pUpLeft) - { - delete[] m_pUpLeft; - m_pUpLeft = NULL; - } - if(m_pDownRight) - { - delete[] m_pDownRight; - m_pDownRight = NULL; - } - if(m_srd) - { - delete m_srd; - m_srd = NULL; - } + SAFE_DELETE_ARRAY(m_pUpLeft); + SAFE_DELETE_ARRAY(m_pDownRight); + SAFE_DELETE_ARRAY(m_srd); + SAFE_DELETE_ARRAY(m_pViewportHorizontalBoudaryPoints); } TgenViewport& TgenViewport::operator=(const TgenViewport& src) @@ -468,8 +762,8 @@ TgenViewport& TgenViewport::operator=(const TgenViewport& src) return *this; // memcpy(m_faceSizeAlignment, src.m_faceSizeAlignment, sizeof(int32_t)); this->m_faceSizeAlignment = src.m_faceSizeAlignment; - memcpy(this->m_pUpLeft, src.m_pUpLeft, sizeof(SPos)); - memcpy(this->m_pDownRight, src.m_pDownRight, sizeof(SPos)); + memcpy_s(this->m_pUpLeft, sizeof(SPos), src.m_pUpLeft, sizeof(SPos)); + memcpy_s(this->m_pDownRight, sizeof(SPos), src.m_pDownRight, sizeof(SPos)); this->m_codingSVideoInfo = src.m_codingSVideoInfo; this->m_sourceSVideoInfo = src.m_sourceSVideoInfo; this->m_iCodingFaceWidth = src.m_iCodingFaceHeight; @@ -481,7 +775,12 @@ TgenViewport& TgenViewport::operator=(const TgenViewport& src) this->m_iFrameRate = src.m_iFrameRate; this->m_iInputWidth = src.m_iInputWidth; this->m_iInputHeight = src.m_iInputHeight; - memcpy(this->m_srd, src.m_srd, sizeof(ITileInfo)); + this->m_usageType = src.m_usageType; + if (this->m_srd && src.m_srd) + { + int32_t totalTileInfoSize = FACE_NUMBER*m_tileNumRow*m_tileNumCol*sizeof(ITileInfo); + memcpy_s(this->m_srd, totalTileInfoSize, src.m_srd, totalTileInfoSize); + } return *this; } @@ -489,18 +788,28 @@ int32_t TgenViewport::create(uint32_t tileNumRow, uint32_t tileNumCol) { m_tileNumRow = tileNumRow; m_tileNumCol = tileNumCol; - m_srd = new ITileInfo[FACE_NUMBER*m_tileNumRow*m_tileNumCol]; - if (m_srd) - return 0; - else - return -1; + if (!m_srd) + { + m_srd = new ITileInfo[FACE_NUMBER*m_tileNumRow*m_tileNumCol]; + if (!m_srd) + return -1; + } + if (!m_pViewportHorizontalBoudaryPoints) + { + m_pViewportHorizontalBoudaryPoints = new SpherePoint[ERP_VERT_ANGLE / HORZ_BOUNDING_STEP + 1]; + if (!m_pViewportHorizontalBoudaryPoints) + return -1; + } + + return 0; } void TgenViewport::destroy() { - if(m_srd) - delete[] m_srd; - m_srd = NULL; + SAFE_DELETE_ARRAY(m_pUpLeft); + SAFE_DELETE_ARRAY(m_pDownRight); + SAFE_DELETE_ARRAY(m_srd); + SAFE_DELETE_ARRAY(m_pViewportHorizontalBoudaryPoints); } @@ -512,14 +821,14 @@ int32_t TgenViewport::parseCfg( ) { m_iFrameRate = 30; m_faceSizeAlignment = 1; - m_iSourceWidth = m_iInputWidth; - m_iSourceHeight = m_iInputHeight; + m_iSourceWidth = m_iInputWidth * m_paramVideoFP.cols; + m_iSourceHeight = m_iInputHeight * m_paramVideoFP.rows; m_sourceSVideoInfo.iNumFaces = 1; m_sourceSVideoInfo.iFaceWidth = m_iInputWidth; m_sourceSVideoInfo.iFaceHeight = m_iInputHeight; if(!m_faceSizeAlignment) { - printf("FaceSizeAlignment must be greater than 0, it is reset to 8 (default value).\n"); + SCVP_LOG(LOG_WARNING, "FaceSizeAlignment must be greater than 0, it is reset to 8 (default value)\n"); m_faceSizeAlignment = 8; } // if(m_faceSizeAlignment &1) // && numberToChromaFormat(tmpInputChromaFormat)==CHROMA_420 @@ -532,66 +841,727 @@ int32_t TgenViewport::parseCfg( ) m_codingSVideoInfo.iNumFaces = 1; m_codingSVideoInfo.iFaceWidth = m_iCodingFaceWidth ; m_codingSVideoInfo.iFaceHeight = m_iCodingFaceHeight; + m_codingSVideoInfo.fullWidth = m_iSourceWidth; + m_codingSVideoInfo.fullHeight = m_iSourceHeight; m_aiPad[1] = m_aiPad[0] = 0; if (m_tileNumCol == 0 || m_tileNumRow == 0) 
return -1; - // the following code is temporary, need to update accoridg to the API int32_t posY = 0; int32_t stepX = m_iInputWidth / m_tileNumCol; int32_t stepY = m_iInputHeight / m_tileNumRow; + float stepHorzPos = ERP_HORZ_ANGLE / (float)m_tileNumCol; + float stepVertPos = ERP_VERT_ANGLE / (float)m_tileNumRow; + float vertPos = ERP_VERT_START; int32_t idx = 0; int32_t faceNum = (m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) ? 6 : 1; m_sourceSVideoInfo.iNumFaces = faceNum; - for (int32_t faceid = 0; faceid < faceNum; faceid++) + if (m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) + { + for (int32_t faceid = 0; faceid < faceNum; faceid++) + { + for (uint32_t i = 0; i < m_tileNumRow; i++) + { + int32_t posX = 0; + for (uint32_t j = 0; j < m_tileNumCol; j++) + { + m_srd[idx].x = posX; + m_srd[idx].y = posY; + m_srd[idx].tilewidth = stepX; + m_srd[idx].tileheight = stepY; + m_srd[idx].faceId = faceid; + m_srd[idx].isOccupy = 0; + posX += stepX; + idx++; + } + posY += stepY; + } + posY = 0; + } + CubemapCalcTilesGrid(); + } + else //ERP uses uniform tile split { - for (uint32_t i = 0; i < m_tileNumRow; i++) + for (int32_t faceid = 0; faceid < faceNum; faceid++) { - int32_t posX = 0; - for (uint32_t j = 0; j < m_tileNumCol; j++) + for (uint32_t i = 0; i < m_tileNumRow; i++) { - m_srd[idx].x = posX; - m_srd[idx].y = posY; - m_srd[idx].tilewidth = stepX; - m_srd[idx].tileheight = stepY; - m_srd[idx].faceId = faceid; - m_srd[idx].isOccupy = 0; - posX += stepX; - idx++; + int32_t posX = 0; + float horzPos = ERP_HORZ_START; + for (uint32_t j = 0; j < m_tileNumCol; j++) + { + m_srd[idx].x = posX; + m_srd[idx].y = posY; + m_srd[idx].tilewidth = stepX; + m_srd[idx].tileheight = stepY; + m_srd[idx].faceId = faceid; + m_srd[idx].isOccupy = 0; + m_srd[idx].horzPos = horzPos; + m_srd[idx].vertPos = vertPos; + posX += stepX; + horzPos += stepHorzPos; + idx++; + } + posY += stepY; + vertPos -= stepVertPos; } - posY += stepY; + posY = 0; } } + return 0; } +static float clampAngle(float angleIn, float minDegree, float maxDegree) +{ + float angleOut; + angleOut = angleIn; -int32_t TgenViewport::convert() + while (angleOut > maxDegree) + angleOut -= 360; + + while (angleOut < minDegree) + angleOut += 360; + + return angleOut; +} + +int32_t TgenViewport::CubemapCalcTilesGrid() { - Geometry *pcInputGeomtry = NULL; - Geometry *pcCodingGeomtry = NULL; - pcInputGeomtry = Geometry::create(m_sourceSVideoInfo); - if(!pcInputGeomtry) - { - return -1; - } - pcCodingGeomtry = Geometry::create(m_codingSVideoInfo); - if (!pcCodingGeomtry) - { - delete pcInputGeomtry; - pcInputGeomtry = NULL; + uint32_t i, j, face; + POSType x, y, z; + POSType pu, pv; + ITileInfo* pTileGridCMP = m_srd; + + SPos* gridPoint3D; + SPos *pTileGrid; + + gridPoint3D = new SPos[FACE_NUMBER * (m_tileNumRow+1) * (m_tileNumCol+1)]; + if (!gridPoint3D) { + SCVP_LOG(LOG_ERROR, "Allocate 3D Grid Point Coordinates Failed\n"); return -1; } - // starting time - double dResult; - clock_t lBefore = clock(); + for (i = 0; i <= m_tileNumRow; i++) { + for (j = 0; j <= m_tileNumCol; j++) { + pu = (POSType)j*2 / m_tileNumCol - 1; + pv = (POSType)i*2 / m_tileNumRow - 1; + + pTileGrid = gridPoint3D + (i * (m_tileNumCol+1)) + j; + pTileGrid->x = 1.0; + pTileGrid->y = -pv; + pTileGrid->z = -pu; + + pTileGrid += (m_tileNumRow + 1) * (m_tileNumCol + 1); + pTileGrid->x = -1.0; + pTileGrid->y = -pv; + pTileGrid->z = pu; + + pTileGrid += (m_tileNumRow + 1) * (m_tileNumCol + 1); + pTileGrid->x = pu; + pTileGrid->y = 1.0; + pTileGrid->z = pv; + + pTileGrid += (m_tileNumRow + 1) 
* (m_tileNumCol + 1); + pTileGrid->x = pu; + pTileGrid->y = -1.0; + pTileGrid->z = -pv; + + pTileGrid += (m_tileNumRow + 1) * (m_tileNumCol + 1); + pTileGrid->x = pu; + pTileGrid->y = -pv; + pTileGrid->z = 1.0; + + pTileGrid += (m_tileNumRow + 1) * (m_tileNumCol + 1); + pTileGrid->x = -pu; + pTileGrid->y = -pv; + pTileGrid->z = -1.0; + } + } + pTileGrid = gridPoint3D; + pTileGridCMP = m_srd; + for (face = 0; face < FACE_NUMBER; face++) + for (i = 0; i < m_tileNumRow; i++) + for (j = 0; j < m_tileNumCol; j++) { + pTileGrid = &gridPoint3D[face * (m_tileNumRow + 1) * (m_tileNumCol + 1) + i * (m_tileNumRow + 1) + j]; + x = pTileGrid->x; + y = pTileGrid->y; + z = pTileGrid->z; + pTileGridCMP->vertPos = HALF_PI_IN_DEGREE * sasin(y / ssqrt(x * x + y * y + z * z))/ S_PI_2; + pTileGridCMP->horzPos = HALF_PI_IN_DEGREE * satan(-z / x) / S_PI_2; + if (x < 0) + pTileGridCMP->horzPos += PI_IN_DEGREE; + pTileGridCMP->horzPos = clampAngle(pTileGridCMP->horzPos, -180, 180); + + pTileGrid++; + x = pTileGrid->x; + y = pTileGrid->y; + z = pTileGrid->z; + pTileGridCMP->vertPosTopRight = HALF_PI_IN_DEGREE * sasin(y / ssqrt(x * x + y * y + z * z)) / S_PI_2; + pTileGridCMP->horzPosTopRight = HALF_PI_IN_DEGREE * satan(-z / x) / S_PI_2; + if (x < 0) + pTileGridCMP->horzPosTopRight += PI_IN_DEGREE; + pTileGridCMP->horzPosTopRight = clampAngle(pTileGridCMP->horzPosTopRight, -180, 180); + + pTileGrid += m_tileNumCol; + x = pTileGrid->x; + y = pTileGrid->y; + z = pTileGrid->z; + pTileGridCMP->vertPosBottomLeft = HALF_PI_IN_DEGREE * sasin(y / ssqrt(x * x + y * y + z * z)) / S_PI_2; + pTileGridCMP->horzPosBottomLeft = HALF_PI_IN_DEGREE * satan(-z / x) / S_PI_2; + if (x < 0) + pTileGridCMP->horzPosBottomLeft += PI_IN_DEGREE; + pTileGridCMP->horzPosBottomLeft = clampAngle(pTileGridCMP->horzPosBottomLeft, -180, 180); + + pTileGrid++; + x = pTileGrid->x; + y = pTileGrid->y; + z = pTileGrid->z; + pTileGridCMP->vertPosBottomRight = HALF_PI_IN_DEGREE * sasin(y / ssqrt(x * x + y * y + z * z)) / S_PI_2; + pTileGridCMP->horzPosBottomRight = HALF_PI_IN_DEGREE * satan(-z / x) / S_PI_2; + if (x < 0) + pTileGridCMP->horzPosBottomRight += PI_IN_DEGREE; + pTileGridCMP->horzPosBottomRight = clampAngle(pTileGridCMP->horzPosBottomRight, -180, 180); + + pTileGrid -= m_tileNumCol + 2; + pTileGridCMP++; + } + SAFE_DELETE_ARRAY(gridPoint3D); + return 0; +} - pcInputGeomtry->geoConvert(pcCodingGeomtry); - if (pcCodingGeomtry->getType() == SVIDEO_VIEWPORT) +float TgenViewport::calculateLongitudeFromThita(float Latti, float phi, float maxLongiOffset) //Latti is the point lattitude we're interested +{ + float longi = 0; + if (Latti < 90 - phi) + longi = sasin(ssin(DEG2RAD_FACTOR * phi) / scos(DEG2RAD_FACTOR * Latti)) * RAD2DEG_FACTOR; + else + longi = maxLongiOffset; + return longi; +} + +float TgenViewport::calculateLattitudeFromPhi(float phi, float pitch) //pitch is the top point lattitude +{ + float latti = 0; + latti = sasin(ssin(DEG2RAD_FACTOR * pitch) * scos(DEG2RAD_FACTOR * phi)) * RAD2DEG_FACTOR; + return latti; +} + +float TgenViewport::calculateLatti(float pitch, float hFOV) //pitch is the top point lattitude when yaw=pitch=0 +{ + double fDen, fNum; + fDen = ssin(DEG2RAD_FACTOR * hFOV / 2) * ssin(DEG2RAD_FACTOR * pitch); + fDen *= fDen; + fDen = 1 - fDen; + fNum = scos(DEG2RAD_FACTOR * pitch); + fNum *= fNum; + return sacos(sqrt(fNum / fDen))* RAD2DEG_FACTOR; +} + +float TgenViewport::calculateLongiByLatti(float latti, float pitch) //pitch is the curve top point's lattitude +{ + float instantPhi, ret; + instantPhi = sacos(ssin(latti * 
DEG2RAD_FACTOR) / ssin(pitch * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + ret = sasin(ssin(instantPhi * DEG2RAD_FACTOR) / scos(latti * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + return ret; +} + +int32_t TgenViewport::ERPselectTilesInsideOnOneRow(ITileInfo *pTileInfo, int32_t tileNumCol, float leftCol, float rightCol, int32_t row) +{ + if (leftCol >= 0 && rightCol < tileNumCol) + { + for (int32_t j = (int32_t)leftCol; j < rightCol; j++) + pTileInfo[j + row * tileNumCol].isOccupy = 1; + } + else if (leftCol < 0 && rightCol < tileNumCol) + { + if (rightCol >= 0) + { + for (int32_t j = 0; j < rightCol; j++) + pTileInfo[j + row * tileNumCol].isOccupy = 1; + for (int32_t j = (int32_t)(tileNumCol + leftCol); j < tileNumCol; j++) + pTileInfo[j + row * tileNumCol].isOccupy = 1; + } + else + { + for (int32_t j = (int32_t)(tileNumCol + leftCol); j < tileNumCol+rightCol; j++) + pTileInfo[j + row * tileNumCol].isOccupy = 1; + } + } + else if (leftCol >= 0 && rightCol >= tileNumCol) + { + if (leftCol < tileNumCol) + { + for (int32_t j = (int32_t)leftCol; j < tileNumCol; j++) + pTileInfo[j + row * tileNumCol].isOccupy = 1; + for (int32_t j = 0; j < rightCol - tileNumCol; j++) + pTileInfo[j + row * tileNumCol].isOccupy = 1; + } + else + { + for (int32_t j = (int32_t)(leftCol - tileNumCol); j < rightCol - tileNumCol; j++) + pTileInfo[j + row * tileNumCol].isOccupy = 1; + } + } + else + { + for (int32_t j = 0; j < tileNumCol; j++) + pTileInfo[j + row * tileNumCol].isOccupy = 1; + } + return 0; +} + +int32_t TgenViewport::ERPselectregion(short inputWidth, short inputHeight, short dstWidth, short dstHeight) +{ + float fYaw = m_codingSVideoInfo.viewPort.fYaw; + float fPitch = m_codingSVideoInfo.viewPort.fPitch; + float vFOV = m_codingSVideoInfo.viewPort.vFOV; + float hFOV = m_codingSVideoInfo.viewPort.hFOV; + + SCVP_LOG(LOG_INFO, "Yaw is %f and Pitch is %f\n", fYaw, fPitch); + SCVP_LOG(LOG_INFO, "vFOV is %f and hFOV is %f\n", vFOV, hFOV); + + // starting time + double dResult; + clock_t lBefore = clock(); + + float cal_yaw = fYaw + ERP_HORZ_ANGLE / 2; + float cal_pitch = ERP_VERT_ANGLE / 2 - fPitch; + float horzStep = ERP_HORZ_ANGLE / (float)m_tileNumCol; + float vertStep = ERP_VERT_ANGLE / (float)m_tileNumRow; + float leftCol, rightCol; + float thita, phi; + int32_t topRow = (int32_t)((cal_pitch - vFOV / 2) / vertStep); + int32_t bottomRow = (int32_t)((cal_pitch + vFOV / 2) / vertStep); + SpherePoint topPoint, bottomPoint; + SpherePoint topLeftPoint, topRightPoint, bottomLeftPoint, bottomRightPoint; + float slope, longiOffsetOnHorzBoundary; + float vertPos, vertPosBottom; + SPos* pTmpUpLeft = m_pUpLeft; + SPos* pTmpDownRight = m_pDownRight; + + for (uint32_t i = 0; i < m_tileNumCol * m_tileNumRow; i++) + { + m_srd[i].horzPosTopRight = m_srd[i].horzPosBottomRight = m_srd[i].horzPos + horzStep; + m_srd[i].horzPosBottomLeft = m_srd[i].horzPos; + m_srd[i].vertPosBottomLeft = m_srd[i].vertPosBottomRight = m_srd[i].vertPos - vertStep; + m_srd[i].vertPosTopRight = m_srd[i].vertPos; + m_srd[i].isOccupy = 0; + } + for (uint32_t i = 0; i < FACE_NUMBER; i++) + { + m_pUpLeft[i].x = 0; + m_pUpLeft[i].y = 0; + m_pDownRight[i].x = inputWidth; + m_pDownRight[i].y = inputHeight; + m_pUpLeft[i].faceIdx = -1; + m_pDownRight[i].faceIdx = -1; + } + pTmpUpLeft->faceIdx = 0; + pTmpDownRight->faceIdx = 0; + + /* Calculate the topLeft point lattitude when yaw=pitch=0 */ + thita = calculateLatti(vFOV / 2, hFOV); + /* Phi is half of the open angle of the topLeft/topRight point with the sphere center, which won't change under different pitch/yaw 
*/ + phi = sacos(ssin(thita * DEG2RAD_FACTOR) / ssin((vFOV/2) * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + /* Calculate the topLeft/topRight point position with current pitch */ + topLeftPoint.thita = topRightPoint.thita = sasin(scos(phi * DEG2RAD_FACTOR) * ssin((fPitch+vFOV/2) * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + topLeftPoint.alpha = cal_yaw - fabs(sasin(ssin(phi * DEG2RAD_FACTOR) / scos(topLeftPoint.thita * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR); + topRightPoint.alpha = cal_yaw + fabs(sasin(ssin(phi * DEG2RAD_FACTOR) / scos(topLeftPoint.thita * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR); + bottomLeftPoint.thita = bottomRightPoint.thita = sasin(scos(phi * DEG2RAD_FACTOR) * ssin((fPitch - vFOV / 2) * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + bottomLeftPoint.alpha = cal_yaw - fabs(sasin(ssin(phi * DEG2RAD_FACTOR) / scos(bottomLeftPoint.thita * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR); + bottomRightPoint.alpha = cal_yaw + fabs(sasin(ssin(phi * DEG2RAD_FACTOR) / scos(bottomLeftPoint.thita * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR); + + /* Calculate top/bottom point position */ + topPoint.thita = fPitch + vFOV / 2; + bottomPoint.thita = fPitch - vFOV / 2; + topPoint.alpha = bottomPoint.alpha = cal_yaw; + + /* Calculate boundary thita/alpha between every 180/(5*m_tileNumRow) degrees */ + SpherePoint* pHorzBoundaryPoint = m_pViewportHorizontalBoudaryPoints; + SpherePoint* pHorzBoundaryPointHist = pHorzBoundaryPoint; + + for (float offsetAngle = vFOV / 2; offsetAngle >= -vFOV / 2; offsetAngle -= HORZ_BOUNDING_STEP) + { + float instantThita = calculateLatti(offsetAngle, hFOV); + float instantPhi; + if (fabs(offsetAngle) <= 1e-9) + instantPhi = hFOV / 2; + else + instantPhi = sacos(ssin(instantThita * DEG2RAD_FACTOR) / ssin((fabs(offsetAngle)) * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + pHorzBoundaryPoint->thita = calculateLattitudeFromPhi(instantPhi, fPitch + offsetAngle); + pHorzBoundaryPoint->alpha = calculateLongitudeFromThita(pHorzBoundaryPoint->thita, instantPhi, hFOV/2); + pHorzBoundaryPointHist = pHorzBoundaryPoint; + pHorzBoundaryPoint++; + } + + float topLatti, bottomLatti; + float leftColOnCurrentLine, rightColOnCurrentLine; + float leftColOnHorzBound, rightColOnHorzBound; + + if (fPitch + vFOV / 2 > 90) //Top row crosses the north polar + { + topLatti = topLeftPoint.thita; + topRow = (int32_t)((ERP_VERT_ANGLE / 2 - topLatti) / vertStep); + leftCol = topLeftPoint.alpha / horzStep; + rightCol = topRightPoint.alpha / horzStep; + + for (int32_t i = 0; i <= topRow; i++) + { + vertPos = m_srd[i * m_tileNumCol].vertPos; + if (vertPos >= ERP_VERT_ANGLE - (fPitch + vFOV / 2)) + { + /* Select all tiles in current row */ + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, 0, m_tileNumCol, i); + } + else { + leftColOnCurrentLine = (cal_yaw - 180 + calculateLongiByLatti(vertPos, fPitch + vFOV / 2)) / horzStep; + rightColOnCurrentLine = (cal_yaw + 180 - calculateLongiByLatti(vertPos, fPitch + vFOV / 2)) / horzStep; + pHorzBoundaryPoint = m_pViewportHorizontalBoudaryPoints; + pHorzBoundaryPointHist = pHorzBoundaryPoint; + pHorzBoundaryPoint++; + longiOffsetOnHorzBoundary = 90; + for (int32_t j = 0; j <= vFOV / HORZ_BOUNDING_STEP; j++) + { + if (vertPos >= pHorzBoundaryPoint->thita && vertPos <= pHorzBoundaryPointHist->thita) + { + slope = (pHorzBoundaryPoint->alpha - pHorzBoundaryPointHist->alpha) / (pHorzBoundaryPoint->thita - pHorzBoundaryPointHist->thita); + longiOffsetOnHorzBoundary = slope * (vertPos - pHorzBoundaryPointHist->thita) + pHorzBoundaryPointHist->alpha; + break; + } + else + { + pHorzBoundaryPoint++; + 
pHorzBoundaryPointHist++; + } + } + leftColOnHorzBound = (cal_yaw - 180 + longiOffsetOnHorzBoundary) / horzStep; + rightColOnHorzBound = (cal_yaw +180 - longiOffsetOnHorzBoundary) / horzStep; + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnCurrentLine, leftColOnHorzBound, i); + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, rightColOnHorzBound, rightColOnCurrentLine, i); + } + } + } + else if (fPitch + vFOV / 2 < 0) //Top row below the equator + { + topLatti = topLeftPoint.thita; + topRow = (int32_t)((ERP_VERT_ANGLE / 2 - topLatti) / vertStep); + leftCol = topLeftPoint.alpha / horzStep; + rightCol = topRightPoint.alpha / horzStep; + int32_t i; + for (i = topRow; i <= (int32_t)((ERP_VERT_ANGLE / 2 - topPoint.thita) / vertStep); i++) + { + vertPosBottom = m_srd[i * m_tileNumCol].vertPosBottomLeft; + pHorzBoundaryPoint = m_pViewportHorizontalBoudaryPoints; + pHorzBoundaryPointHist = pHorzBoundaryPoint; + pHorzBoundaryPoint++; + longiOffsetOnHorzBoundary = 90; + for (int32_t j = 0; j < vFOV / HORZ_BOUNDING_STEP; j++) + { + if (vertPosBottom >= pHorzBoundaryPoint->thita && vertPosBottom <= pHorzBoundaryPointHist->thita) + { + slope = (pHorzBoundaryPoint->alpha - pHorzBoundaryPointHist->alpha) / (pHorzBoundaryPoint->thita - pHorzBoundaryPointHist->thita); + longiOffsetOnHorzBoundary = slope * (vertPosBottom - pHorzBoundaryPointHist->thita) + pHorzBoundaryPointHist->alpha; + break; + } + else + { + pHorzBoundaryPoint++; + pHorzBoundaryPointHist++; + } + } + + leftColOnHorzBound = (cal_yaw - longiOffsetOnHorzBoundary) / horzStep; + rightColOnHorzBound = (cal_yaw + longiOffsetOnHorzBoundary) / horzStep; + if (vertPosBottom <= topPoint.thita) + { + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnHorzBound, rightColOnHorzBound, i); //To be modified with left/right boundary + } + else + { + leftColOnCurrentLine = (cal_yaw - calculateLongiByLatti(vertPosBottom, topPoint.thita)) / horzStep; + rightColOnCurrentLine = (cal_yaw + calculateLongiByLatti(vertPosBottom, topPoint.thita)) / horzStep; + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnHorzBound, leftColOnCurrentLine, i); //To be modified with left/right boundary + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, rightColOnCurrentLine, rightColOnHorzBound, i); //To be modified with left/right boundary + } + } + if (fPitch - vFOV / 2 <= -ERP_VERT_ANGLE / 2) + { + for (; i < (int32_t)m_tileNumRow; i++) + { + vertPosBottom = m_srd[i * m_tileNumCol].vertPosBottomLeft; + longiOffsetOnHorzBoundary = 90; + pHorzBoundaryPoint = m_pViewportHorizontalBoudaryPoints; + pHorzBoundaryPointHist = pHorzBoundaryPoint; + pHorzBoundaryPoint++; + for (int32_t j = 0; j < vFOV / HORZ_BOUNDING_STEP; j++) + { + if (vertPosBottom >= pHorzBoundaryPoint->thita && vertPosBottom <= pHorzBoundaryPointHist->thita) + { + slope = (pHorzBoundaryPoint->alpha - pHorzBoundaryPointHist->alpha) / (pHorzBoundaryPoint->thita - pHorzBoundaryPointHist->thita); + longiOffsetOnHorzBoundary = slope * (vertPosBottom - pHorzBoundaryPointHist->thita) + pHorzBoundaryPointHist->alpha; + break; + } + else + { + pHorzBoundaryPoint++; + pHorzBoundaryPointHist++; + } + } + + leftColOnHorzBound = (cal_yaw - longiOffsetOnHorzBoundary) / horzStep; + rightColOnHorzBound = (cal_yaw + longiOffsetOnHorzBoundary) / horzStep; + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnHorzBound, rightColOnHorzBound, i); + } + } + } + else //normal top row between north polar and equator + { + topLatti = topLeftPoint.thita; + + if (topLatti == 0) + topRow = m_tileNumRow / 
2; + else + topRow = (int32_t)((ERP_VERT_ANGLE / 2 - topLatti) / vertStep); + leftCol = topLeftPoint.alpha / horzStep; + rightCol = topRightPoint.alpha / horzStep; + + /* Select tiles on the row of viewport's topLeft/topRight Point */ + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftCol, rightCol, topRow); + + /* Select tiles on rows between viewport's topLeft/topRightPoint and topPoint */ + if (topPoint.thita != 0) { + if (topRow > (ERP_VERT_ANGLE / 2 - topPoint.thita) / vertStep) { + for (int32_t i = topRow-1; i >= (int32_t)((ERP_VERT_ANGLE / 2 - topPoint.thita) / vertStep); i--) + { + vertPos = m_srd[i * m_tileNumCol].vertPosBottomLeft; + + leftColOnCurrentLine = (cal_yaw - calculateLongiByLatti(vertPos, topPoint.thita)) / horzStep; + rightColOnCurrentLine = (cal_yaw + calculateLongiByLatti(vertPos, topPoint.thita)) / horzStep; + if (leftColOnCurrentLine != rightColOnCurrentLine) + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnCurrentLine, rightColOnCurrentLine, i); + } + } + } + } + + if (fPitch - vFOV / 2 < -ERP_VERT_ANGLE /2) //Bottom row crosses the south polar + { + bottomLatti = bottomLeftPoint.thita; + bottomRow = (int32_t)((ERP_VERT_ANGLE / 2 - bottomLatti) / vertStep); + leftCol = bottomLeftPoint.alpha / horzStep; + rightCol = bottomRightPoint.alpha / horzStep; + + for (int32_t i = m_tileNumRow-1; i >= bottomRow; i--) + { + vertPosBottom = m_srd[i * m_tileNumCol].vertPosBottomLeft; + if (vertPosBottom <= -ERP_VERT_ANGLE - (fPitch - vFOV / 2)) + { + /* Select all tiles in current row */ + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, 0, m_tileNumCol, i); + } + else { + leftColOnCurrentLine = (cal_yaw - 180 + calculateLongiByLatti(vertPosBottom, -ERP_VERT_ANGLE - (fPitch - vFOV / 2))) / horzStep; + rightColOnCurrentLine = (cal_yaw + 180 - calculateLongiByLatti(vertPosBottom, -ERP_VERT_ANGLE - (fPitch - vFOV / 2))) / horzStep; + pHorzBoundaryPoint = m_pViewportHorizontalBoudaryPoints; + pHorzBoundaryPointHist = pHorzBoundaryPoint; + pHorzBoundaryPoint++; + longiOffsetOnHorzBoundary = 90; + for (int32_t j = 0; j < vFOV / HORZ_BOUNDING_STEP; j++) + { + if (vertPosBottom >= pHorzBoundaryPoint->thita && vertPosBottom <= pHorzBoundaryPointHist->thita) + { + slope = (pHorzBoundaryPoint->alpha - pHorzBoundaryPointHist->alpha) / (pHorzBoundaryPoint->thita - pHorzBoundaryPointHist->thita); + longiOffsetOnHorzBoundary = slope * (vertPosBottom - pHorzBoundaryPointHist->thita) + pHorzBoundaryPointHist->alpha; + break; + } + else + { + pHorzBoundaryPoint++; + pHorzBoundaryPointHist++; + } + } + leftColOnHorzBound = (cal_yaw - 180 + longiOffsetOnHorzBoundary) / horzStep; + rightColOnHorzBound = (cal_yaw + 180 - longiOffsetOnHorzBoundary) / horzStep; + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnCurrentLine, leftColOnHorzBound, i); + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, rightColOnHorzBound, rightColOnCurrentLine, i); + } + } + } + else if (fPitch - vFOV / 2 > 0) //Bottom row above the equator + { + bottomLatti = bottomLeftPoint.thita; + bottomRow = (int32_t)((ERP_VERT_ANGLE / 2 - bottomLatti) / vertStep); + leftCol = bottomLeftPoint.alpha / horzStep; + rightCol = bottomRightPoint.alpha / horzStep; + int32_t i; + for (i = bottomRow; i >= (int32_t)((ERP_VERT_ANGLE / 2 - bottomPoint.thita) / vertStep); i--) + { + vertPos = m_srd[i * m_tileNumCol].vertPos; + longiOffsetOnHorzBoundary = 90; + pHorzBoundaryPoint = m_pViewportHorizontalBoudaryPoints; + pHorzBoundaryPointHist = pHorzBoundaryPoint; + pHorzBoundaryPoint++; + for (int32_t j = 0; j < vFOV 
/ HORZ_BOUNDING_STEP; j++) + { + if (vertPos >= pHorzBoundaryPoint->thita && vertPos <= pHorzBoundaryPointHist->thita) + { + slope = (pHorzBoundaryPoint->alpha - pHorzBoundaryPointHist->alpha) / (pHorzBoundaryPoint->thita - pHorzBoundaryPointHist->thita); + longiOffsetOnHorzBoundary = slope * (vertPos - pHorzBoundaryPointHist->thita) + pHorzBoundaryPointHist->alpha; + break; + } + else + { + pHorzBoundaryPoint++; + pHorzBoundaryPointHist++; + } + } + + leftColOnHorzBound = (cal_yaw - longiOffsetOnHorzBoundary) / horzStep; + rightColOnHorzBound = (cal_yaw + longiOffsetOnHorzBoundary) / horzStep; + if (vertPos >= bottomPoint.thita) + { + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnHorzBound, rightColOnHorzBound, i); + } + else { + leftColOnCurrentLine = (cal_yaw - calculateLongiByLatti(vertPos, bottomPoint.thita)) / horzStep; + rightColOnCurrentLine = (cal_yaw + calculateLongiByLatti(vertPos, bottomPoint.thita)) / horzStep; + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnHorzBound, leftColOnCurrentLine, i); + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, rightColOnCurrentLine, rightColOnHorzBound, i); + } + } + if (fPitch + vFOV / 2 >= ERP_VERT_ANGLE / 2) + { + for (; i >= 0; i--) + { + vertPos = m_srd[i * m_tileNumCol].vertPos; + longiOffsetOnHorzBoundary = 90; + pHorzBoundaryPoint = m_pViewportHorizontalBoudaryPoints; + pHorzBoundaryPointHist = pHorzBoundaryPoint; + pHorzBoundaryPoint++; + for (int32_t j = 0; j < vFOV / HORZ_BOUNDING_STEP; j++) + { + if (vertPos >= pHorzBoundaryPoint->thita && vertPos <= pHorzBoundaryPointHist->thita) + { + slope = (pHorzBoundaryPoint->alpha - pHorzBoundaryPointHist->alpha) / (pHorzBoundaryPoint->thita - pHorzBoundaryPointHist->thita); + longiOffsetOnHorzBoundary = slope * (vertPos - pHorzBoundaryPointHist->thita) + pHorzBoundaryPointHist->alpha; + break; + } + else + { + pHorzBoundaryPoint++; + pHorzBoundaryPointHist++; + } + } + + leftColOnHorzBound = (cal_yaw - longiOffsetOnHorzBoundary) / horzStep; + rightColOnHorzBound = (cal_yaw + longiOffsetOnHorzBoundary) / horzStep; + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnHorzBound, rightColOnHorzBound, i); + } + } + } + else { //normal bottom row between south polar and equator + bottomLatti = bottomLeftPoint.thita; + if (bottomLatti == 0) + bottomRow = m_tileNumRow / 2 - 1; + else + bottomRow = (int32_t)((ERP_VERT_ANGLE / 2 - bottomLatti) / vertStep); + leftCol = bottomLeftPoint.alpha / horzStep; + rightCol = bottomRightPoint.alpha / horzStep; + + /* Select tiles on the row of viewport's bottomLeft/bottomRight Point */ + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftCol, rightCol, bottomRow); + + /* Select tiles on rows between viewport's bottomLeft/bottomRight and bottomPoint */ + if (bottomPoint.thita != 0) { + if (bottomRow < (ERP_VERT_ANGLE / 2 - bottomPoint.thita) / vertStep) { + for (int32_t i = bottomRow+1; i <= (int32_t)((ERP_VERT_ANGLE / 2 - bottomPoint.thita) / vertStep); i++) + { + vertPos = m_srd[i * m_tileNumCol].vertPos; + leftColOnCurrentLine = (cal_yaw - calculateLongiByLatti(vertPos, bottomPoint.thita)) / horzStep; + rightColOnCurrentLine = (cal_yaw + calculateLongiByLatti(vertPos, bottomPoint.thita)) / horzStep; + if (leftColOnCurrentLine != rightColOnCurrentLine) + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnCurrentLine, rightColOnCurrentLine, i); + } + } + } + } + + if (topRow != bottomRow) + { + int32_t startRow, endRow; + if (fPitch > 0) { + /* The topLeft point has the biggest longitude offset for the topRow, then 
start from topRow+1 */ + startRow = topRow + 1; + endRow = bottomRow; + } + else { + /* The bottomLeft point has the biggest longitude offset for the bottomRow, so end at bottomRow-1 */ + startRow = topRow; + endRow = bottomRow - 1; + } + for (int32_t i = startRow; i <= endRow; i++) + { + if (fPitch > 0) + vertPos = m_srd[i * m_tileNumCol].vertPos; + else + vertPos = m_srd[i * m_tileNumCol].vertPosBottomLeft; + + pHorzBoundaryPoint = m_pViewportHorizontalBoudaryPoints; + pHorzBoundaryPointHist = pHorzBoundaryPoint; + pHorzBoundaryPoint++; + longiOffsetOnHorzBoundary = 90; + for (int32_t j = 0; j < vFOV / HORZ_BOUNDING_STEP; j++) + { + if (vertPos >= pHorzBoundaryPoint->thita && vertPos <= pHorzBoundaryPointHist->thita) + { + slope = (pHorzBoundaryPoint->alpha - pHorzBoundaryPointHist->alpha) / (pHorzBoundaryPoint->thita - pHorzBoundaryPointHist->thita); + longiOffsetOnHorzBoundary = slope * (vertPos - pHorzBoundaryPointHist->thita) + pHorzBoundaryPointHist->alpha; + break; + } + else + { + pHorzBoundaryPoint++; + pHorzBoundaryPointHist++; + } + } + + leftColOnHorzBound = (cal_yaw - longiOffsetOnHorzBoundary) / horzStep; + rightColOnHorzBound = (cal_yaw + longiOffsetOnHorzBoundary) / horzStep; + ERPselectTilesInsideOnOneRow(m_srd, m_tileNumCol, leftColOnHorzBound, rightColOnHorzBound, i); //To be modified with left/right boundary + } + } + + dResult = (double)(clock() - lBefore) / CLOCKS_PER_SEC; + SCVP_LOG(LOG_INFO, "Total Time for tile selection: %f s\n", dResult); + return 0; +} + +int32_t TgenViewport::convert() +{ + Geometry *pcInputGeomtry = NULL; + Geometry *pcCodingGeomtry = NULL; + pcInputGeomtry = Geometry::create(m_sourceSVideoInfo); + if(!pcInputGeomtry) + { + return -1; + } + pcCodingGeomtry = Geometry::create(m_codingSVideoInfo); + if (pcCodingGeomtry == NULL) { + SAFE_DELETE(pcInputGeomtry); + return ERROR_INVALID; + } + + // starting time + double dResult; + clock_t lBefore = clock(); + + pcInputGeomtry->geoConvert(pcCodingGeomtry); + + if (pcCodingGeomtry->getType() == SVIDEO_VIEWPORT) { ViewPort* pViewPort = (ViewPort*)pcCodingGeomtry; m_numFaces = pViewPort->m_numFaces; @@ -616,18 +1586,10 @@ int32_t TgenViewport::convert() } dResult = (double)(clock() - lBefore) / CLOCKS_PER_SEC; - printf("\n Total Time: %12.3f sec.\n", dResult); + SCVP_LOG(LOG_INFO, "Total Time: %f second. 
\n", dResult); - if(pcInputGeomtry) - { - delete pcInputGeomtry; - pcInputGeomtry=NULL; - } - if(pcCodingGeomtry) - { - delete pcCodingGeomtry; - pcCodingGeomtry=NULL; - } + SAFE_DELETE(pcInputGeomtry); + SAFE_DELETE(pcCodingGeomtry); return 0; } bool TgenViewport::isInside(int32_t x, int32_t y, int32_t width, int32_t height, int32_t faceId) @@ -651,27 +1613,33 @@ bool TgenViewport::isInside(int32_t x, int32_t y, int32_t width, int32_t height, if (bFind == 0) return ret; + if (x + width > pUpLeft->x && + pUpLeft->x + (pDownRight->x - pUpLeft->x) > x && + y + height > pUpLeft->y && + pUpLeft->y + (pDownRight->y - pUpLeft->y) > y) + ret = 1; +/* if ((x >= pUpLeft->x) - && (x <= pDownRight->x) + && (x < pDownRight->x) && (y >= pUpLeft->y) - && (y <= pDownRight->y)) + && (y < pDownRight->y)) ret = 1; else if ((x + width >= pUpLeft->x) - && (x + width <= pDownRight->x) + && (x + width < pDownRight->x) && (y + height >= pUpLeft->y) - && (y + height <= pDownRight->y)) + && (y + height < pDownRight->y)) ret = 1; else if ((x + width >= pUpLeft->x) - && (x + width <= pDownRight->x) + && (x + width < pDownRight->x) && (y >= pUpLeft->y) - && (y <= pDownRight->y)) + && (y < pDownRight->y)) ret = 1; else if ((x >= pUpLeft->x) - && (x <= pDownRight->x) + && (x < pDownRight->x) && (y + height >= pUpLeft->y) - && (y + height <= pDownRight->y)) + && (y + height < pDownRight->y)) ret = 1; - +*/ //for erp format source, need to judge the boudary if (m_sourceSVideoInfo.geoType == SVIDEO_EQUIRECT) { @@ -704,6 +1672,499 @@ bool TgenViewport::isInside(int32_t x, int32_t y, int32_t width, int32_t height, return ret; } +int32_t TgenViewport::CubemapGetFaceBoundaryCrossingPoints(SpherePoint* upLeftPoint, SpherePoint* downRightPoint, int32_t faceWidth, int32_t faceHeight, std::list* crossBoundaryPoints) +{ + SpherePoint crossUpLeftPoint, crossDownRightPoint; + POSType newAX, newAY; + + if ( !upLeftPoint || !downRightPoint) { + SCVP_LOG(LOG_ERROR, "The given sphere point is NULL!\n"); + return ERROR_NULL_PTR; + } + if (faceWidth <= 0 || faceHeight <= 0) { + SCVP_LOG(LOG_ERROR, "The face width/height is less than 0!\n"); + return ERROR_INVALID; + } + + /* No crossing point should be added when the given two points are inside the same face */ + if (upLeftPoint->cord2D.faceIdx == downRightPoint->cord2D.faceIdx) + return ERROR_NONE; + + /* CrossPointA is the coordinate on face A, * + * CrossPointB is the coordinate on face B */ + crossUpLeftPoint.cord2D.faceIdx = upLeftPoint->cord2D.faceIdx; + crossDownRightPoint.cord2D.faceIdx = downRightPoint->cord2D.faceIdx; + newAX = 0; + newAY = 0; + if (upLeftPoint->cord2D.faceIdx == FACE_PY) { + /* In case the top point locates in top face */ + switch (downRightPoint->cord2D.faceIdx) { + case FACE_PX: + newAX = faceHeight - upLeftPoint->cord2D.y; + newAY = upLeftPoint->cord2D.x; + break; + case FACE_NX: + newAX = upLeftPoint->cord2D.y; + newAY = faceWidth - upLeftPoint->cord2D.x; + break; + case FACE_PZ: + newAX = upLeftPoint->cord2D.x; + newAY = upLeftPoint->cord2D.y; + break; + case FACE_NZ: + newAX = faceWidth - upLeftPoint->cord2D.x; + newAY = faceHeight - upLeftPoint->cord2D.y; + default:break; + } + float slope = (newAX - downRightPoint->cord2D.x) / (newAY - downRightPoint->cord2D.y - faceWidth); + crossDownRightPoint.cord2D.x = -slope * downRightPoint->cord2D.y + downRightPoint->cord2D.x; + crossDownRightPoint.cord2D.y = 0; + switch (downRightPoint->cord2D.faceIdx) { + case FACE_PX: + crossUpLeftPoint.cord2D.x = faceWidth; + crossUpLeftPoint.cord2D.y = faceHeight - 
crossDownRightPoint.cord2D.x; + break; + case FACE_NX: + crossUpLeftPoint.cord2D.x = 0; + crossUpLeftPoint.cord2D.y = crossDownRightPoint.cord2D.x; + break; + case FACE_PZ: + crossUpLeftPoint.cord2D.x = crossDownRightPoint.cord2D.x; + crossUpLeftPoint.cord2D.y = faceHeight; + break; + case FACE_NZ: + crossUpLeftPoint.cord2D.x = faceWidth - crossDownRightPoint.cord2D.x; + crossUpLeftPoint.cord2D.y = 0; + break; + default:break; + } + } + else if (downRightPoint->cord2D.faceIdx == FACE_PY) { + /* In case the top point locates in top face */ + switch (upLeftPoint->cord2D.faceIdx) { + case FACE_PX: + newAX = faceHeight - downRightPoint->cord2D.y; + newAY = downRightPoint->cord2D.x; + break; + case FACE_NX: + newAX = downRightPoint->cord2D.y; + newAY = faceWidth - downRightPoint->cord2D.x; + break; + case FACE_PZ: + newAX = downRightPoint->cord2D.x; + newAY = downRightPoint->cord2D.y; + break; + case FACE_NZ: + newAX = faceWidth - downRightPoint->cord2D.x; + newAY = faceHeight - downRightPoint->cord2D.y; + break; + default:break; + } + float slope = (newAX - upLeftPoint->cord2D.x) / (newAY - upLeftPoint->cord2D.y - faceWidth); + crossUpLeftPoint.cord2D.x = slope * (-upLeftPoint->cord2D.y) + upLeftPoint->cord2D.x; + crossUpLeftPoint.cord2D.y = 0; + switch (upLeftPoint->cord2D.faceIdx) { + case FACE_PX: + crossDownRightPoint.cord2D.x = faceWidth; + crossDownRightPoint.cord2D.y = faceHeight - crossUpLeftPoint.cord2D.x; + break; + case FACE_NX: + crossDownRightPoint.cord2D.x = 0; + crossDownRightPoint.cord2D.y = crossUpLeftPoint.cord2D.x; + break; + case FACE_PZ: + crossDownRightPoint.cord2D.x = crossUpLeftPoint.cord2D.x; + crossDownRightPoint.cord2D.y = faceHeight; + break; + case FACE_NZ: + crossDownRightPoint.cord2D.x = faceWidth - crossUpLeftPoint.cord2D.x; + crossDownRightPoint.cord2D.y = 0; + break; + default:break; + } + } + /* In case the bottom point locates in bottom face */ + else if (downRightPoint->cord2D.faceIdx == FACE_NY) { + switch (upLeftPoint->cord2D.faceIdx) { + case FACE_PX: + newAX = downRightPoint->cord2D.y; + newAY = faceWidth - downRightPoint->cord2D.x; + break; + case FACE_NX: + newAX = faceWidth - downRightPoint->cord2D.y; + newAY = downRightPoint->cord2D.x; + break; + case FACE_PZ: + newAX = downRightPoint->cord2D.x; + newAY = downRightPoint->cord2D.y; + break; + case FACE_NZ: + newAX = faceWidth - downRightPoint->cord2D.x; + newAY = faceHeight - downRightPoint->cord2D.y; + break; + default:break; + } + float slope = (upLeftPoint->cord2D.x - newAX) / (upLeftPoint->cord2D.y - faceHeight - newAY); + crossUpLeftPoint.cord2D.x = slope * (faceHeight - upLeftPoint->cord2D.y) + upLeftPoint->cord2D.x; + crossUpLeftPoint.cord2D.y = faceHeight; + switch (upLeftPoint->cord2D.faceIdx) { + case FACE_PX: + crossDownRightPoint.cord2D.x = faceWidth; + crossDownRightPoint.cord2D.y = crossUpLeftPoint.cord2D.x; + break; + case FACE_NX: + crossDownRightPoint.cord2D.x = 0; + crossDownRightPoint.cord2D.y = faceHeight - crossUpLeftPoint.cord2D.x; + break; + case FACE_PZ: + crossDownRightPoint.cord2D.x = crossUpLeftPoint.cord2D.x; + crossDownRightPoint.cord2D.y = 0; + break; + case FACE_NZ: + crossDownRightPoint.cord2D.x = faceWidth - crossUpLeftPoint.cord2D.x; + crossDownRightPoint.cord2D.y = faceHeight; + break; + default:break; + } + } + else if (upLeftPoint->cord2D.faceIdx == FACE_NY) { + switch (downRightPoint->cord2D.faceIdx) { + case FACE_PX: + newAX = upLeftPoint->cord2D.y; + newAY = faceWidth - upLeftPoint->cord2D.x; + break; + case FACE_NX: + newAX = faceWidth - upLeftPoint->cord2D.y; 
+ newAY = upLeftPoint->cord2D.x; + break; + case FACE_PZ: + newAX = upLeftPoint->cord2D.x; + newAY = upLeftPoint->cord2D.y; + break; + case FACE_NZ: + newAX = faceWidth - upLeftPoint->cord2D.x; + newAY = faceHeight - upLeftPoint->cord2D.y; + break; + default:break; + } + float slope = (downRightPoint->cord2D.x - newAX) / (downRightPoint->cord2D.y - faceHeight - newAY); + crossDownRightPoint.cord2D.x = slope * (faceHeight - downRightPoint->cord2D.y) + downRightPoint->cord2D.x; + crossDownRightPoint.cord2D.y = faceHeight; + switch (downRightPoint->cord2D.faceIdx) { + case FACE_PX: + crossUpLeftPoint.cord2D.x = faceWidth; + crossUpLeftPoint.cord2D.y = crossDownRightPoint.cord2D.x; + break; + case FACE_NX: + crossUpLeftPoint.cord2D.x = 0; + crossUpLeftPoint.cord2D.y = faceHeight - crossDownRightPoint.cord2D.x; + break; + case FACE_PZ: + crossUpLeftPoint.cord2D.x = crossDownRightPoint.cord2D.x; + crossUpLeftPoint.cord2D.y = 0; + break; + case FACE_NZ: + crossUpLeftPoint.cord2D.x = faceWidth - crossDownRightPoint.cord2D.x; + crossUpLeftPoint.cord2D.y = faceHeight; + break; + default:break; + } + } + /* In case the two points are not in the same face */ + else if (upLeftPoint->cord2D.faceIdx != downRightPoint->cord2D.faceIdx) { + if (((upLeftPoint->cord2D.faceIdx == FACE_PX) && (downRightPoint->cord2D.faceIdx == FACE_NZ)) + || ((upLeftPoint->cord2D.faceIdx == FACE_NZ) && (downRightPoint->cord2D.faceIdx == FACE_NX)) + || ((upLeftPoint->cord2D.faceIdx == FACE_NX) && (downRightPoint->cord2D.faceIdx == FACE_PZ)) + || ((upLeftPoint->cord2D.faceIdx == FACE_PZ) && (downRightPoint->cord2D.faceIdx == FACE_PX))) { + float slope = (downRightPoint->cord2D.y - upLeftPoint->cord2D.y) / (downRightPoint->cord2D.x + faceWidth - upLeftPoint->cord2D.x); + crossUpLeftPoint.cord2D.x = faceWidth; + crossUpLeftPoint.cord2D.y = slope * (faceWidth - upLeftPoint->cord2D.x) + upLeftPoint->cord2D.y; + crossDownRightPoint.cord2D.x = 0; + crossDownRightPoint.cord2D.y = crossUpLeftPoint.cord2D.y; + } + else { + float slope = (upLeftPoint->cord2D.y - downRightPoint->cord2D.y) / (upLeftPoint->cord2D.x + faceWidth - downRightPoint->cord2D.x); + crossDownRightPoint.cord2D.x = faceWidth; + crossDownRightPoint.cord2D.y = slope * (faceWidth - downRightPoint->cord2D.x) + downRightPoint->cord2D.y; + crossUpLeftPoint.cord2D.x = 0; + crossUpLeftPoint.cord2D.y = crossDownRightPoint.cord2D.y; + } + } + else { + return ERROR_NONE; + } + crossBoundaryPoints->push_back(crossUpLeftPoint); + crossBoundaryPoints->push_back(crossDownRightPoint); + return ERROR_NONE; +} + +int32_t TgenViewport::CubemapGetViewportProjInFace(int32_t faceId, std::list* Points) +{ + SpherePoint point; + int32_t tileWidth = m_srd[0].tilewidth; + int32_t tileHeight = m_srd[0].tileheight; + SPos* pTmpUpLeft = &m_pUpLeft[faceId]; + SPos* pTmpDownRight = &m_pDownRight[faceId]; + + if (Points->size() == 0) { + SCVP_LOG(LOG_WARNING, "Cubemap viewport projection reference points list is NULL!\n") + return ERROR_NO_VALUE; + } + pTmpUpLeft->x = m_sourceSVideoInfo.iFaceWidth; + pTmpUpLeft->y = m_sourceSVideoInfo.iFaceHeight; + pTmpDownRight->x = 0; + pTmpDownRight->y = 0; + for (auto it = Points->begin(); it != Points->end(); it++) { + point = *it; + if (point.cord2D.faceIdx == faceId) { + pTmpUpLeft->x = fmin(point.cord2D.x, pTmpUpLeft->x); + pTmpUpLeft->y = fmin(point.cord2D.y, pTmpUpLeft->y); + pTmpDownRight->x = fmax(point.cord2D.x, pTmpDownRight->x); + pTmpDownRight->y = fmax(point.cord2D.y, pTmpDownRight->y); + } + } + for (uint32_t i = 0; i < m_tileNumRow; i++) { + for 
(uint32_t j = 0; j < m_tileNumCol; j++) { + if ((pTmpUpLeft->x < (int32_t)((j + 1) * tileWidth)) && (pTmpUpLeft->y < (int32_t)((i + 1) * tileHeight)) + && ((pTmpDownRight->x >= (int32_t)(j * tileWidth))) && (pTmpDownRight->y >= (int32_t)(i * tileHeight))) + { + m_srd[faceId * m_tileNumRow * m_tileNumCol + i * m_tileNumCol + j].isOccupy = 1; + m_srd[faceId * m_tileNumRow * m_tileNumCol + i * m_tileNumCol + j].faceId = faceId; + } + } + } + pTmpUpLeft->faceIdx = faceId; + pTmpDownRight->faceIdx = faceId; + return ERROR_NONE; +} + + +int32_t TgenViewport::CubemapPolar2Cartesian(SpherePoint* pPoint) +{ + if (!pPoint) { + SCVP_LOG(LOG_ERROR, "The input spherer point is NULL!\n"); + return ERROR_NULL_PTR; + } + pPoint->cord3D.x = scos(pPoint->thita * DEG2RAD_FACTOR) * scos(pPoint->alpha * DEG2RAD_FACTOR); + pPoint->cord3D.y = ssin(pPoint->thita * DEG2RAD_FACTOR); + pPoint->cord3D.z = -scos(pPoint->thita * DEG2RAD_FACTOR) * ssin(pPoint->alpha * DEG2RAD_FACTOR); + + POSType aX = sfabs(pPoint->cord3D.x); + POSType aY = sfabs(pPoint->cord3D.y); + POSType aZ = sfabs(pPoint->cord3D.z); + POSType pu, pv; + if (aX >= aY && aX >= aZ) + { + if (pPoint->cord3D.x > 0) + { + pPoint->cord2D.faceIdx = FACE_PX; + pu = -pPoint->cord3D.z / aX; + pv = -pPoint->cord3D.y / aX; + } + else + { + pPoint->cord2D.faceIdx = FACE_NX; + pu = pPoint->cord3D.z / aX; + pv = -pPoint->cord3D.y / aX; + } + } + else if (aY >= aX && aY >= aZ) + { + if (pPoint->cord3D.y > 0) + { + pPoint->cord2D.faceIdx = FACE_PY; + pu = pPoint->cord3D.x / aY; + pv = pPoint->cord3D.z / aY; + } + else + { + pPoint->cord2D.faceIdx = FACE_NY; + pu = pPoint->cord3D.x / aY; + pv = -pPoint->cord3D.z / aY; + } + } + else + { + if (pPoint->cord3D.z > 0) + { + pPoint->cord2D.faceIdx = FACE_PZ; + pu = pPoint->cord3D.x / aZ; + pv = -pPoint->cord3D.y / aZ; + } + else + { + pPoint->cord2D.faceIdx = FACE_NZ; + pu = -pPoint->cord3D.x / aZ; + pv = -pPoint->cord3D.y / aZ; + } + } + //convert pu, pv to [0, width], [0, height]; + pPoint->cord2D.z = 0; + pPoint->cord2D.x = (POSType)((pu + 1.0) * (m_sourceSVideoInfo.iFaceWidth >> 1) + (-0.5)); + pPoint->cord2D.y = (POSType)((pv + 1.0) * (m_sourceSVideoInfo.iFaceHeight >> 1) + (-0.5)); + + return ERROR_NONE; +} + +int32_t TgenViewport::CubemapIsInsideFaces() +{ + int32_t faceWidth = m_sourceSVideoInfo.iFaceWidth; + int32_t faceHeight = m_sourceSVideoInfo.iFaceHeight; + + float fPitch = m_codingSVideoInfo.viewPort.fPitch; + float fYaw = m_codingSVideoInfo.viewPort.fYaw; + float hFOV = m_codingSVideoInfo.viewPort.hFOV; + float vFOV = m_codingSVideoInfo.viewPort.vFOV; + SpherePoint topPoint, bottomPoint, leftPoint, rightPoint; + SpherePoint centerPoint; + SpherePoint topLeftPoint, topRightPoint, bottomLeftPoint, bottomRightPoint; + + int32_t selectedTilesNum = 0; + int32_t idx; + + double dResult; + clock_t lBefore = clock(); + + float cal_yaw = fYaw + ERP_HORZ_ANGLE / 2; + /* Calculate the topLeft point lattitude when yaw=pitch=0 */ + float thita = calculateLatti(vFOV / 2, hFOV); + /* Phi is half of the open angle of the topLeft/topRight point with the sphere center, which won't change under different pitch/yaw */ + float phi = sacos(ssin(thita * DEG2RAD_FACTOR) / ssin((vFOV / 2) * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + /* Calculate the topLeft/topRight point position with current pitch */ + topLeftPoint.thita = topRightPoint.thita = sasin(scos(phi * DEG2RAD_FACTOR) * ssin((fPitch + vFOV / 2) * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + float tempValue = ssin(phi * DEG2RAD_FACTOR) / scos(topLeftPoint.thita * DEG2RAD_FACTOR); + if 
(tempValue > 1) + tempValue = 1; + else if (tempValue < -1) + tempValue = -1; + topLeftPoint.alpha = cal_yaw - fabs(sasin(tempValue) * RAD2DEG_FACTOR); + topRightPoint.alpha = cal_yaw + fabs(sasin(tempValue) * RAD2DEG_FACTOR); + bottomLeftPoint.thita = bottomRightPoint.thita = sasin(scos(phi * DEG2RAD_FACTOR) * ssin((fPitch - vFOV / 2) * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + tempValue = ssin(phi * DEG2RAD_FACTOR) / scos(bottomLeftPoint.thita * DEG2RAD_FACTOR); + if (tempValue > 1) + tempValue = 1; + else if (tempValue < -1) + tempValue = -1; + bottomLeftPoint.alpha = cal_yaw - fabs(sasin(tempValue) * RAD2DEG_FACTOR); + bottomRightPoint.alpha = cal_yaw + fabs(sasin(tempValue) * RAD2DEG_FACTOR); + + /* Calculate coordinates of each viewport vertex on the cube faces */ + if (fPitch + vFOV / 2 >= ERP_VERT_ANGLE / 2) { + float fTmp = topRightPoint.alpha; + topRightPoint.alpha = topLeftPoint.alpha; + topLeftPoint.alpha = fTmp; + } + else { + topLeftPoint.alpha -= ERP_HORZ_ANGLE / 2; + topRightPoint.alpha -= ERP_HORZ_ANGLE / 2; + } + if (fPitch - vFOV / 2 <= -ERP_VERT_ANGLE / 2) { + float fTmp = bottomRightPoint.alpha; + bottomRightPoint.alpha = bottomLeftPoint.alpha; + bottomLeftPoint.alpha = fTmp; + } + else { + bottomLeftPoint.alpha -= ERP_HORZ_ANGLE / 2; + bottomRightPoint.alpha -= ERP_HORZ_ANGLE / 2; + } + + CubemapPolar2Cartesian(&topLeftPoint); + CubemapPolar2Cartesian(&topRightPoint); + CubemapPolar2Cartesian(&bottomLeftPoint); + CubemapPolar2Cartesian(&bottomRightPoint); + + /* Calculate top/bottom point position */ + topPoint.thita = fPitch + vFOV / 2; + bottomPoint.thita = fPitch - vFOV / 2; + topPoint.alpha = bottomPoint.alpha = fYaw; + CubemapPolar2Cartesian(&topPoint); + CubemapPolar2Cartesian(&bottomPoint); + + /* Calculate Left/Right point */ + leftPoint.thita = rightPoint.thita = sasin(scos(hFOV / 2 * DEG2RAD_FACTOR) * ssin(fPitch * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR; + leftPoint.alpha = fYaw - fabs(sasin(ssin(hFOV / 2 * DEG2RAD_FACTOR) / scos(leftPoint.thita * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR); + rightPoint.alpha = fYaw + fabs(sasin(ssin(hFOV / 2 * DEG2RAD_FACTOR) / scos(rightPoint.thita * DEG2RAD_FACTOR)) * RAD2DEG_FACTOR); + if (leftPoint.alpha < -ERP_HORZ_ANGLE / 2) + leftPoint.alpha += ERP_HORZ_ANGLE; + if (rightPoint.alpha > ERP_HORZ_ANGLE / 2) + rightPoint.alpha -= ERP_HORZ_ANGLE; + CubemapPolar2Cartesian(&leftPoint); + CubemapPolar2Cartesian(&rightPoint); + + /* Calculate center point position */ + centerPoint.thita = fPitch; + centerPoint.alpha = fYaw; + CubemapPolar2Cartesian(¢erPoint); + + /* Reset viewport area */ + for (int32_t i = 0; i < FACE_NUMBER; i++) { + m_pUpLeft[i].faceIdx = -1; + m_pDownRight[i].faceIdx = -1; + } + for (idx = 0; idx < (int32_t)(FACE_NUMBER * m_tileNumRow * m_tileNumCol); idx++) { + m_srd[idx].faceId = -1; + m_srd[idx].isOccupy = 0; + } + + /* Calculate the crossing points on * + * each face if the neighbor reference * + * points are not in the same face */ + std::list referencePoints; + CubemapGetFaceBoundaryCrossingPoints(&topLeftPoint, &topPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&topPoint, &topRightPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&topLeftPoint, &leftPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&leftPoint, &bottomLeftPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&bottomLeftPoint, &bottomPoint, faceWidth, faceHeight, &referencePoints); + 
CubemapGetFaceBoundaryCrossingPoints(&bottomPoint, &bottomRightPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&topRightPoint, &rightPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&rightPoint, &bottomRightPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&leftPoint, &centerPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&topPoint, &centerPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&centerPoint, &rightPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&centerPoint, &bottomPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&topLeftPoint, &bottomLeftPoint, faceWidth, faceHeight, &referencePoints); + CubemapGetFaceBoundaryCrossingPoints(&topRightPoint, &bottomRightPoint, faceWidth, faceHeight, &referencePoints); + + /* The viewport bounding points are also useful for tile selection */ + referencePoints.push_back(topLeftPoint); + referencePoints.push_back(topPoint); + referencePoints.push_back(leftPoint); + referencePoints.push_back(bottomLeftPoint); + referencePoints.push_back(bottomPoint); + referencePoints.push_back(bottomRightPoint); + referencePoints.push_back(rightPoint); + referencePoints.push_back(topRightPoint); + referencePoints.push_back(centerPoint); + + /* Get the tile selection on each face */ + for (int32_t faceIdx = 0; faceIdx < FACE_NUMBER; faceIdx++) + CubemapGetViewportProjInFace(faceIdx, &referencePoints); + + ITileInfo* pTileInfoTmp = m_srd; + int32_t faceNum = (m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) ? 6 : 1; + for (int32_t faceid = 0; faceid < faceNum; faceid++) + { + SCVP_LOG(LOG_INFO, "Face is %d\n", faceid); + for (uint32_t row = 0; row < m_tileNumRow; row++) + { + for (uint32_t col = 0; col < m_tileNumCol; col++) + { + if (pTileInfoTmp->isOccupy) { + SCVP_LOG(LOG_INFO, "Selected tile at row %d and col %d\n", row, col); + selectedTilesNum++; + } + pTileInfoTmp++; + } + } + } + SCVP_LOG(LOG_INFO, "Total Selected Tile Number is %d\n", selectedTilesNum); + + referencePoints.clear(); + dResult = (double)(clock() - lBefore) / CLOCKS_PER_SEC; + SCVP_LOG(LOG_INFO, "Total Time for tile selection: %f s, selected tile number is %d\n", dResult, selectedTilesNum); + + return selectedTilesNum; +} int32_t TgenViewport::calcTilesInViewport(ITileInfo* pTileInfo, int32_t tileCol, int32_t tileRow) { if (!pTileInfo) @@ -711,21 +2172,59 @@ int32_t TgenViewport::calcTilesInViewport(ITileInfo* pTileInfo, int32_t tileCol, int32_t ret = 0; ITileInfo *pTileInfoTmp = pTileInfo; int32_t faceNum = (m_sourceSVideoInfo.geoType==SVIDEO_CUBEMAP) ? 
6 : 1; + if (m_sourceSVideoInfo.geoType == SVIDEO_EQUIRECT) { for (int32_t faceid = 0; faceid < faceNum; faceid++) { for (int32_t row = 0; row < tileRow; row++) { for (int32_t col = 0; col < tileCol; col++) { - pTileInfoTmp->isOccupy = isInside(pTileInfoTmp->x, pTileInfoTmp->y, pTileInfoTmp->tilewidth, pTileInfoTmp->tileheight, faceid); + //pTileInfoTmp->isOccupy = isInside(pTileInfoTmp->x, pTileInfoTmp->y, pTileInfoTmp->tilewidth, pTileInfoTmp->tileheight, faceid); if (pTileInfoTmp->isOccupy == 1) - { ret++; - } pTileInfoTmp++; } } } + } + else if (m_sourceSVideoInfo.geoType == SVIDEO_CUBEMAP) { + ret = CubemapIsInsideFaces(); + } + return ret; +} + +int32_t TgenViewport::getContentCoverage(CCDef* pOutCC, int32_t coverageShapeType) { + int32_t ret; + /* Shape Type: + * 0: CubeMap + * 1: ERP + */ + switch (coverageShapeType) { + case 0: + pOutCC->azimuthRange = m_codingSVideoInfo.viewPort.hFOV * 65536.f; + pOutCC->elevationRange = m_codingSVideoInfo.viewPort.vFOV * 65536.f; + pOutCC->centreAzimuth = m_codingSVideoInfo.viewPort.fYaw * 65536.f; + pOutCC->centreElevation = m_codingSVideoInfo.viewPort.fPitch * 65536.f; + ret = 0; + break; + case 1: + /* TBD: implemented by Yaw/Pitch/hFOV/vFOV directly + * double hFOVInRadian, pitchInRadian; + * hFOVInRadian = m_codingSVideoInfo.viewPort.hFOV / 180.f * S_PI; + * pitchInRadian = m_codingSVideoInfo.viewPort.fPitch / 180.f * S_PI; + + * pOutCC->azimuthRange = sasin(sfabs(ssin(hFOVInRadian/2) / scos(pitchInRadian/2))) / S_PI * 360.f * 65536.f; + * pOutCC->elevationRange = m_codingSVideoInfo.viewPort.vFOV * 65536.f; + * pOutCC->centreAzimuth = m_codingSVideoInfo.viewPort.fYaw * 65536.f; + * pOutCC->centreElevation = m_codingSVideoInfo.viewPort.fPitch * 65536.f; + */ + SCVP_LOG(LOG_WARNING, "Doesnt' Support to Get CC by Viewport Settings Directly for Shape Type 1\n"); + ret = -1; + break; + default: + SCVP_LOG(LOG_WARNING, "Coverage type must be 0 or 1\n"); + ret = -1; + break; + } return ret; } -//! 
\} diff --git a/src/360SCVP/360SCVPViewportImpl.h b/src/360SCVP/360SCVPViewportImpl.h index a9afa8d4..4abf589f 100644 --- a/src/360SCVP/360SCVPViewportImpl.h +++ b/src/360SCVP/360SCVPViewportImpl.h @@ -26,10 +26,12 @@ #ifndef __360SCVP_VIEWPORTIMPL__ #define __360SCVP_VIEWPORTIMPL__ +#include "360SCVPAPI.h" #include "360SCVPGeometry.h" #include #include +#include ///< for cubemap, given the facesize (960x960), the maxsimum viewport size is defined in the below table typedef struct SIZE_DEF @@ -38,8 +40,10 @@ typedef struct SIZE_DEF int32_t y; }SIZE; -#define MAX_FOV_ANGLE 100; -#define NORMAL_FACE_SIZE 960; +#define MAX_FOV_ANGLE 100 +#define NORMAL_FACE_SIZE 960 +#define NORMAL_PITCH_MIN -75 +#define NORMAL_PITCH_MAX -15 enum FOVAngle { @@ -57,6 +61,16 @@ SIZE Max_Viewport_Size[FOV_Angle_NUM][4] = {{479, 959}, {134, 161}, {134, 160}, {394, 959}}, {{351, 946}, {437, 944}, {0, 0 }, {0, 0 }} }; + +enum CubeFace +{ + FACE_PX = 0, + FACE_NX, + FACE_PY, + FACE_NY, + FACE_PZ, + FACE_NZ +}; // ==================================================================================================================== // Class definition, // ==================================================================================================================== @@ -68,7 +82,26 @@ struct ITileInfo short tileheight; int32_t faceId; uint32_t isOccupy; + float vertPos; + float horzPos; + float horzPosBottomRight; + float horzPosBottomLeft; + float horzPosTopRight; + float vertPosBottomRight; //Record all 4 endpoint position for Cubemap usage + float vertPosBottomLeft; + float vertPosTopRight; }; + +/* Point description on Sperial surface */ +typedef struct SPHEREPOINT +{ + POSType alpha; + POSType thita; + POSType phi; + SPos cord3D; + SPos cord2D; +} SpherePoint; + /// generate viewport class class TgenViewport { @@ -93,9 +126,14 @@ class TgenViewport int32_t m_aiPad[2]; ///< number of padded pixels for width and height int32_t m_faceSizeAlignment; int32_t m_maxTileNum; - inline int32_t round(POSType t) { return (int32_t)(t+ (t>=0? 0.5 :-0.5)); }; + UsageType m_usageType; + Param_VideoFPStruct m_paramVideoFP; + SpherePoint *m_pViewportHorizontalBoudaryPoints; + inline int32_t round(POSType t) { return (int32_t)(t+ (t>=0? 
0.5 :-0.5)); } + public: TgenViewport(); + TgenViewport(TgenViewport& src); virtual ~TgenViewport(); TgenViewport& operator=(const TgenViewport& src); @@ -104,10 +142,95 @@ class TgenViewport void destroy(); ///< destroy option handling class int32_t parseCfg( ); ///< parse configuration file to fill member variables int32_t convert(); + int32_t ERPselectregion(short inputWidth, short inputHeight, short dstWidth, short dstHeight); //analysis; bool isInside(int32_t x, int32_t y, int32_t width, int32_t height, int32_t faceId); + int32_t CubemapIsInsideFaces(); int32_t calcTilesInViewport(ITileInfo* pTileInfo, int32_t tileCol, int32_t tileRow); + int32_t CubemapCalcTilesGrid(); + int32_t getContentCoverage(CCDef* pOutCC, int32_t coverageShapeType); +private: + /* calculateLongitudeFromThita: * + * Param: * + * Latti: Point spherical latitude * + * Phi: The angle to the sphere center * + * maxLongiOffset: The maximum longitude offset * + * Return: * + * The point longitude */ + float calculateLongitudeFromThita(float Latti, float phi, float maxLongiOffset); + /* calculateLattitudeFromPhi: * + * Param: * + * phi: The angle to the sphere center * + * pitch: The latitude of the center point of the * + * current great circle * + * Return: * + * The point latitude */ + float calculateLattitudeFromPhi(float phi, float pitch); + /* calculateLatti: * + * Param: * + * pitch: The latitude of the center point of the * + * current great circle * + * hFOV: The horizontal FOV of the viewport * + * Return: * + * The viewport's top-left point latitude */ + float calculateLatti(float pitch, float hFOV); + /* calculateLongiByLatti: * + * Param: * + * pitch: The latitude of the center point of the * + * current great circle * + * latti: The point latitude * + * Return: * + * The point longitude offset to the viewport center */ + float calculateLongiByLatti(float latti, float pitch); + /* ERPselectTilesInsideOnOneRow: Choose tiles in the given row * + * Param: * + * pTileInfo: Tile Info for output * + * leftCol: The leftmost tile index of the current row * + * rightCol: The rightmost tile index of the current row * + * tileNumCol: The tile number in one row * + * row: The row number * + * Return: * + * Error code. 
*/ + int32_t ERPselectTilesInsideOnOneRow(ITileInfo* pTileInfo, int32_t tileNumCol, float leftCol, float rightCol, int32_t row); + /* CubemapPolar2Cartesian: Convert point coordinates from polar to * + * cartesian expression in both 2D and 3D * + * Param: * + * pPoint: Point with polar coordinates expression * + * Return: * + * Error code */ + int32_t CubemapPolar2Cartesian(SpherePoint* pPoint); + /* CubemapGetFaceBoundaryCrossingPoints: * + * Calculate cross point axis of the face * + * boundary and the connection line of two given * + * points * + * Param: * + * upLeftPoint: The first given point which is * + * offen in the up or left direction * + * downRightPoint: The second given point which is * + * in the down or right direction * + * faceWidth: The face width * + * faceHeight: The face height * + * crossBoundaryPoints: The list which stores the crossing * + * point of the cube's boundary and * + * connection line of the two input * + * points * + * Return: * + * Error code */ + int32_t CubemapGetFaceBoundaryCrossingPoints(SpherePoint* upLeftPoint, SpherePoint* downRightPoint, int32_t faceWidth, int32_t faceHeight, std::list* crossBoundaryPoints); + /* CubemapGetViewportProjInFace: Calculate the projection of * + * the viewport on the give * + * cube face * + * Param: * + * faceId: The face Id * + * refBoundaryPoints: The reference points in a list * + * which can provide the * + * projection area on each face * + * Return: * + * error code */ + int32_t CubemapGetViewportProjInFace(int32_t faceId, std::list* crossBoundaryPoints); };// END CLASS DEFINITION //! \} diff --git a/src/360SCVP/BUILD.gn b/src/360SCVP/BUILD.gn new file mode 100644 index 00000000..b613c9e6 --- /dev/null +++ b/src/360SCVP/BUILD.gn @@ -0,0 +1,39 @@ +# for owt-client-android. Tested with Android arm64 device only + +config("my_config") { + # compare with CMakeLists.txt + cflags = [ + "-fstack-protector-strong", + "-D_FORTIFY_SOURCE=2", + "-g", + "-c", + "-fPIC", + "-fPIE", + "-O2", + "-Wformat", + "-Wformat-security", + "-Wall", + "-Werror", + "-std=c++11", + ] +} + +static_library("lib360SCVP") { + configs += [ ":my_config" ] + + sources = [ + "360SCVPAPIImpl.cpp", + "360SCVPBitstream.cpp", + "360SCVPCubeMap.cpp", + "360SCVPEquiRect.cpp", + "360SCVPGeometry.cpp", + "360SCVPHevcEncHdr.cpp", + "360SCVPHevcParser.cpp", + "360SCVPHevcTileMerge.cpp", + "360SCVPHevcTilestream.cpp", + "360SCVPImpl.cpp", + "360SCVPViewPort.cpp", + "360SCVPViewportImpl.cpp", + ] +} + diff --git a/src/360SCVP/CMakeLists.txt b/src/360SCVP/CMakeLists.txt index d217b144..fd3d46e0 100644 --- a/src/360SCVP/CMakeLists.txt +++ b/src/360SCVP/CMakeLists.txt @@ -1,15 +1,28 @@ cmake_minimum_required(VERSION 2.8) +option(USE_ANDROID_NDK + "Use android ndk" + OFF +) + project(360SCVP) AUX_SOURCE_DIRECTORY(. 
DIR_SRC) +SET(DIR_SRC + ${DIR_SRC} +) + +if(NOT USE_ANDROID_NDK) ADD_DEFINITIONS("-z noexecstack -z relro -z now -fstack-protector-strong -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security -Wl,-S -Wall -Werror -g -c -fPIC -std=c++11") +else() +ADD_DEFINITIONS("-fPIE -fPIC -O2 -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security -Wall -Werror -g -c -fPIC -std=c++11") +endif() -INCLUDE_DIRECTORIES(/usr/local/include) +INCLUDE_DIRECTORIES(/usr/local/include ./ ../utils ../plugins/360SCVP_Plugins/TileSelection_Plugins) LINK_DIRECTORIES(/usr/local/lib) -ADD_LIBRARY(360SCVP SHARED ${DIR_SRC}) +ADD_LIBRARY(360SCVP SHARED ${DIR_SRC} ../utils/Log.cpp ../utils/tinyxml2.cpp) if(NOT DEFINED CMAKE_INSTALL_PREFIX OR CMAKE_INSTALL_PREFIX STREQUAL "") set(CMAKE_INSTALL_PREFIX "/usr/local" CACHE PATH "..." FORCE) @@ -28,12 +41,19 @@ if(NOT DEFINED CMAKE_INSTALL_INCLUDEDIR) set(CMAKE_INSTALL_INCLUDEDIR "include") endif() +TARGET_LINK_LIBRARIES(360SCVP glog) +TARGET_LINK_LIBRARIES(360SCVP safestring_shared) +TARGET_LINK_LIBRARIES(360SCVP dl) + install(TARGETS 360SCVP LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}" RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}") +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/error.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/common_data.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/Log.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/pose.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") install(FILES ${PROJECT_SOURCE_DIR}/360SCVPAPI.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") install(FILES ${PROJECT_SOURCE_DIR}/360SCVPViewportAPI.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}") install(FILES ${PROJECT_SOURCE_DIR}/360SCVP.pc DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig") - diff --git a/src/360SCVP/test/testI360SCVP.cpp b/src/360SCVP/test/testI360SCVP.cpp index 084e68f5..5326e53d 100644 --- a/src/360SCVP/test/testI360SCVP.cpp +++ b/src/360SCVP/test/testI360SCVP.cpp @@ -29,6 +29,10 @@ #include #include "../360SCVPAPI.h" +extern "C" { + #include "safestringlib/safe_mem_lib.h" +} + namespace{ class I360SCVPTest : public testing::Test { public: @@ -44,7 +48,7 @@ class I360SCVPTest : public testing::Test { frameHeightlow = 768; bufferlen = frameWidth * frameHeight * 3 / 2; bufferlenlow = frameWidthlow * frameHeightlow * 3 / 2; - memset((void*)¶m, 0, sizeof(param_360SCVP)); + memset_s((void*)¶m, sizeof(param_360SCVP), 0); pInputBuffer = new unsigned char[bufferlen]; pInputBufferlow = new unsigned char[bufferlenlow]; pOutputBuffer = new unsigned char[bufferlen]; @@ -126,6 +130,10 @@ TEST_F(I360SCVPTest, I360SCVPCreate_type1) param.paramViewPort.viewPortPitch = 0; param.paramViewPort.viewPortFOVH = 80; param.paramViewPort.viewPortFOVV = 80; + param.paramViewPort.paramVideoFP.cols = 1; + param.paramViewPort.paramVideoFP.rows = 1; + param.paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + param.paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; void* pI360SCVP = I360SCVP_Init(¶m); bool notnull = (pI360SCVP != NULL); //EXPECT_TRUE(pI360SCVP != NULL); @@ -173,6 +181,10 @@ TEST_F(I360SCVPTest, ParseNAL_type1) param.paramViewPort.viewPortFOVV = 80; param.paramViewPort.tileNumCol = 6; param.paramViewPort.tileNumRow = 3; + param.paramViewPort.paramVideoFP.cols = 1; + param.paramViewPort.paramVideoFP.rows = 1; + param.paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + 
param.paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; param.usedType = E_MERGE_AND_VIEWPORT; void* pI360SCVP = I360SCVP_Init(¶m); @@ -326,6 +338,10 @@ TEST_F(I360SCVPTest, GetParameter_PicInfo_type1) param.paramViewPort.tileNumCol = 6; param.paramViewPort.tileNumRow = 3; param.usedType = E_MERGE_AND_VIEWPORT; + param.paramViewPort.paramVideoFP.cols = 1; + param.paramViewPort.paramVideoFP.rows = 1; + param.paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + param.paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; void* pI360SCVP = I360SCVP_Init(¶m); EXPECT_TRUE(pI360SCVP != NULL); if (!pI360SCVP) @@ -416,6 +432,11 @@ TEST_F(I360SCVPTest, SetParameter_SetViewport) paramViewPorInfo.viewPortFOVV = 80; paramViewPorInfo.tileNumCol = 6; paramViewPorInfo.tileNumRow = 3; + paramViewPorInfo.usageType = E_STREAM_STITCH_ONLY; + paramViewPorInfo.paramVideoFP.cols = 1; + paramViewPorInfo.paramVideoFP.rows = 1; + paramViewPorInfo.paramVideoFP.faces[0][0].idFace = 0; + paramViewPorInfo.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; ret = I360SCVP_SetParameter(pI360SCVP, ID_SCVP_PARAM_VIEWPORT, ¶mViewPorInfo); I360SCVP_unInit(pI360SCVP); EXPECT_TRUE(ret ==0); @@ -482,7 +503,7 @@ TEST_F(I360SCVPTest, GenerateRWPK) { while (num) { - memset(pRectRegionPackTmp, 0, sizeof(RectangularRegionWisePacking)); + memset_s(pRectRegionPackTmp, sizeof(RectangularRegionWisePacking), 0); pRectRegionPackTmp++; num--; } @@ -547,6 +568,11 @@ TEST_F(I360SCVPTest, SetViewportSEI) paramViewPorInfo.viewPortFOVV = 80; paramViewPorInfo.tileNumCol = 6; paramViewPorInfo.tileNumRow = 3; + paramViewPorInfo.usageType = E_STREAM_STITCH_ONLY; + paramViewPorInfo.paramVideoFP.cols = 1; + paramViewPorInfo.paramVideoFP.rows = 1; + paramViewPorInfo.paramVideoFP.faces[0][0].idFace = 0; + paramViewPorInfo.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; ret = I360SCVP_SetParameter(pI360SCVP, ID_SCVP_PARAM_VIEWPORT, ¶mViewPorInfo); EXPECT_TRUE(ret == 0); if (ret) { @@ -555,7 +581,7 @@ TEST_F(I360SCVPTest, SetViewportSEI) } pTiledBitstreamTotal = new param_oneStream_info[param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum]; - memset(pTiledBitstreamTotal, 0, param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum * sizeof(param_oneStream_info)); + memset_s(pTiledBitstreamTotal, param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum * sizeof(param_oneStream_info), 0); EXPECT_TRUE(pTiledBitstreamTotal != NULL); param_oneStream_info *ptemp = pTiledBitstreamTotal; @@ -567,7 +593,7 @@ TEST_F(I360SCVPTest, SetViewportSEI) ptemp->tilesHeightCount = 1; ptemp->tilesWidthCount = 1; ptemp->inputBufferLen = 4096; - memcpy(ptemp->pTiledBitstreamBuffer, pInputBuffer, 4096); + memcpy_s(ptemp->pTiledBitstreamBuffer, 4096, pInputBuffer, 4096); ptemp++; } } @@ -630,7 +656,7 @@ TEST_F(I360SCVPTest, SetRWPKSEI) { while (num) { - memset(pRectRegionPackTmp, 0, sizeof(RectangularRegionWisePacking)); + memset_s(pRectRegionPackTmp, sizeof(RectangularRegionWisePacking), 0); pRectRegionPackTmp++; num--; } @@ -653,6 +679,11 @@ TEST_F(I360SCVPTest, SetRWPKSEI) paramViewPorInfo.viewPortFOVV = 80; paramViewPorInfo.tileNumCol = 6; paramViewPorInfo.tileNumRow = 3; + paramViewPorInfo.usageType = E_STREAM_STITCH_ONLY; + paramViewPorInfo.paramVideoFP.cols = 1; + paramViewPorInfo.paramVideoFP.rows = 1; + paramViewPorInfo.paramVideoFP.faces[0][0].idFace = 0; + paramViewPorInfo.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; ret = I360SCVP_SetParameter(pI360SCVP, ID_SCVP_PARAM_VIEWPORT, ¶mViewPorInfo); EXPECT_TRUE(ret == 0); 
if (ret) @@ -662,7 +693,7 @@ TEST_F(I360SCVPTest, SetRWPKSEI) } pTiledBitstreamTotal = new param_oneStream_info[param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum]; - memset(pTiledBitstreamTotal, 0, param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum * sizeof(param_oneStream_info)); + memset_s(pTiledBitstreamTotal, param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum * sizeof(param_oneStream_info), 0); EXPECT_TRUE(pTiledBitstreamTotal != NULL); param_oneStream_info *ptemp = pTiledBitstreamTotal; @@ -674,7 +705,7 @@ TEST_F(I360SCVPTest, SetRWPKSEI) ptemp->tilesHeightCount = 1; ptemp->tilesWidthCount = 1; ptemp->inputBufferLen = 4096; - memcpy(ptemp->pTiledBitstreamBuffer, pInputBuffer, 4096); + memcpy_s(ptemp->pTiledBitstreamBuffer, 4096, pInputBuffer, 4096); ptemp++; } } @@ -740,6 +771,11 @@ TEST_F(I360SCVPTest, SetRotationSEI) paramViewPorInfo.viewPortFOVV = 80; paramViewPorInfo.tileNumCol = 6; paramViewPorInfo.tileNumRow = 3; + paramViewPorInfo.usageType = E_STREAM_STITCH_ONLY; + paramViewPorInfo.paramVideoFP.cols = 1; + paramViewPorInfo.paramVideoFP.rows = 1; + paramViewPorInfo.paramVideoFP.faces[0][0].idFace = 0; + paramViewPorInfo.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; ret = I360SCVP_SetParameter(pI360SCVP, ID_SCVP_PARAM_VIEWPORT, ¶mViewPorInfo); EXPECT_TRUE(ret == 0); if (ret) @@ -749,7 +785,7 @@ TEST_F(I360SCVPTest, SetRotationSEI) } pTiledBitstreamTotal = new param_oneStream_info[param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum]; - memset(pTiledBitstreamTotal, 0, param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum * sizeof(param_oneStream_info)); + memset_s(pTiledBitstreamTotal, param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum * sizeof(param_oneStream_info), 0); EXPECT_TRUE(pTiledBitstreamTotal != NULL); param_oneStream_info *ptemp = pTiledBitstreamTotal; @@ -761,7 +797,7 @@ TEST_F(I360SCVPTest, SetRotationSEI) ptemp->tilesHeightCount = 1; ptemp->tilesWidthCount = 1; ptemp->inputBufferLen = 4096; - memcpy(ptemp->pTiledBitstreamBuffer, pInputBuffer, 4096); + memcpy_s(ptemp->pTiledBitstreamBuffer, 4096, pInputBuffer, 4096); ptemp++; } } @@ -802,7 +838,7 @@ TEST_F(I360SCVPTest, SetFramePackingSEI) } FramePacking framepack; - memset(&framepack, 0, sizeof(FramePacking)); + memset_s(&framepack, sizeof(FramePacking), 0); framepack.frame0GridX = 10; framepack.frame0GridY = 10; framepack.frame1GridX = 100; @@ -827,6 +863,11 @@ TEST_F(I360SCVPTest, SetFramePackingSEI) paramViewPorInfo.viewPortFOVV = 80; paramViewPorInfo.tileNumCol = 6; paramViewPorInfo.tileNumRow = 3; + paramViewPorInfo.usageType = E_STREAM_STITCH_ONLY; + paramViewPorInfo.paramVideoFP.cols = 1; + paramViewPorInfo.paramVideoFP.rows = 1; + paramViewPorInfo.paramVideoFP.faces[0][0].idFace = 0; + paramViewPorInfo.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; ret = I360SCVP_SetParameter(pI360SCVP, ID_SCVP_PARAM_VIEWPORT, ¶mViewPorInfo); EXPECT_TRUE(ret == 0); if (ret) @@ -836,7 +877,7 @@ TEST_F(I360SCVPTest, SetFramePackingSEI) } pTiledBitstreamTotal = new param_oneStream_info[param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum]; - memset(pTiledBitstreamTotal, 0, param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum * sizeof(param_oneStream_info)); + memset_s(pTiledBitstreamTotal, param.paramPicInfo.tileHeightNum*param.paramPicInfo.tileWidthNum * sizeof(param_oneStream_info), 0); EXPECT_TRUE(pTiledBitstreamTotal != NULL); param_oneStream_info *ptemp = pTiledBitstreamTotal; @@ -848,7 +889,7 @@ 
TEST_F(I360SCVPTest, SetFramePackingSEI) ptemp->tilesHeightCount = 1; ptemp->tilesWidthCount = 1; ptemp->inputBufferLen = 4096; - memcpy(ptemp->pTiledBitstreamBuffer, pInputBuffer, 4096); + memcpy_s(ptemp->pTiledBitstreamBuffer, 4096, pInputBuffer, 4096); ptemp++; } } @@ -887,6 +928,10 @@ TEST_F(I360SCVPTest, webrtcUsage) param.paramViewPort.viewPortFOVH = 80; param.paramViewPort.viewPortFOVV = 80; param.usedType = E_MERGE_AND_VIEWPORT; + param.paramViewPort.paramVideoFP.cols = 1; + param.paramViewPort.paramVideoFP.rows = 1; + param.paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + param.paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; void* pI360SCVP = I360SCVP_Init(¶m); EXPECT_TRUE(pI360SCVP != NULL); if (!pI360SCVP) @@ -933,6 +978,10 @@ TEST_F(I360SCVPTest, parseRWPK) param.paramViewPort.viewPortFOVH = 80; param.paramViewPort.viewPortFOVV = 80; param.usedType = E_MERGE_AND_VIEWPORT; + param.paramViewPort.paramVideoFP.cols = 1; + param.paramViewPort.paramVideoFP.rows = 1; + param.paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + param.paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; void* pI360SCVP = I360SCVP_Init(¶m); EXPECT_TRUE(pI360SCVP != NULL); if (!pI360SCVP) @@ -1004,4 +1053,161 @@ TEST_F(I360SCVPTest, parseRWPK) EXPECT_TRUE(ret == 0); } +/* CubeMap on Merge&Viewport mode is not able to use this input ERP coded stream * + * Comment this UT and will enable it after Cubemap supports Merge&Viewport * + * */ +#if 0 +TEST_F(I360SCVPTest, cubemapUsage) +{ + int ret = 0; + param.paramViewPort.faceWidth = 512 * 4; + param.paramViewPort.faceHeight = 512 * 4; + param.paramViewPort.geoTypeInput = EGeometryType(E_SVIDEO_CUBEMAP); + param.paramViewPort.viewportHeight = 960; + param.paramViewPort.viewportWidth = 960; + param.paramViewPort.geoTypeOutput = E_SVIDEO_VIEWPORT; + param.paramViewPort.tileNumCol = 4; + param.paramViewPort.tileNumRow = 4; + param.paramViewPort.viewPortYaw = -90; + param.paramViewPort.viewPortPitch = 0; + param.paramViewPort.viewPortFOVH = 80; + param.paramViewPort.viewPortFOVV = 80; + param.usedType = E_MERGE_AND_VIEWPORT; + param.paramViewPort.paramVideoFP.cols = 3; + param.paramViewPort.paramVideoFP.rows = 2; + param.paramViewPort.paramVideoFP.faces[0][0].idFace = 4; + param.paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + param.paramViewPort.paramVideoFP.faces[0][0].faceHeight = 512 * 4; + param.paramViewPort.paramVideoFP.faces[0][0].faceHeight = 512 * 4; + param.paramViewPort.paramVideoFP.faces[0][1].idFace = 0; + param.paramViewPort.paramVideoFP.faces[0][1].rotFace = NO_TRANSFORM; + param.paramViewPort.paramVideoFP.faces[0][2].idFace = 5; + param.paramViewPort.paramVideoFP.faces[0][2].rotFace = NO_TRANSFORM; + + param.paramViewPort.paramVideoFP.faces[1][0].idFace = 3; + param.paramViewPort.paramVideoFP.faces[1][0].rotFace = ROTATION_180_ANTICLOCKWISE; + param.paramViewPort.paramVideoFP.faces[1][1].idFace = 1; + param.paramViewPort.paramVideoFP.faces[1][1].rotFace = ROTATION_270_ANTICLOCKWISE; + param.paramViewPort.paramVideoFP.faces[1][2].idFace = 2; + param.paramViewPort.paramVideoFP.faces[1][2].rotFace = NO_TRANSFORM; + void* pI360SCVP = I360SCVP_Init(¶m); + EXPECT_TRUE(pI360SCVP != NULL); + if (!pI360SCVP) + { + I360SCVP_unInit(pI360SCVP); + return; + } + float raw = -90; + float pitch = 84; + + I360SCVP_setViewPort(pI360SCVP, raw, pitch); + + ret = I360SCVP_process(¶m, pI360SCVP); + I360SCVP_unInit(pI360SCVP); + EXPECT_TRUE(ret == 0); + EXPECT_TRUE(param.outputBitstreamLen > 0); +} +#endif + 
+TEST_F(I360SCVPTest, cubemapGetTilesInViewport) +{ + int ret = 0; + param.paramViewPort.faceWidth = 512 * 4; + param.paramViewPort.faceHeight = 512 * 4; + param.paramViewPort.geoTypeInput = EGeometryType(E_SVIDEO_CUBEMAP); + param.paramViewPort.viewportHeight = 960; + param.paramViewPort.viewportWidth = 960; + param.paramViewPort.geoTypeOutput = E_SVIDEO_VIEWPORT; + param.paramViewPort.tileNumCol = 4; + param.paramViewPort.tileNumRow = 4; + param.paramViewPort.viewPortYaw = -90; + param.paramViewPort.viewPortPitch = 0; + param.paramViewPort.viewPortFOVH = 80; + param.paramViewPort.viewPortFOVV = 80; + param.usedType = E_VIEWPORT_ONLY; + param.paramViewPort.paramVideoFP.cols = 3; + param.paramViewPort.paramVideoFP.rows = 2; + param.paramViewPort.paramVideoFP.faces[0][0].idFace = 4; + param.paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + param.paramViewPort.paramVideoFP.faces[0][0].faceHeight = 512 * 4; + param.paramViewPort.paramVideoFP.faces[0][0].faceHeight = 512 * 4; + param.paramViewPort.paramVideoFP.faces[0][1].idFace = 0; + param.paramViewPort.paramVideoFP.faces[0][1].rotFace = NO_TRANSFORM; + param.paramViewPort.paramVideoFP.faces[0][2].idFace = 5; + param.paramViewPort.paramVideoFP.faces[0][2].rotFace = NO_TRANSFORM; + + param.paramViewPort.paramVideoFP.faces[1][0].idFace = 3; + param.paramViewPort.paramVideoFP.faces[1][0].rotFace = ROTATION_180_ANTICLOCKWISE; + param.paramViewPort.paramVideoFP.faces[1][1].idFace = 1; + param.paramViewPort.paramVideoFP.faces[1][1].rotFace = ROTATION_270_ANTICLOCKWISE; + param.paramViewPort.paramVideoFP.faces[1][2].idFace = 2; + param.paramViewPort.paramVideoFP.faces[1][2].rotFace = NO_TRANSFORM; + void* pI360SCVP = I360SCVP_Init(¶m); + EXPECT_TRUE(pI360SCVP != NULL); + if (!pI360SCVP) + { + I360SCVP_unInit(pI360SCVP); + return; + } + float raw = 0; + float pitch = 60; + Param_ViewportOutput paramViewportOutput; + TileDef* tilesInViewport = new TileDef[1024]; + + I360SCVP_setViewPort(pI360SCVP, raw, pitch); + I360SCVP_getTilesInViewport(tilesInViewport, ¶mViewportOutput, pI360SCVP); + ret = I360SCVP_process(¶m, pI360SCVP); + + I360SCVP_unInit(pI360SCVP); + EXPECT_TRUE(ret == 0); +} + +TEST_F(I360SCVPTest, ERPSelectViewportTiles) +{ + int32_t tileNum_fast, tileNum_legacy; + TileDef pOutTile[1024]; + Param_ViewportOutput paramViewportOutput; + + param.paramViewPort.faceWidth = 7680; + param.paramViewPort.faceHeight = 3840; + param.paramViewPort.geoTypeInput = EGeometryType(E_SVIDEO_EQUIRECT); + param.paramViewPort.viewportHeight = 1024; + param.paramViewPort.viewportWidth = 1024; + param.paramViewPort.geoTypeOutput = E_SVIDEO_VIEWPORT; + param.paramViewPort.tileNumCol = 20; + param.paramViewPort.tileNumRow = 10; + param.paramViewPort.viewPortYaw = 0; + param.paramViewPort.viewPortPitch = 0; + param.paramViewPort.viewPortFOVH = 80; + param.paramViewPort.viewPortFOVV = 90; + param.usedType = E_VIEWPORT_ONLY; + param.paramViewPort.paramVideoFP.cols = 1; + param.paramViewPort.paramVideoFP.rows = 1; + param.paramViewPort.paramVideoFP.faces[0][0].faceWidth = param.paramViewPort.faceWidth; + param.paramViewPort.paramVideoFP.faces[0][0].faceHeight = param.paramViewPort.faceHeight; + param.paramViewPort.paramVideoFP.faces[0][0].idFace = 1; + param.paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + + void* pI360SCVP = I360SCVP_Init(¶m); + EXPECT_TRUE(pI360SCVP != NULL); + if (!pI360SCVP) + { + I360SCVP_unInit(pI360SCVP); + printf( "Init 360SCVP failure: pI360SCVP is NULL!!!\n"); + return; + } + float yaw = 
param.paramViewPort.viewPortYaw ; + float pitch = param.paramViewPort.viewPortPitch; + + tileNum_fast = I360SCVP_getTilesInViewport(pOutTile, ¶mViewportOutput, pI360SCVP); + + I360SCVP_process(¶m, pI360SCVP); + I360SCVP_SetParameter(pI360SCVP, ID_SCVP_PARAM_VIEWPORT, ¶m.paramViewPort); + tileNum_legacy = I360SCVP_GetTilesByLegacyWay(&pOutTile[tileNum_fast], pI360SCVP); + + I360SCVP_unInit(pI360SCVP); + EXPECT_TRUE(tileNum_fast >= 0); + EXPECT_TRUE(tileNum_legacy >= 0); +} + } diff --git a/src/CHANGELOG.md b/src/CHANGELOG.md index b2596374..d3439539 100644 --- a/src/CHANGELOG.md +++ b/src/CHANGELOG.md @@ -1,4 +1,75 @@ # **Changelog** +--- +## [1.4.0] - 2021-1-14 +**Features & bug fix:** +- OMAF Packing Library + + Support packing for cube-map projection in extractor track mode + + Support both fixed and dynamic sub-picture resolutions in extractor track mode + + Support packing for AAC audio stream + + Support packing for planar projection in late-binding mode + + Plugin mode to support customized media stream process + + Support external log callback + + bug fix: memory leak, hang / crash in some condition + +- OMAF Dash Access Library + + Support cube-map projection in extractor track mode + + Support maximum decodable picture width and height limitation in late-binding mode + + Support DASH de-packing for AAC audio stream segments + + Support planar projection in late-binding mode + + bug fix: memory leak, time out, tiles stitching disorder in some condition + +- 360SCVP (Stream Concatenation and Viewport Processing) Library + + code refactor: add plugin definition for tile selection + + optimization for tile selection to improve performance, accuracy and efficiency + + Support external log callback + +- Reference OMAF Player + + Android Player: with ERP and Cube-map support + + Android platform: extend DashAccess JNI library with MediaCodec decoder integrated. 
+ + Linux Player: Support WebRTC source with multiple video stream decoding, rendering; and RTCP FOV feedback + + Linux Player: Support Planar Video + + Code refactor + +- FFmpeg Plugins & Encoder Library + + Encoder Library: Bug fix for memory leak + + FFmpeg Plugins: add option for external log callback and log level set + + FFmpeg Plugins: add option for fixed/dynamic sub-picture resolution for extractor track mode + + FFmpeg Plugins: add audio stream input process + + FFmpeg Plugins: add option for planar projection support + + FFmpeg Plugins: add option for customized packing plugin and media stream process plugin set + +--- +## [1.2.0] - 2020-8-14 +**Features & bug fix:** +- OMAF Packing Library + + Support late-binding mode, option for extractor track generation + + Support packing for cube-map projection in late-binding mode + + Optimize tile partition for extractor generation + + Plugin mode to support customized packing method + + bug fix: memory leak, hang / crash in some condition + +- OMAF Dash Access Library + + Support late-binding mode: tile-selection and bit-stream rewriting in client side + + Support cube-map projection in late-binding mode + + Enable NDK cross-compiling, Add JNI support for Dash Access API + + Optimization of downloading and segmentation parsing for fault-tolerance and stability + + Plugin mode to support customized FOV sequence operation + +- 360SCVP (Stream Concatenation and Viewport Processing) Library + + Support Cube-map projection: tile processing and viewport processing + + optimization for tile selection to improve performance and accuracy + +- Reference OMAF Player + + Support Cube-map projection + + Support multiple video streams decoding and tile rendering + + Code refactor + +- FFmpeg Plugins & Encoder Library + + Encoder Library: Enable local session support to improve performance + + Encoder Library: Bug fix for memory leak, resource release, share memory usage, call mechanism, etc. 
+ + FFmpeg Plugins: add option for Cube-map projection support + + FFmpeg Plugins: add option for late-binding support + --- ## [1.0.0] - 2020-01-13 **Features:** @@ -16,7 +87,7 @@ + Support OMAF-compliant metadata parsing; + Support HTTPS/HTTP; -- 360 Video Processing Library +- 360SCVP (Stream Concatenation and Viewport Processing) Library + Provide a unify interface to process tile-based HEVC bitstream processing and viewport-based Content Processing; + Support HEVC bitstream processing: VPS/SPS/PPS parsing and generating, 360Video-relative SEI Generating and parsing, HEVC tile-based bitstream aggregation; + Support Viewport generation, viewport-based tile selection and extractor selection based on content coverage; @@ -31,8 +102,8 @@ - FFMPEG Plugins + Demux plugin with OMAF Dash accessing support; + Multiplexing plugin with OMAF Packing library support; + + SVT encoder support **Know Issues:** -- Cube-map projection format hasn't been support yet; - Audio segmentation hasn't been support yet; --- diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 995d0287..20af549c 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1,9 +1,23 @@ CMAKE_MINIMUM_REQUIRED(VERSION 2.12) PROJECT(immersive-media) +SET(CMAKE_SKIP_INSTALL_ALL_DEPENDENCY TRUE) + +OPTION(USE_TRACE + "Use trace" + OFF) + +IF(USE_TRACE) + ADD_DEFINITIONS("-D_USE_TRACE_") +ENDIF() + +IF(NOT DE_FLAG) + SET(DE_FLAG false) +ENDIF() IF(NOT CMAKE_BUILD_TYPE) SET(CMAKE_BUILD_TYPE Release) ENDIF() + MESSAGE("Build type: " ${CMAKE_BUILD_TYPE}) IF(NOT TARGET) @@ -11,52 +25,123 @@ IF(NOT TARGET) ENDIF() IF(${TARGET} STREQUAL "server") - SET(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -z noexecstack -z relro -z now -fstack-protector-strong -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security -Wl,-S -Wall -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 -lpthread -g") - SET(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -z noexecstack -z relro -z now -fstack-protector-strong -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security -Wl,-S -Wall -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 -lpthread") + SET(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} \ + -std=c++11 -lpthread -lsafestring_shared \ + -D_GLIBCXX_USE_CXX11_ABI=0") + SET(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} \ + -z noexecstack -z relro -z now \ + -fstack-protector-strong -fPIE -fPIC -pie -O2 \ + -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security \ + -Wl,-S -Wall -std=c++11 -lpthread -lsafestring_shared \ + -D_GLIBCXX_USE_CXX11_ABI=0") SET(FFMPEG_DIR "ffmpeg") SET(FFMPEG_FILE "ffmpeg.txt") - SET(FFMPEG_FLAG --prefix=/usr --libdir=/usr/lib --enable-static --disable-shared --enable-gpl --enable-nonfree --disable-optimizations --disable-vaapi --enable-libDistributedEncoder --enable-libVROmafPacking) - include_directories(${CMAKE_CURRENT_SOURCE_DIR}/360SCVP) - include_directories(${CMAKE_CURRENT_SOURCE_DIR}/VROmafPacking) + SET(FFMPEG_FLAG --prefix=/usr --libdir=/usr/lib --enable-static + --disable-shared --enable-gpl --enable-nonfree + --disable-optimizations --disable-vaapi --enable-libopenhevc + --enable-libDistributedEncoder --enable-libVROmafPacking) + INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/360SCVP) + INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/VROmafPacking) + INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/plugins/OMAFPacking_Plugin) + INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/plugins/StreamProcess_Plugin) + INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/plugins/StreamProcess_Plugin/VideoStream_Plugin) 
ADD_SUBDIRECTORY(360SCVP) + ADD_SUBDIRECTORY(isolib) + IF(USE_TRACE) + ADD_SUBDIRECTORY(trace) + ENDIF() + ADD_SUBDIRECTORY(plugins/OMAFPacking_Plugin) + ADD_SUBDIRECTORY(plugins/StreamProcess_Plugin) + IF(${DE_FLAG} STREQUAL "true") + ADD_SUBDIRECTORY(distributed_encoder) + ENDIF() ADD_SUBDIRECTORY(VROmafPacking) - ADD_CUSTOM_COMMAND( - OUTPUT ${FFMPEG_FILE} - COMMAND export PKG_CONFIG_PATH=${CMAKE_CURRENT_SOURCE_DIR}/VROmafPacking/:/usr/local/lib/pkgconfig/:$PKG_CONFIG_PATH && mkdir -p ${FFMPEG_DIR} && cd ${FFMPEG_DIR} && ${CMAKE_CURRENT_SOURCE_DIR}/FFmpeg/configure ${FFMPEG_FLAG} --extra-cflags="-I${CMAKE_CURRENT_SOURCE_DIR}/VROmafPacking/" --extra-cflags="-I${CMAKE_CURRENT_SOURCE_DIR}/360SCVP/" --extra-ldflags="-L${CMAKE_BINARY_DIR}/VROmafPacking/" --extra-ldflags="-L${CMAKE_BINARY_DIR}/360SCVP" && make -j100 - COMMENT "Building ffmpeg..." - VERBATIM - ) ADD_CUSTOM_TARGET(ffmpeg ALL COMMAND echo "Target ffmpeg" - DEPENDS ${FFMPEG_FILE} - VERBATIM - ) + VERBATIM) + ADD_CUSTOM_COMMAND(TARGET ffmpeg + COMMAND export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/:${CMAKE_CURRENT_SOURCE_DIR}/distributed_encoder/main_encoder/:${CMAKE_CURRENT_SOURCE_DIR}/VROmafPacking/:$PKG_CONFIG_PATH && + mkdir -p ${FFMPEG_DIR} && cd ${FFMPEG_DIR} && + ${CMAKE_CURRENT_SOURCE_DIR}/FFmpeg/configure ${FFMPEG_FLAG} + --extra-cflags="-I${CMAKE_CURRENT_SOURCE_DIR}/VROmafPacking/" + --extra-cflags="-I${CMAKE_CURRENT_SOURCE_DIR}/360SCVP/" + --extra-cflags="-I${CMAKE_CURRENT_SOURCE_DIR}/distributed_encoder/main_encoder/" + --extra-cflags="-I${CMAKE_CURRENT_SOURCE_DIR}/distributed_encoder/util/" + --extra-cflags="-I${CMAKE_CURRENT_SOURCE_DIR}/utils/" + --extra-ldflags="-L${CMAKE_BINARY_DIR}/VROmafPacking/" + --extra-ldflags="-L${CMAKE_BINARY_DIR}/360SCVP/" + --extra-ldflags="-L${CMAKE_BINARY_DIR}/distributed_encoder/main_encoder/" && + make -j $(nproc) + COMMENT "Building ffmpeg..." 
+ VERBATIM) + ADD_DEPENDENCIES(ffmpeg VROmafPacking) ELSEIF(${TARGET} STREQUAL "client") - SET(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -z noexecstack -z relro -z now -fstack-protector-strong -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security -Wl,-S -Wall -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 -I${CMAKE_CURRENT_SOURCE_DIR}/360SCVP -g") - SET(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -z noexecstack -z relro -z now -fstack-protector-strong -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security -Wl,-S -Wall -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 -I${CMAKE_CURRENT_SOURCE_DIR}/360SCVP") + SET(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} \ + -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 \ + -I${CMAKE_CURRENT_SOURCE_DIR}/360SCVP") + SET(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} \ + -z noexecstack -z relro -z now \ + -fstack-protector-strong -fPIE -fPIC -pie -O2 \ + -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security \ + -Wl,-S -Wall -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 \ + -I${CMAKE_CURRENT_SOURCE_DIR}/360SCVP") SET(UTILS_DIR "utils") - SET(PLAYER_DIR "player") - SET(PLAYER_FILE "player.txt") + SET(PLAYER_LIB_DIR "player/player_lib") + SET(PLAYER_LIB_FLAGS "-DCMAKE_CXX_FLAGS= \ + -L${CMAKE_BINARY_DIR}/OmafDashAccess \ + -L${CMAKE_BINARY_DIR}/360SCVP") + SET(PLAYER_MEDIASOURCE_DIR "player/player_lib/MediaSource") + SET(PLAYER_APP_DIR "player/app") + SET(PLAYER_APP_FLAGS "-DCMAKE_CXX_FLAGS= \ + -L${CMAKE_BINARY_DIR}/OmafDashAccess \ + -L${CMAKE_BINARY_DIR}/360SCVP \ + -L${CMAKE_BINARY_DIR}/player/player_lib") + SET(PLAYER_COMMON_DIR "player/player_lib/Common") ADD_SUBDIRECTORY(360SCVP) + ADD_SUBDIRECTORY(isolib) + IF(USE_TRACE) + ADD_SUBDIRECTORY(trace) + SET(PLAYER_LIB_FLAGS "${PLAYER_LIB_FLAGS} -L${CMAKE_BINARY_DIR}/trace") + SET(PLAYER_APP_FLAGS "${PLAYER_APP_FLAGS} -L${CMAKE_BINARY_DIR}/trace") + ENDIF() ADD_SUBDIRECTORY(OmafDashAccess) - ADD_CUSTOM_COMMAND( - OUTPUT ${PLAYER_FILE} - COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/data_type.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_DIR} - COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/ns_def.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_DIR} - COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/error.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_DIR} - COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/OmafStructure.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_DIR} - COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/360SCVP/360SCVPAPI.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_DIR} - COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/OmafDashAccess/OmafDashAccessApi.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_DIR} - COMMAND mkdir -p ${PLAYER_DIR} && cd ${PLAYER_DIR} && cmake -DUSE_OMAF=ON -DUSE_WEBRTC=OFF "-DCMAKE_CXX_FLAGS=-L${CMAKE_BINARY_DIR}/OmafDashAccess -L${CMAKE_BINARY_DIR}/360SCVP" ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_DIR} && make - COMMENT "Building player..." 
- VERBATIM - ) + ADD_CUSTOM_TARGET(player ALL COMMAND echo "Target player" - DEPENDS ${PLAYER_FILE} - VERBATIM - ) + VERBATIM) + ADD_CUSTOM_COMMAND(TARGET player + COMMAND export LD_LIBRARY_PATH=${CMAKE_BINARY_DIR}/OmafDashAccess:${CMAKE_BINARY_DIR}/360SCVP:$LD_LIBRARY_PATH && + cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/ns_def.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_LIB_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/error.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_LIB_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/OmafStructure.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_LIB_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/OmafDashAccess/OmafDashAccessApi.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_MEDIASOURCE_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/data_type.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_LIB_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/data_type.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_COMMON_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/pose.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_LIB_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/pose.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_MEDIASOURCE_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/${UTILS_DIR}/pose.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_COMMON_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/360SCVP/360SCVPAPI.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_LIB_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/360SCVP/360SCVPAPI.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_MEDIASOURCE_DIR} && + cp ${CMAKE_CURRENT_SOURCE_DIR}/360SCVP/360SCVPAPI.h ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_COMMON_DIR} && + mkdir -p ${PLAYER_LIB_DIR} && cd ${PLAYER_LIB_DIR} && + cmake -DUSE_OMAF=ON + -DUSE_WEBRTC=OFF + -DLINUX_OS=ON + -DUSE_TRACE=${USE_TRACE} + ${PLAYER_LIB_FLAGS} + ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_LIB_DIR} && + make -j $(nproc) && + sudo make install && + mkdir -p ../../${PLAYER_APP_DIR} && cd ../../${PLAYER_APP_DIR} && + cmake -DLINUX_OS=ON + -DUSE_TRACE=${USE_TRACE} + ${PLAYER_APP_FLAGS} + ${CMAKE_CURRENT_SOURCE_DIR}/${PLAYER_APP_DIR} && + make -j $(nproc) + COMMENT "Building player..." + VERBATIM) + ADD_DEPENDENCIES(player OmafDashAccess) ELSE() diff --git a/src/JNIOmafDashAccess/OmafAccess.iml b/src/JNIOmafDashAccess/OmafAccess.iml new file mode 100644 index 00000000..ad88013b --- /dev/null +++ b/src/JNIOmafDashAccess/OmafAccess.iml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/JNIOmafDashAccess/README.md b/src/JNIOmafDashAccess/README.md new file mode 100644 index 00000000..e7e343c3 --- /dev/null +++ b/src/JNIOmafDashAccess/README.md @@ -0,0 +1,44 @@ +# Guidance for the OmafDashAccess JNI project + +------ + +### 1. How to get libOmafDashAccess and its dependent libraries using the NDK +```bash +git clone from the repo +cd src/external +./prebuild_android.sh +./make_android.sh +``` +The built libraries can then be found at the following paths: +| Library name | Path | +| -------- | -----: | +| libglog.so | src/build/external/android/glog/build/ | +| libssl.so | src/build/external/android/openssl-output/lib/ | +| libcrypto.so | src/build/external/android/openssl-output/lib/ | +| libcurl.so | src/build/external/android/curl-output/arm64-v8a/lib/ | +| lib360SCVP.so | src/build/android/360SCVP/ | +| libdashparser.a | src/build/android/isolib/dash_parser/ | +| libOmafDashAccess.so | src/build/android/OmafDashAccess/ | + +Copy these libraries to "src/JNIOmafDashAccess/omafdashaccesslibrary/libs/arm64-v8a/". +### 2. Prerequisites +To build the whole JNI project, a few prerequisites must be in place.
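+Before installing the tools listed below, first make sure the libraries built in section 1 have been copied into the module. That copy step can be scripted; the snippet below is only an illustrative sketch (the source paths come from the table in section 1, the destination is the directory named above, and the DEST variable is just a local shorthand), so adjust it to your checkout:
+```bash
+# Illustrative helper: collect the NDK build outputs listed in section 1
+# and place them where the omafdashaccesslibrary module expects them.
+# Run from the repository root.
+set -e
+DEST=src/JNIOmafDashAccess/omafdashaccesslibrary/libs/arm64-v8a
+mkdir -p "${DEST}"
+cp src/build/external/android/glog/build/libglog.so "${DEST}/"
+cp src/build/external/android/openssl-output/lib/libssl.so "${DEST}/"
+cp src/build/external/android/openssl-output/lib/libcrypto.so "${DEST}/"
+cp src/build/external/android/curl-output/arm64-v8a/lib/libcurl.so "${DEST}/"
+cp src/build/android/360SCVP/lib360SCVP.so "${DEST}/"
+cp src/build/android/isolib/dash_parser/libdashparser.a "${DEST}/"
+cp src/build/android/OmafDashAccess/libOmafDashAccess.so "${DEST}/"
+```
+With the libraries in place, the following tools and versions are needed: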
+```bash +Android Studio 3.5.1 +JRE: 1.8.0_202-release-1483-b49-5587405 amd64 +JVM: OpenJDK 64-Bit Server VM by JetBrains s.r.o + +Android Gradle Plugin version = 3.5.1 +Android Gradle version = 5.4.1 +Compile SDK version = API 29: Android 10.0 (Q) +Build Tools version = 29.0.3 + +ARM processor: arm64-v8a +``` + +### 3. How to run the JNI project + +> 1. Once the required libraries and the tools mentioned above are installed, open the JNIOmafDashAccess project with Android Studio. +> 2. Check that the Android phone is recognized as a running device. +> 3. Click the green triangle 'Run' button. +> 4. After the program finishes, a test HEVC file can be found at the cache path on the phone. diff --git a/src/JNIOmafDashAccess/app/app.iml b/src/JNIOmafDashAccess/app/app.iml new file mode 100644 index 00000000..e5820b17 --- /dev/null +++ b/src/JNIOmafDashAccess/app/app.iml @@ -0,0 +1,141 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/JNIOmafDashAccess/app/build.gradle b/src/JNIOmafDashAccess/app/build.gradle new file mode 100644 index 00000000..912ca918 --- /dev/null +++ b/src/JNIOmafDashAccess/app/build.gradle @@ -0,0 +1,31 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion 29 + buildToolsVersion "29.0.3" + defaultConfig { + applicationId "com.example.myapplication" + minSdkVersion 15 + targetSdkVersion 29 + versionCode 1 + versionName "1.0" + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + implementation 'androidx.appcompat:appcompat:1.0.2' + implementation 'androidx.constraintlayout:constraintlayout:1.1.3' + testImplementation 'junit:junit:4.12' + implementation files('libs/jna.jar') + androidTestImplementation 'androidx.test.ext:junit:1.1.0' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1' + androidTestImplementation project(path: ':omafdashaccesslibrary') +} diff --git a/src/JNIOmafDashAccess/app/libs/jna.jar b/src/JNIOmafDashAccess/app/libs/jna.jar new file mode 100644 index 00000000..af5dd080 Binary files /dev/null and b/src/JNIOmafDashAccess/app/libs/jna.jar differ diff --git a/src/JNIOmafDashAccess/app/proguard-rules.pro b/src/JNIOmafDashAccess/app/proguard-rules.pro new file mode 100644 index 00000000..f1b42451 --- /dev/null +++ b/src/JNIOmafDashAccess/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name.
+#-renamesourcefileattribute SourceFile diff --git a/src/JNIOmafDashAccess/app/src/androidTest/java/com/example/myapplication/ExampleInstrumentedTest.java b/src/JNIOmafDashAccess/app/src/androidTest/java/com/example/myapplication/ExampleInstrumentedTest.java new file mode 100644 index 00000000..c98ece2c --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/androidTest/java/com/example/myapplication/ExampleInstrumentedTest.java @@ -0,0 +1,38 @@ +package com.example.myapplication; + +import android.content.Context; +import android.os.Environment; +import android.util.Log; + +import androidx.test.platform.app.InstrumentationRegistry; +import androidx.test.ext.junit.runners.AndroidJUnit4; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.junit.Assert.*; + +/** + * Instrumented test, which will execute on an Android device. + * + * @see Testing documentation + */ +@RunWith(AndroidJUnit4.class) +public class ExampleInstrumentedTest { + @Test + public void useAppContext() { + // Context of the app under test. + Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext(); + + assertEquals("com.example.myapplication", appContext.getPackageName()); + } +} diff --git a/src/JNIOmafDashAccess/app/src/androidTest/java/com/example/myapplication/UnitTestOmafDashAccess.java b/src/JNIOmafDashAccess/app/src/androidTest/java/com/example/myapplication/UnitTestOmafDashAccess.java new file mode 100644 index 00000000..7f889beb --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/androidTest/java/com/example/myapplication/UnitTestOmafDashAccess.java @@ -0,0 +1,178 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +package com.example.myapplication; + +import androidx.test.ext.junit.runners.AndroidJUnit4; + +import com.example.omafdashaccesslibrary.JnaOmafAccess; +import com.example.omafdashaccesslibrary.OmafAccess; +import com.sun.jna.ptr.IntByReference; +import com.sun.jna.ptr.LongByReference; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; + +import static com.example.omafdashaccesslibrary.JnaOmafAccess.SourceType.MultiResSource; +import static org.junit.Assert.assertEquals; + +/** + * Instrumented test, which will execute on an Android device. + * + * @see Testing documentation + */ +@RunWith(AndroidJUnit4.class) +public class UnitTestOmafDashAccess { + //set up initial parameters + private final String url_static = "http://192.168.43.166:8080/4k_small_tiles_30M/Test.mpd"; + private final String url_static_https = "https://192.168.43.166:443/4k_small_tiles_30M/Test.mpd"; + private final String cache_path = "sdcard/Android/data/tmp/"; + + + + @Test + public void sampleOmafDashAccessAPI() { + //1. new OmafAccess object + int source_type = MultiResSource; + boolean enable_extractor = false; + //http proxy + JnaOmafAccess._omafHttpProxy.ByValue proxy = new JnaOmafAccess._omafHttpProxy.ByValue(); + proxy.http_proxy = null; + proxy.https_proxy = null; + proxy.no_proxy = null; + proxy.proxy_user = null; + proxy.proxy_wd = null; + //http params + int ssl_verify_host = 0; + int ssl_verify_peer = 0; + long conn_timeout = -1; + int retry_times = 3; + long total_timeout = -1; + JnaOmafAccess._omafHttpParams.ByValue http_params = new JnaOmafAccess._omafHttpParams.ByValue(); + http_params.conn_timeout = conn_timeout; + http_params.total_timeout = total_timeout; + http_params.retry_times = retry_times; + http_params.ssl_verify_host = ssl_verify_host; + http_params.ssl_verify_peer = ssl_verify_peer; + long max_parallel_transfers = 256; + int segment_open_timeout_ms = 3000; + //statistic params + int statistic_enable = 0; + int window_size_ms = 10000; + JnaOmafAccess._omafStatisticsParams.ByValue statistic_params = new JnaOmafAccess._omafStatisticsParams.ByValue(); + statistic_params.window_size_ms = window_size_ms; + statistic_params.enable = statistic_enable; + //synchronizer_params + int synchronizer_enable = 0; + int segment_range_size = 20; + JnaOmafAccess._omafSynchronizerParams.ByValue synchronizer_params = new JnaOmafAccess._omafSynchronizerParams.ByValue(); + synchronizer_params.segment_range_size = segment_range_size; + synchronizer_params.enable = synchronizer_enable; + //predictor params + JnaOmafAccess._omafPredictorParams.ByValue predictor_params = new JnaOmafAccess._omafPredictorParams.ByValue(); + predictor_params.libpath = ""; + predictor_params.name = ""; + predictor_params.enable = 0; + JnaOmafAccess._omafDashParams.ByValue omaf_params = new JnaOmafAccess._omafDashParams.ByValue(); + omaf_params.proxy = proxy; + omaf_params.http_params = http_params; + omaf_params.statistic_params = statistic_params; + omaf_params.synchronizer_params = synchronizer_params; + omaf_params.predictor_params = predictor_params; + omaf_params.max_parallel_transfers = max_parallel_transfers; + omaf_params.segment_open_timeout_ms = segment_open_timeout_ms; + OmafAccess omafAccess = new OmafAccess(url_static, cache_path, source_type, enable_extractor, omaf_params); + //2. initialize + int ret = 0; + ret = omafAccess.Initialize(); + assertEquals(ret, 0); + //3. 
set headinfo + JnaOmafAccess.HEADPOSE.ByReference pose = new JnaOmafAccess.HEADPOSE.ByReference(); + JnaOmafAccess.HEADSETINFO clientInfo = new JnaOmafAccess.HEADSETINFO(pose, 80, 80, 960, 960); + ret = omafAccess.SetupHeadSetInfo(clientInfo); + assertEquals(ret, 0); + //4. open media + ret = omafAccess.OpenMedia(false, "", ""); // -1 + assertEquals(ret, 0); + //5. get media information + JnaOmafAccess.DASHMEDIAINFO info = new JnaOmafAccess.DASHMEDIAINFO(); + ret = omafAccess.GetMediaInfo(info); + assertEquals(ret, 0); + try { + //6. test hevc file output + int stream_id = 0; + File file1 = new File(cache_path + "frame300high.h265"); + if (file1.exists()) { + file1.delete(); + } + file1.createNewFile(); + FileOutputStream outStream1 = null; + outStream1 = new FileOutputStream(file1); + + File file2 = new File(cache_path + "frame300low.h265"); + if (file2.exists()) { + file2.delete(); + } + file2.createNewFile(); + FileOutputStream outStream2 = null; + outStream2 = new FileOutputStream(file2); + //7. get packets + byte needHeader = 1; + int frame_count = 0; + while (frame_count < 300) { + JnaOmafAccess.DASHPACKET[] dashPackets = new JnaOmafAccess.DASHPACKET[16]; + IntByReference size = new IntByReference(); + LongByReference pts = new LongByReference(); + byte clearbuf = 0; + ret = omafAccess.GetPacket(stream_id, dashPackets, size, pts, needHeader, clearbuf); + if (ret == 0 && dashPackets[0].buf != null && dashPackets[0].size != 0 && size.getValue() != 0) { + ByteBuffer byteBuffer1 = dashPackets[0].buf.getByteBuffer(0, dashPackets[0].size); + byte[] bytes1 = new byte[byteBuffer1.remaining()]; + byteBuffer1.get(bytes1, 0, bytes1.length); + outStream1.write(bytes1); + frame_count++; + } + if (ret == 0 && dashPackets[1].buf != null && dashPackets[1].size != 0 && size.getValue() != 0) { + ByteBuffer byteBuffer2 = dashPackets[1].buf.getByteBuffer(0, dashPackets[1].size); + byte[] bytes2 = new byte[byteBuffer2.remaining()]; + byteBuffer2.get(bytes2, 0, bytes2.length); + outStream2.write(bytes2); + } + } + outStream1.close(); + outStream2.close(); + omafAccess.CloseMedia(); + omafAccess.Close(); + }catch (IOException e) { + e.printStackTrace(); + } + } + +} diff --git a/src/JNIOmafDashAccess/app/src/main/AndroidManifest.xml b/src/JNIOmafDashAccess/app/src/main/AndroidManifest.xml new file mode 100644 index 00000000..c6d91f89 --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/AndroidManifest.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/JNIOmafDashAccess/app/src/main/java/com/example/myapplication/MainActivity.java b/src/JNIOmafDashAccess/app/src/main/java/com/example/myapplication/MainActivity.java new file mode 100644 index 00000000..ca37a8a5 --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/java/com/example/myapplication/MainActivity.java @@ -0,0 +1,14 @@ +package com.example.myapplication; + +import androidx.appcompat.app.AppCompatActivity; + +import android.os.Bundle; + +public class MainActivity extends AppCompatActivity { + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_main); + } +} diff --git a/src/JNIOmafDashAccess/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/src/JNIOmafDashAccess/app/src/main/res/drawable-v24/ic_launcher_foreground.xml new file mode 100644 index 00000000..1f6bb290 --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/res/drawable-v24/ic_launcher_foreground.xml @@ -0,0 +1,34 @@ + + + + + + + + + + 
+ diff --git a/src/JNIOmafDashAccess/app/src/main/res/drawable/ic_launcher_background.xml b/src/JNIOmafDashAccess/app/src/main/res/drawable/ic_launcher_background.xml new file mode 100644 index 00000000..0d025f9b --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/res/drawable/ic_launcher_background.xml @@ -0,0 +1,170 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/JNIOmafDashAccess/app/src/main/res/layout/activity_main.xml b/src/JNIOmafDashAccess/app/src/main/res/layout/activity_main.xml new file mode 100644 index 00000000..4fc24441 --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/res/layout/activity_main.xml @@ -0,0 +1,18 @@ + + + + + + \ No newline at end of file diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/src/JNIOmafDashAccess/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml new file mode 100644 index 00000000..eca70cfe --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/src/JNIOmafDashAccess/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml new file mode 100644 index 00000000..eca70cfe --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-hdpi/ic_launcher.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 00000000..898f3ed5 Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-hdpi/ic_launcher.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-hdpi/ic_launcher_round.png new file mode 100644 index 00000000..dffca360 Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-mdpi/ic_launcher.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 00000000..64ba76f7 Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-mdpi/ic_launcher_round.png new file mode 100644 index 00000000..dae5e082 Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 00000000..e5ed4659 Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png new file mode 100644 index 00000000..14ed0af3 Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 00000000..b0907cac Binary files /dev/null and 
b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png new file mode 100644 index 00000000..d8ae0315 Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png new file mode 100644 index 00000000..2c18de9e Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png new file mode 100644 index 00000000..beed3cdd Binary files /dev/null and b/src/JNIOmafDashAccess/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ diff --git a/src/JNIOmafDashAccess/app/src/main/res/values/colors.xml b/src/JNIOmafDashAccess/app/src/main/res/values/colors.xml new file mode 100644 index 00000000..69b22338 --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/res/values/colors.xml @@ -0,0 +1,6 @@ + + + #008577 + #00574B + #D81B60 + diff --git a/src/JNIOmafDashAccess/app/src/main/res/values/strings.xml b/src/JNIOmafDashAccess/app/src/main/res/values/strings.xml new file mode 100644 index 00000000..efd30732 --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/res/values/strings.xml @@ -0,0 +1,3 @@ + + My Application + diff --git a/src/JNIOmafDashAccess/app/src/main/res/values/styles.xml b/src/JNIOmafDashAccess/app/src/main/res/values/styles.xml new file mode 100644 index 00000000..5885930d --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/main/res/values/styles.xml @@ -0,0 +1,11 @@ + + + + + + diff --git a/src/JNIOmafDashAccess/app/src/test/java/com/example/myapplication/ExampleUnitTest.java b/src/JNIOmafDashAccess/app/src/test/java/com/example/myapplication/ExampleUnitTest.java new file mode 100644 index 00000000..bf43ee59 --- /dev/null +++ b/src/JNIOmafDashAccess/app/src/test/java/com/example/myapplication/ExampleUnitTest.java @@ -0,0 +1,17 @@ +package com.example.myapplication; + +import org.junit.Test; + +import static org.junit.Assert.*; + +/** + * Example local unit test, which will execute on the development machine (host). + * + * @see Testing documentation + */ +public class ExampleUnitTest { + @Test + public void addition_isCorrect() { + assertEquals(4, 2 + 2); + } +} \ No newline at end of file diff --git a/src/JNIOmafDashAccess/build.gradle b/src/JNIOmafDashAccess/build.gradle new file mode 100644 index 00000000..05d53ae2 --- /dev/null +++ b/src/JNIOmafDashAccess/build.gradle @@ -0,0 +1,27 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. 
+ +buildscript { + repositories { + google() + jcenter() + + } + dependencies { + classpath 'com.android.tools.build:gradle:3.5.1' + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + google() + jcenter() + + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/src/JNIOmafDashAccess/gradle.properties b/src/JNIOmafDashAccess/gradle.properties new file mode 100644 index 00000000..c73d2393 --- /dev/null +++ b/src/JNIOmafDashAccess/gradle.properties @@ -0,0 +1,19 @@ +# Project-wide Gradle settings. +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +org.gradle.jvmargs=-Xmx1536m +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +# org.gradle.parallel=true +# AndroidX package structure to make it clearer which packages are bundled with the +# Android operating system, and which are packaged with your app's APK +# https://developer.android.com/topic/libraries/support-library/androidx-rn +android.useAndroidX=true +# Automatically convert third-party libraries to use AndroidX +android.enableJetifier=true diff --git a/src/JNIOmafDashAccess/gradle/wrapper/gradle-wrapper.jar b/src/JNIOmafDashAccess/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000..f6b961fd Binary files /dev/null and b/src/JNIOmafDashAccess/gradle/wrapper/gradle-wrapper.jar differ diff --git a/src/JNIOmafDashAccess/gradle/wrapper/gradle-wrapper.properties b/src/JNIOmafDashAccess/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..48e5452c --- /dev/null +++ b/src/JNIOmafDashAccess/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Fri Feb 28 10:04:19 CST 2020 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip diff --git a/src/JNIOmafDashAccess/gradlew b/src/JNIOmafDashAccess/gradlew new file mode 100755 index 00000000..cccdd3d5 --- /dev/null +++ b/src/JNIOmafDashAccess/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
+DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + 
esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/src/JNIOmafDashAccess/gradlew.bat b/src/JNIOmafDashAccess/gradlew.bat new file mode 100644 index 00000000..f9553162 --- /dev/null +++ b/src/JNIOmafDashAccess/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/src/JNIOmafDashAccess/local.properties b/src/JNIOmafDashAccess/local.properties new file mode 100644 index 00000000..fe5641c3 --- /dev/null +++ b/src/JNIOmafDashAccess/local.properties @@ -0,0 +1,8 @@ +## This file must *NOT* be checked into Version Control Systems, +# as it contains information specific to your local configuration. +# +# Location of the SDK. This is only used by Gradle. 
+# For customization when using a Version Control System, please read the +# header note. +#Wed Mar 11 13:25:15 CST 2020 +sdk.dir=/home/media/Downloads/android-sdk-linux diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/build.gradle b/src/JNIOmafDashAccess/omafdashaccesslibrary/build.gradle new file mode 100644 index 00000000..11ec1a8f --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/build.gradle @@ -0,0 +1,39 @@ +apply plugin: 'com.android.library' + +android { + sourceSets { + main{ + jniLibs.srcDirs = ['src/main/jniLibs','libs'] + } + } + compileSdkVersion 29 + buildToolsVersion "29.0.3" + + + defaultConfig { + minSdkVersion 15 + targetSdkVersion 29 + versionCode 1 + versionName "1.0" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + consumerProguardFiles 'consumer-rules.pro' + } + + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' + } + } + +} + +dependencies { + implementation fileTree(dir: 'libs', include: ['*.jar']) + + implementation 'androidx.appcompat:appcompat:1.0.2' + testImplementation 'junit:junit:4.12' + androidTestImplementation 'androidx.test.ext:junit:1.1.0' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1' +} diff --git a/OMAF-Sample/.gitkeep b/src/JNIOmafDashAccess/omafdashaccesslibrary/consumer-rules.pro similarity index 100% rename from OMAF-Sample/.gitkeep rename to src/JNIOmafDashAccess/omafdashaccesslibrary/consumer-rules.pro diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/libs/jna.jar b/src/JNIOmafDashAccess/omafdashaccesslibrary/libs/jna.jar new file mode 100644 index 00000000..af5dd080 Binary files /dev/null and b/src/JNIOmafDashAccess/omafdashaccesslibrary/libs/jna.jar differ diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/omafdashaccesslibrary.iml b/src/JNIOmafDashAccess/omafdashaccesslibrary/omafdashaccesslibrary.iml new file mode 100644 index 00000000..da1a8c6f --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/omafdashaccesslibrary.iml @@ -0,0 +1,139 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/proguard-rules.pro b/src/JNIOmafDashAccess/omafdashaccesslibrary/proguard-rules.pro new file mode 100644 index 00000000..f1b42451 --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. 
+#-renamesourcefileattribute SourceFile diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/src/androidTest/java/com/example/omafdashaccesslibrary/ExampleInstrumentedTest.java b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/androidTest/java/com/example/omafdashaccesslibrary/ExampleInstrumentedTest.java new file mode 100644 index 00000000..516c16a2 --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/androidTest/java/com/example/omafdashaccesslibrary/ExampleInstrumentedTest.java @@ -0,0 +1,27 @@ +package com.example.omafdashaccesslibrary; + +import android.content.Context; + +import androidx.test.platform.app.InstrumentationRegistry; +import androidx.test.ext.junit.runners.AndroidJUnit4; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import static org.junit.Assert.*; + +/** + * Instrumented test, which will execute on an Android device. + * + * @see Testing documentation + */ +@RunWith(AndroidJUnit4.class) +public class ExampleInstrumentedTest { + @Test + public void useAppContext() { + // Context of the app under test. + Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext(); + + assertEquals("com.example.omafdashaccesslibrary.test", appContext.getPackageName()); + } +} diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/AndroidManifest.xml b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/AndroidManifest.xml new file mode 100644 index 00000000..0a210b99 --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/AndroidManifest.xml @@ -0,0 +1,2 @@ + diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/java/com/example/omafdashaccesslibrary/JnaOmafAccess.java b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/java/com/example/omafdashaccesslibrary/JnaOmafAccess.java new file mode 100644 index 00000000..0009abc0 --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/java/com/example/omafdashaccesslibrary/JnaOmafAccess.java @@ -0,0 +1,806 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.example.omafdashaccesslibrary; + +import com.sun.jna.Library; +import com.sun.jna.Native; +import com.sun.jna.NativeLibrary; +import com.sun.jna.Pointer; +import com.sun.jna.Structure; +import com.sun.jna.ptr.IntByReference; +import com.sun.jna.ptr.LongByReference; + +import java.nio.IntBuffer; +import java.nio.LongBuffer; +import java.util.Arrays; +import java.util.List; +/** + * JNA Wrapper for library JnaOmafAccess
+ * This file was autogenerated by JNAerator,
+ * a tool written by Olivier Chafik that uses a few open-source projects.
+ * For help, please visit NativeLibs4Java , Rococoa, or JNA. + */ +public interface JnaOmafAccess extends Library { + public static final String JNA_LIBRARY_NAME = "OmafDashAccess"; + public static final NativeLibrary JNA_NATIVE_LIB = NativeLibrary.getInstance(JnaOmafAccess.JNA_LIBRARY_NAME); + public static final JnaOmafAccess INSTANCE = (JnaOmafAccess)Native.load(JnaOmafAccess.JNA_LIBRARY_NAME, JnaOmafAccess.class); //load 3rd-party library + + + /** enum values */ + public static interface Codec_Type { + /** native declaration : line 7 */ + public static final int VideoCodec_NONE = 0; + /** native declaration : line 8 */ + public static final int VideoCodec_AVC = 1; + /** native declaration : line 9 */ + public static final int VideoCodec_HEVC = 2; + /** native declaration : line 10 */ + public static final int VideoCodec_AV1 = 3; + /** native declaration : line 11 */ + public static final int AudioCodec_NONE = 100; + /** native declaration : line 12 */ + public static final int AudioCodec_AAC = 101; + /** native declaration : line 13 */ + public static final int AudioCodec_AV3 = 102; + /** native declaration : line 14 */ + public static final int AudioCodec_MP3 = 103; + }; + + /** enum values */ + public static interface MediaType { + /** native declaration : line 7 */ + public static final int MediaType_NONE = 0; + /** native declaration : line 8 */ + public static final int MediaType_Video = 1; + /** native declaration : line 9 */ + public static final int MediaType_Audio = 2; + }; + + /** enum values */ + public static interface MPD_MODE { + /** native declaration : line 13 */ + public static final int MODE_DEFAULT = 0; + /** native declaration : line 14 */ + public static final int MODE_TILE_MultiRes = 1; + /** native declaration : line 15 */ + public static final int MODE_TILE_MultiRate = 2; + /** native declaration : line 16 */ + public static final int MODE_NONE = 3; + }; + + /** enum values */ + public static interface SourceType { + /** native declaration : line 162 */ + public static final int DefaultSource = 0; + /** native declaration : line 163 */ + public static final int MultiResSource = 1; + /** native declaration : line 164 */ + public static final int Reserved = 2; + }; + + /** enum values */ + public static final int MAX_QUALITY_NUM = 2; + public static interface QualityRank { + /** native declaration : line 7 */ + public static final int HIGHEST_QUALITY_RANKING = 1; + /** native declaration : line 8 */ + public static final int NORMAL_QUALITY_RANKING = 2; + /** native declaration : line 9 */ + public static final int INVALID_QUALITY_RANKING = 3; + }; + + /** enum values */ + public static interface DashStreamType { + /** native declaration : line 7 */ + public static final int DASH_STREAM_STATIC = 1; + /** native declaration : line 8 */ + public static final int DASH_STREAM_DYNMIC = 2; + }; + + public static class RECTANGUALAR_REGION_WIZE_PACKING extends Structure { + public byte transformType; + public byte guardBandFlag; + public int projRegWidth; + public int projRegHeight; + public int projRegTop; + public int projRegLeft; + public short packedRegWidth; + public short packedRegHeight; + public short packedRegTop; + public short packedRegLeft; + /** below fields take effect when .guardBandFlag. 
is true */ + public byte leftGbWidth; + public byte rightGbWidth; + public byte topGbHeight; + public byte bottomGbHeight; + public byte gbNotUsedForPredFlag; + public byte gbType0; + public byte gbType1; + public byte gbType2; + public byte gbType3; + public RECTANGUALAR_REGION_WIZE_PACKING() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("transformType", "guardBandFlag", "projRegWidth", "projRegHeight", "projRegTop", "projRegLeft", "packedRegWidth", "packedRegHeight", "packedRegTop", "packedRegLeft", "leftGbWidth", "rightGbWidth", "topGbHeight", "bottomGbHeight", "gbNotUsedForPredFlag", "gbType0", "gbType1", "gbType2", "gbType3"); + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected RECTANGUALAR_REGION_WIZE_PACKING newInstance() { return new RECTANGUALAR_REGION_WIZE_PACKING(); } + + + public static class ByReference extends RECTANGUALAR_REGION_WIZE_PACKING implements Structure.ByReference { }; + public static class ByValue extends RECTANGUALAR_REGION_WIZE_PACKING implements Structure.ByValue { }; + }; + + public static class REGION_WIZE_PACKING extends Structure { + public byte constituentPicMatching; + public byte numRegions; + public int projPicWidth; + public int projPicHeight; + public short packedPicWidth; + public short packedPicHeight; + /** C type : RectangularRegionWisePacking* */ + public JnaOmafAccess.RECTANGUALAR_REGION_WIZE_PACKING.ByReference rectRegionPacking; + public byte numHiRegions; + public int lowResPicWidth; + public int lowResPicHeight; + public int timeStamp; + public REGION_WIZE_PACKING() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("constituentPicMatching", "numRegions", "projPicWidth", "projPicHeight", "packedPicWidth", "packedPicHeight", "rectRegionPacking", "numHiRegions", "lowResPicWidth", "lowResPicHeight", "timeStamp"); + } + /** @param rectRegionPacking C type : RectangularRegionWisePacking* */ + public REGION_WIZE_PACKING(byte constituentPicMatching, byte numRegions, int projPicWidth, int projPicHeight, short packedPicWidth, short packedPicHeight, JnaOmafAccess.RECTANGUALAR_REGION_WIZE_PACKING.ByReference rectRegionPacking, byte numHiRegions, int lowResPicWidth, int lowResPicHeight, int timeStamp) { + super(); + this.constituentPicMatching = constituentPicMatching; + this.numRegions = numRegions; + this.projPicWidth = projPicWidth; + this.projPicHeight = projPicHeight; + this.packedPicWidth = packedPicWidth; + this.packedPicHeight = packedPicHeight; + this.rectRegionPacking = rectRegionPacking; + this.numHiRegions = numHiRegions; + this.lowResPicWidth = lowResPicWidth; + this.lowResPicHeight = lowResPicHeight; + this.timeStamp = timeStamp; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected REGION_WIZE_PACKING newInstance() { return new REGION_WIZE_PACKING(); } + + public static class ByReference extends REGION_WIZE_PACKING implements Structure.ByReference { }; + public static class ByValue extends REGION_WIZE_PACKING implements Structure.ByValue { }; + }; + + public static class HEADPOSE extends Structure { + public float yaw; + public float pitch; + public HEADPOSE() { + super(); + this.yaw = 0; + this.pitch = 0; + } + protected List getFieldOrder() { + return Arrays.asList("yaw", "pitch"); + } + public HEADPOSE(float yaw, float pitch) { + super(); + this.yaw = yaw; + this.pitch = pitch; + } + protected 
ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected HEADPOSE newInstance() { return new HEADPOSE(); } + + public static class ByReference extends HEADPOSE implements Structure.ByReference { }; + public static class ByValue extends HEADPOSE implements Structure.ByValue { }; + }; + + public static class HEADSETINFO extends Structure { + /** C type : HeadPose* */ + public JnaOmafAccess.HEADPOSE.ByReference pose; + public float viewPort_hFOV; + public float viewPort_vFOV; + public int viewPort_Width; + public int viewPort_Height; + public HEADSETINFO() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("pose", "viewPort_hFOV", "viewPort_vFOV", "viewPort_Width", "viewPort_Height"); + } + /** @param pose C type : HeadPose* */ + public HEADSETINFO(JnaOmafAccess.HEADPOSE.ByReference pose, float viewPort_hFOV, float viewPort_vFOV, int viewPort_Width, int viewPort_Height) { + super(); + this.pose = pose; + this.viewPort_hFOV = viewPort_hFOV; + this.viewPort_vFOV = viewPort_vFOV; + this.viewPort_Width = viewPort_Width; + this.viewPort_Height = viewPort_Height; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected HEADSETINFO newInstance() { return new HEADSETINFO(); } + + public static class ByReference extends HEADSETINFO implements Structure.ByReference { }; + public static class ByValue extends HEADSETINFO implements Structure.ByValue { }; + }; + + public static class Viewport extends Structure { + public int x; + public int y; + public int height; + public int width; + public int faceId; + public Viewport() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("x", "y", "height", "width", "faceId"); + } + public Viewport(int x, int y, int height, int width, int faceId) { + super(); + this.x = x; + this.y = y; + this.height = height; + this.width = width; + this.faceId = faceId; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected Viewport newInstance() { return new Viewport(); } + + public static class ByReference extends Viewport implements Structure.ByReference { }; + public static class ByValue extends Viewport implements Structure.ByValue { }; + }; + + public static class SOURCERESOLUTION extends Structure { + public int qualityRanking; + public int top; + public int left; + public int width; + public int height; + public SOURCERESOLUTION() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("qualityRanking", "top", "left", "width", "height"); + } + public SOURCERESOLUTION(int qualityRanking, int top, int left, int width, int height) { + super(); + this.qualityRanking = qualityRanking; + this.top = top; + this.left = left; + this.width = width; + this.height = height; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected SOURCERESOLUTION newInstance() { return new SOURCERESOLUTION(); } + + public static class ByReference extends SOURCERESOLUTION implements Structure.ByReference { }; + public static class ByValue extends SOURCERESOLUTION implements Structure.ByValue { }; + }; + + public static class DASHSTATISTICINFO extends Structure { + public int avg_bandwidth; + public int immediate_bandwidth; + public DASHSTATISTICINFO() { + super(); + } + protected List getFieldOrder() { + 
return Arrays.asList("avg_bandwidth", "immediate_bandwidth"); + } + public DASHSTATISTICINFO(int avg_bandwidth, int immediate_bandwidth) { + super(); + this.avg_bandwidth = avg_bandwidth; + this.immediate_bandwidth = immediate_bandwidth; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected DASHSTATISTICINFO newInstance() { return new DASHSTATISTICINFO(); } + + public static class ByReference extends DASHSTATISTICINFO implements Structure.ByReference { }; + public static class ByValue extends DASHSTATISTICINFO implements Structure.ByValue { }; + }; + + public static class _omafHttpProxy extends Structure { + public String http_proxy; + public String https_proxy; + public String no_proxy; + public String proxy_user; + public String proxy_wd; + public _omafHttpProxy() { + super(); + this.http_proxy = ""; + this.https_proxy = ""; + this.no_proxy = ""; + this.proxy_user = ""; + this.proxy_wd = ""; + } + protected List getFieldOrder() { + return Arrays.asList("http_proxy", "https_proxy", "no_proxy", "proxy_user", "proxy_wd"); + } + public _omafHttpProxy(String http_proxy, String https_proxy, String no_proxy, String proxy_user, String proxy_wd) { + super(); + this.http_proxy = http_proxy; + this.https_proxy = https_proxy; + this.no_proxy = no_proxy; + this.proxy_user = proxy_user; + this.proxy_wd = proxy_wd; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected _omafHttpProxy newInstance() { return new _omafHttpProxy(); } + + public static class ByReference extends _omafHttpProxy implements Structure.ByReference { }; + public static class ByValue extends _omafHttpProxy implements Structure.ByValue { }; + }; + + public static class _omafHttpParams extends Structure { + public long conn_timeout; + public long total_timeout; + public int retry_times; + public int ssl_verify_peer; + public int ssl_verify_host; + public _omafHttpParams() { + super(); + this.conn_timeout = 0; + this.total_timeout = 0; + this.retry_times = 0; + this.ssl_verify_peer = 0; + this.ssl_verify_host = 0; + } + protected List getFieldOrder() { + return Arrays.asList("conn_timeout", "total_timeout", "retry_times", "ssl_verify_peer", "ssl_verify_host"); + } + public _omafHttpParams(long conn_timeout, long total_timeout, int retry_times, int ssl_verify_peer, int ssl_verify_host) { + super(); + this.conn_timeout = conn_timeout; + this.total_timeout = total_timeout; + this.retry_times = retry_times; + this.ssl_verify_peer = ssl_verify_peer; + this.ssl_verify_host = ssl_verify_host; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected _omafHttpParams newInstance() { return new _omafHttpParams(); } + + public static class ByReference extends _omafHttpParams implements Structure.ByReference { }; + public static class ByValue extends _omafHttpParams implements Structure.ByValue { }; + }; + + public static class _omafStatisticsParams extends Structure { + public int window_size_ms; + public int enable; + public _omafStatisticsParams() { + super(); + this.window_size_ms = 0; + this.enable = 0; + } + protected List getFieldOrder() { + return Arrays.asList("window_size_ms", "enable"); + } + public _omafStatisticsParams(int window_size_ms, int enable) { + super(); + this.window_size_ms = window_size_ms; + this.enable = enable; + } + protected ByReference 
newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected _omafStatisticsParams newInstance() { return new _omafStatisticsParams(); } + + public static class ByReference extends _omafStatisticsParams implements Structure.ByReference { }; + public static class ByValue extends _omafStatisticsParams implements Structure.ByValue { }; + }; + + public static class _omafSynchronizerParams extends Structure { + public int segment_range_size; + public int enable; + public _omafSynchronizerParams() { + super(); + this.segment_range_size = 0; + this.enable = 0; + } + protected List getFieldOrder() { + return Arrays.asList("segment_range_size", "enable"); + } + public _omafSynchronizerParams(int segment_range_size, int enable) { + super(); + this.segment_range_size = segment_range_size; + this.enable = enable; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected _omafSynchronizerParams newInstance() { return new _omafSynchronizerParams(); } + + public static class ByReference extends _omafSynchronizerParams implements Structure.ByReference { }; + public static class ByValue extends _omafSynchronizerParams implements Structure.ByValue { }; + }; + + public static class _omafPredictorParams extends Structure { + public String name; + public String libpath; + public int enable; + public _omafPredictorParams() { + super(); + this.name = ""; + this.libpath = ""; + this.enable = 0; + } + protected List getFieldOrder() { + return Arrays.asList("name", "libpath", "enable"); + } + public _omafPredictorParams(String name, String libpath, int enable) { + super(); + this.name = name; + this.libpath = libpath; + this.enable = enable; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected _omafPredictorParams newInstance() { return new _omafPredictorParams(); } + + public static class ByReference extends _omafPredictorParams implements Structure.ByReference { }; + public static class ByValue extends _omafPredictorParams implements Structure.ByValue { }; + }; + + public static class _omafDashParams extends Structure { + public JnaOmafAccess._omafHttpProxy.ByValue proxy; + public JnaOmafAccess._omafHttpParams.ByValue http_params; + public JnaOmafAccess._omafStatisticsParams.ByValue statistic_params; + public JnaOmafAccess._omafSynchronizerParams.ByValue synchronizer_params; + public JnaOmafAccess._omafPredictorParams.ByValue predictor_params; + public long max_parallel_transfers; + public int segment_open_timeout_ms; + public _omafDashParams() { + super(); + this.proxy = null; + this.http_params = null; + this.statistic_params = null; + this.synchronizer_params = null; + this.predictor_params = null; + this.max_parallel_transfers = 0; + this.segment_open_timeout_ms = 0; + } + protected List getFieldOrder() { + return Arrays.asList("proxy", "http_params", "statistic_params", "synchronizer_params", "predictor_params", "max_parallel_transfers", "segment_open_timeout_ms"); + } + public _omafDashParams(JnaOmafAccess._omafHttpProxy.ByValue proxy, JnaOmafAccess._omafHttpParams.ByValue http_params, JnaOmafAccess._omafStatisticsParams.ByValue statistic_params, + JnaOmafAccess._omafSynchronizerParams.ByValue synchronizer_params, JnaOmafAccess._omafPredictorParams.ByValue predictor_params, long max_parallel_transfers, int segment_open_timeout_ms) { + super(); + this.proxy = proxy; + 
this.http_params = http_params; + this.statistic_params = statistic_params; + this.synchronizer_params = synchronizer_params; + this.predictor_params = predictor_params; + this.max_parallel_transfers = max_parallel_transfers; + this.segment_open_timeout_ms = segment_open_timeout_ms; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected _omafDashParams newInstance() { return new _omafDashParams(); } + + public static class ByReference extends _omafDashParams implements Structure.ByReference { }; + public static class ByValue extends _omafDashParams implements Structure.ByValue { }; + }; + + public static class DASHSTREAMINFO extends Structure { + /** + * @see MediaType + * C type : MediaType + */ + public int stream_type; + public int codec_type; + public int height; + public int width; + public int tileRowNum; + public int tileColNum; + public int framerate_num; + public int framerate_den; + public long segmentDuration; + public int bit_rate; + public int channels; + public int sample_rate; + public int channel_bytes; + public int mProjFormat; + public int mFpt; + /** C type : const char* */ + public Pointer mime_type; + /** C type : const char* */ + public Pointer codec; + public int source_number; + /** C type : SourceResolution* */ + public JnaOmafAccess.SOURCERESOLUTION.ByReference source_resolution; + public DASHSTREAMINFO() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("stream_type", "codec_type", "height", "width", "tileRowNum", "tileColNum", "framerate_num", "framerate_den", "segmentDuration", "bit_rate", "channels", "sample_rate", "channel_bytes", "mProjFormat", "mFpt", "mime_type", "codec", "source_number", "source_resolution"); + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected DASHSTREAMINFO newInstance() { return new DASHSTREAMINFO(); } + + public static class ByReference extends DASHSTREAMINFO implements Structure.ByReference { }; + public static class ByValue extends DASHSTREAMINFO implements Structure.ByValue { }; + }; + + public static class DASHMEDIAINFO extends Structure{ + public long duration; + public int streaming_type; + public int stream_count; + /** C type : DashStreamInfo[16] */ + public JnaOmafAccess.DASHSTREAMINFO[] stream_info = new JnaOmafAccess.DASHSTREAMINFO[16]; + public DASHMEDIAINFO() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("duration", "streaming_type", "stream_count", "stream_info"); + } + /** @param stream_info C type : DashStreamInfo[16] */ + public DASHMEDIAINFO(long duration, int streaming_type, int stream_count, JnaOmafAccess.DASHSTREAMINFO stream_info[]) { + super(); + this.duration = duration; + this.streaming_type = streaming_type; + this.stream_count = stream_count; + if ((stream_info.length != this.stream_info.length)) + throw new IllegalArgumentException("Wrong array size !"); + this.stream_info = stream_info; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected DASHMEDIAINFO newInstance() { return new DASHMEDIAINFO(); } + + public static class ByReference extends DASHMEDIAINFO implements Structure.ByReference { }; + public static class ByValue extends DASHMEDIAINFO implements Structure.ByValue { }; + }; + + public static class DASHPACKET extends Structure { + public int videoID; + public int video_codec; 
+ public int pts; + public long size; + /** C type : char* */ + public Pointer buf; + /** C type : RegionWisePacking* */ + public JnaOmafAccess.REGION_WIZE_PACKING.ByReference rwpk; + public int segID; + public int height; + public int width; + public int numQuality; + public JnaOmafAccess.SOURCERESOLUTION.ByReference qtyResolution; + public int tileRowNum; + public int tileColNum; + public boolean bEOS; + public DASHPACKET() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("videoID", "video_codec", "pts", "size", "buf", "rwpk", "segID", "height", "width", "numQuality", "qtyResolution", "tileRowNum", "tileColNum", "bEOS"); + } + /** + * @param buf C type : char*
+ * @param rwpk C type : RegionWisePacking* + */ + public DASHPACKET(int videoID, int video_codec, int pts, long size, Pointer buf, JnaOmafAccess.REGION_WIZE_PACKING.ByReference rwpk, int segID, int height, int width, int numQuality, JnaOmafAccess.SOURCERESOLUTION.ByReference qtyResolution, int tileRowNum, int tileColNum, boolean bEOS) { + super(); + this.videoID = videoID; + this.video_codec = video_codec; + this.pts = pts; + this.size = size; + this.buf = buf; + this.rwpk = rwpk; + this.segID = segID; + this.height = height; + this.width = width; + this.numQuality = numQuality; + this.qtyResolution = qtyResolution; + this.tileRowNum = tileRowNum; + this.tileColNum = tileColNum; + this.bEOS = bEOS; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected DASHPACKET newInstance() { return new DASHPACKET(); } + + public static class ByReference extends DASHPACKET implements Structure.ByReference { }; + public static class ByValue extends DASHPACKET implements Structure.ByValue { }; + }; + + public static class DASHSTREAMINGCLIENT extends Structure { + /** + * @see SourceType + * C type : SourceType + */ + public int source_type; + public JnaOmafAccess._omafDashParams.ByValue omaf_params; + /** C type : const char* */ + public String media_url; + /** C type : const char* */ + public String cache_path; + /** C type : bool */ + public boolean enable_extractor; + public DASHSTREAMINGCLIENT() { + super(); + } + protected List getFieldOrder() { + return Arrays.asList("source_type", "omaf_params", "media_url", "cache_path", "enable_extractor"); + } + /** + * @param media_url C type : const char*
+ * @param source_type @see SourceType
+ * C type : SourceType
+ * @param enable_extractor C type : bool
+ * @param cache_path C type : const char* + */ + public DASHSTREAMINGCLIENT(int source_type, JnaOmafAccess._omafDashParams.ByValue omaf_params, String media_url, String cache_path, boolean enable_extractor) { + super(); + this.source_type = source_type; + this.omaf_params = omaf_params; + this.media_url = media_url; + this.cache_path = cache_path; + this.enable_extractor = enable_extractor; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected DASHSTREAMINGCLIENT newInstance() { return new DASHSTREAMINGCLIENT(); } + + public static class ByReference extends DASHSTREAMINGCLIENT implements Structure.ByReference { }; + public static class ByValue extends DASHSTREAMINGCLIENT implements Structure.ByValue { }; + }; + + public static class VIEWPORTANGLE extends Structure { + public float yaw; + public float pitch; + public float roll; + public VIEWPORTANGLE() { + super(); + this.yaw = 0; + this.pitch = 0; + this.roll = 0; + } + protected List getFieldOrder() { + return Arrays.asList("yaw", "pitch", "roll"); + } + public VIEWPORTANGLE(float yaw, float pitch, float roll) { + super(); + this.yaw = yaw; + this.pitch = pitch; + this.roll = roll; + } + protected ByReference newByReference() { return new ByReference(); } + protected ByValue newByValue() { return new ByValue(); } + protected VIEWPORTANGLE newInstance() { return new VIEWPORTANGLE(); } + + public static class ByReference extends VIEWPORTANGLE implements Structure.ByReference { }; + public static class ByValue extends VIEWPORTANGLE implements Structure.ByValue { }; + }; + /** + * description: API to initialize API handle and relative context
+ * params: pCtx - [in] the structure for the necessary parameters to handle a dash stream
+ * return: the handle created for the API
+ * Original signature : Handler OmafAccess_Init(DashStreamingClient*)
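+ * usage sketch (illustrative only, not part of the native header; the URL and
+ * cache path below are placeholders):
+ * <pre>
+ * JnaOmafAccess.DASHSTREAMINGCLIENT ctx = new JnaOmafAccess.DASHSTREAMINGCLIENT();
+ * ctx.media_url = "https://example.com/Test.mpd";               // placeholder MPD URL
+ * ctx.cache_path = "/tmp/cache";                                 // placeholder cache directory
+ * ctx.source_type = JnaOmafAccess.SourceType.MultiResSource;
+ * ctx.enable_extractor = false;
+ * ctx.omaf_params = new JnaOmafAccess._omafDashParams.ByValue(); // fill sub-structs as needed
+ * Pointer hdl = JnaOmafAccess.INSTANCE.OmafAccess_Init(ctx);
+ * </pre>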
+ * native declaration : line 184 + */ + Pointer OmafAccess_Init(JnaOmafAccess.DASHSTREAMINGCLIENT pCtx); + /** + * description: API to open a dash stream
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * pCtx - [in] the structure for the necessary parameters to handle a dash stream
+ * enablePredictor - [in] flag indicating whether to use the viewport predictor
+ * predictPluginName - [in] viewport prediction plugin name
+ * libPath - [in] path of the viewport prediction plugin library
+ * return: the error return from the API
+ * Original signature : int OmafAccess_OpenMedia(Handler, DashStreamingClient*, bool, char*, char*)
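+ * usage sketch (illustrative only): reuses the ctx and hdl from the OmafAccess_Init
+ * example above; the predictor is disabled, so plugin name and library path are
+ * empty placeholders.
+ * <pre>
+ * int ret = JnaOmafAccess.INSTANCE.OmafAccess_OpenMedia(hdl, ctx, false, "", "");
+ * if (ret != 0) {
+ *     // handle error
+ * }
+ * </pre>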
+ * native declaration : line 193
+ */
+ int OmafAccess_OpenMedia(Pointer hdl, DASHSTREAMINGCLIENT pCtx, boolean enablePredictor, String predictPluginName, String libPath);
+ /**
+ * description: API to seek a stream. Only works in static mode; not implemented yet.
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * time - [in] the position to seek to
+ * return: the error return from the API
+ * Original signature : int OmafAccess_SeekMedia(Handler, uint64_t)
+ * native declaration : line 201 + */ + int OmafAccess_SeekMedia(Pointer hdl, long time); + /** + * description: API to close a dash stream
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * return: the error return from the API
+ * Original signature : int OmafAccess_CloseMedia(Handler)
+ * native declaration : line 208
+ */
+ int OmafAccess_CloseMedia(Pointer hdl);
+ /**
+ * description: API to get information of the opened dash stream
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * info - [out] the media info of the opened dash media
+ * return: the error return from the API
+ * Original signature : int OmafAccess_GetMediaInfo(Handler, DashMediaInfo*)
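+ * usage sketch (illustrative only, assuming OmafAccess_OpenMedia succeeded and that a
+ * return value of 0 means success):
+ * <pre>
+ * JnaOmafAccess.DASHMEDIAINFO info = new JnaOmafAccess.DASHMEDIAINFO();
+ * if (JnaOmafAccess.INSTANCE.OmafAccess_GetMediaInfo(hdl, info) == 0) {
+ *     int width = info.stream_info[0].width;    // first stream's resolution
+ *     int height = info.stream_info[0].height;
+ * }
+ * </pre>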
+ * native declaration : line 216
+ */
+ int OmafAccess_GetMediaInfo(Pointer hdl, JnaOmafAccess.DASHMEDIAINFO info);
+
+ /**
+ * description: API to get packets according to stream id in the dash media. For viewport-based
+ * tile streaming with a low-resolution video, the packet is composed of viewport-wise
+ * tiles and low-resolution tiles.
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * stream_id - [in] the stream id the packet is fetched from
+ * size - [out] the size of the fetched packet
+ * buf - [out] the payload of the packet
+ * pts - [out] the timestamp of the packet
+ * needParams - [in] flag to include VPS/SPS/PPS in the packet
+ * clearBuf - [in] flag to clear the output packet buffer
+ * return: the error return from the API; ERROR_EOS means the end of stream has been
+ * reached for a static source
+ * Original signature : int OmafAccess_GetPacket(Handler, int, DashPacket*, int*, uint64_t*, bool, bool)
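+ * usage sketch (illustrative only; stream id 0, a one-element packet array and a
+ * success value of 0 are assumptions of this sketch):
+ * <pre>
+ * JnaOmafAccess.DASHPACKET[] pkts =
+ *     (JnaOmafAccess.DASHPACKET[]) new JnaOmafAccess.DASHPACKET().toArray(1);
+ * IntByReference size = new IntByReference();
+ * LongByReference pts = new LongByReference();
+ * int ret = JnaOmafAccess.INSTANCE.OmafAccess_GetPacket(hdl, 0, pkts, size, pts, (byte) 1, (byte) 0);
+ * if (ret == 0 && size.getValue() > 0) {
+ *     byte[] payload = pkts[0].buf.getByteArray(0, (int) pkts[0].size);
+ * }
+ * </pre>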
+ * native declaration : line 232
+ */
+ int OmafAccess_GetPacket(Pointer hdl, int stream_id, JnaOmafAccess.DASHPACKET[] packet, IntByReference size, LongByReference pts, byte needParams, byte clearBuf);
+ /**
+ * description: API to set the initial viewport before downloading segments.
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * clientInfo - [in] the headset info needed to calculate the viewport
+ * return: the error return from the API
+ * Original signature : int OmafAccess_SetupHeadSetInfo(Handler, HeadSetInfo*)
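+ * usage sketch (illustrative only; the FOV and viewport size values are placeholders):
+ * <pre>
+ * JnaOmafAccess.HEADPOSE.ByReference pose = new JnaOmafAccess.HEADPOSE.ByReference();
+ * JnaOmafAccess.HEADSETINFO headset = new JnaOmafAccess.HEADSETINFO(pose, 80f, 80f, 1024, 1024);
+ * JnaOmafAccess.INSTANCE.OmafAccess_SetupHeadSetInfo(hdl, headset);
+ * </pre>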
+ * native declaration : line 240
+ */
+ int OmafAccess_SetupHeadSetInfo(Pointer hdl, JnaOmafAccess.HEADSETINFO clientInfo);
+ /**
+ * description: API to update the viewport when input indicates the viewport has changed
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * pose - [in] the changed pose info
+ * return: the error return from the API
+ * Original signature : int OmafAccess_ChangeViewport(Handler, HeadPose*)
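+ * usage sketch (illustrative only; yaw/pitch values are placeholders, e.g. from sensor input):
+ * <pre>
+ * JnaOmafAccess.HEADPOSE newPose = new JnaOmafAccess.HEADPOSE(30.0f, -10.0f);
+ * JnaOmafAccess.INSTANCE.OmafAccess_ChangeViewport(hdl, newPose);
+ * </pre>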
+ * native declaration : line 248
+ */
+ int OmafAccess_ChangeViewport(Pointer hdl, JnaOmafAccess.HEADPOSE pose);
+ /**
+ * description: API to get statistics data such as bandwidth
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * info - [out] the current statistics data
+ * return: the error return from the API
+ * Original signature : int OmafAccess_Statistic(Handler, DashStatisticInfo*)
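+ * usage sketch (illustrative only, assuming a return value of 0 means success):
+ * <pre>
+ * JnaOmafAccess.DASHSTATISTICINFO stats = new JnaOmafAccess.DASHSTATISTICINFO();
+ * if (JnaOmafAccess.INSTANCE.OmafAccess_Statistic(hdl, stats) == 0) {
+ *     int avg = stats.avg_bandwidth;
+ * }
+ * </pre>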
+ * native declaration : line 256
+ */
+ int OmafAccess_Statistic(Pointer hdl, JnaOmafAccess.DASHSTATISTICINFO info);
+ /**
+ * description: API to close the handle and release related resources after processing
+ * the media
+ * params: hdl - [in] handler created with DashStreaming_Init
+ * return: the error return from the API
+ * Original signature : int OmafAccess_Close(Handler)
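+ * usage sketch (illustrative only): typical teardown once streaming is finished.
+ * <pre>
+ * JnaOmafAccess.INSTANCE.OmafAccess_CloseMedia(hdl);
+ * JnaOmafAccess.INSTANCE.OmafAccess_Close(hdl);
+ * </pre>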
+ * native declaration : line 264 + */ + int OmafAccess_Close(Pointer hdl); +} diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/java/com/example/omafdashaccesslibrary/OmafAccess.java b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/java/com/example/omafdashaccesslibrary/OmafAccess.java new file mode 100644 index 00000000..9d37e520 --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/java/com/example/omafdashaccesslibrary/OmafAccess.java @@ -0,0 +1,147 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.example.omafdashaccesslibrary; + +import android.util.Log; +import com.sun.jna.Pointer; +import com.sun.jna.Memory; +import com.sun.jna.ptr.IntByReference; +import com.sun.jna.ptr.LongByReference; + +import java.nio.IntBuffer; +import java.nio.LongBuffer; + +public class OmafAccess { + private final String TAG = "OMAF_DASH_ACCESS"; + private Pointer mHandle; + private JnaOmafAccess.DASHSTREAMINGCLIENT mContext; + + public OmafAccess(){ + mContext = new JnaOmafAccess.DASHSTREAMINGCLIENT(); + mHandle = null; + } + + public OmafAccess(String url, String cache, int source_type, boolean enable_extractor, JnaOmafAccess._omafDashParams.ByValue omaf_params){ + this(); + SetClientCtx(url, cache, source_type, enable_extractor, omaf_params); + } + + private void SetClientCtx(String url, String cache, int source_type, boolean enable_extractor, JnaOmafAccess._omafDashParams.ByValue omaf_params){ + mContext.media_url = url; + mContext.cache_path = cache; + mContext.source_type = source_type; + mContext.enable_extractor = enable_extractor; + mContext.omaf_params = omaf_params; + } + + public int Initialize( ){ + mHandle = JnaOmafAccess.INSTANCE.OmafAccess_Init(this.mContext); + + if(mHandle == null){ + Log.e(TAG, "Failed to initialize dash access !!!"); + return -1; + } + + return 0; + } + + public int OpenMedia(boolean enablePredictor, String pluginName, String libPath){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; cannot continue !!!"); + return -1; + } + + return JnaOmafAccess.INSTANCE.OmafAccess_OpenMedia(this.mHandle, this.mContext, enablePredictor, pluginName, libPath); + } + + public int SeekMedia( long time){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; cannot continue !!!"); + return -1; + } + + return JnaOmafAccess.INSTANCE.OmafAccess_SeekMedia( this.mHandle, time ); + } + + public int CloseMedia(){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; cannot continue !!!"); + return -1; + } + + return JnaOmafAccess.INSTANCE.OmafAccess_CloseMedia(this.mHandle); + } + + public int GetMediaInfo( JnaOmafAccess.DASHMEDIAINFO info){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; cannot continue !!!"); + return -1; + } + return JnaOmafAccess.INSTANCE.OmafAccess_GetMediaInfo(this.mHandle, info); + } + + public int GetPacket(int stream_id, JnaOmafAccess.DASHPACKET[] packet, IntByReference size, LongByReference pts, byte needParams, byte clearBuf){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; cannot continue !!!"); + return -1; + } + return JnaOmafAccess.INSTANCE.OmafAccess_GetPacket(this.mHandle, stream_id, packet, size, pts, needParams, clearBuf); + } + + public int SetupHeadSetInfo( JnaOmafAccess.HEADSETINFO clientInfo){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; cannot continue !!!"); + return -1; + } + return JnaOmafAccess.INSTANCE.OmafAccess_SetupHeadSetInfo(this.mHandle, clientInfo); + } + + public int ChangeViewport( JnaOmafAccess.HEADPOSE pose){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; cannot continue !!!"); + return -1; + } + return JnaOmafAccess.INSTANCE.OmafAccess_ChangeViewport(this.mHandle, pose); + } + + public int Statistic( JnaOmafAccess.DASHSTATISTICINFO info){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; cannot continue !!!"); + return -1; + } + return JnaOmafAccess.INSTANCE.OmafAccess_Statistic(this.mHandle, info); + } + + public int Close(){ + if(mHandle == null){ + Log.e(TAG, "Omaf Access Handle is NULL; 
cannot continue !!!"); + return -1; + } + return JnaOmafAccess.INSTANCE.OmafAccess_Close(this.mHandle); + } + +} diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/jniLibs/arm64-v8a/libjnidispatch.so b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/jniLibs/arm64-v8a/libjnidispatch.so new file mode 100644 index 00000000..61553b21 Binary files /dev/null and b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/jniLibs/arm64-v8a/libjnidispatch.so differ diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/res/values/strings.xml b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/res/values/strings.xml new file mode 100644 index 00000000..658fa519 --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/main/res/values/strings.xml @@ -0,0 +1,3 @@ + + OmafDashAccessLibrary + diff --git a/src/JNIOmafDashAccess/omafdashaccesslibrary/src/test/java/com/example/omafdashaccesslibrary/ExampleUnitTest.java b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/test/java/com/example/omafdashaccesslibrary/ExampleUnitTest.java new file mode 100644 index 00000000..685bd938 --- /dev/null +++ b/src/JNIOmafDashAccess/omafdashaccesslibrary/src/test/java/com/example/omafdashaccesslibrary/ExampleUnitTest.java @@ -0,0 +1,17 @@ +package com.example.omafdashaccesslibrary; + +import org.junit.Test; + +import static org.junit.Assert.*; + +/** + * Example local unit test, which will execute on the development machine (host). + * + * @see Testing documentation + */ +public class ExampleUnitTest { + @Test + public void addition_isCorrect() { + assertEquals(4, 2 + 2); + } +} \ No newline at end of file diff --git a/src/JNIOmafDashAccess/settings.gradle b/src/JNIOmafDashAccess/settings.gradle new file mode 100644 index 00000000..ba4df544 --- /dev/null +++ b/src/JNIOmafDashAccess/settings.gradle @@ -0,0 +1,2 @@ +include ':app', ':omafdashaccesslibrary' +rootProject.name='OmafAccess' diff --git a/src/Makefile b/src/Makefile deleted file mode 100644 index db59ced7..00000000 --- a/src/Makefile +++ /dev/null @@ -1,246 +0,0 @@ -CC = gcc -CXX = g++ -CC_VERSION = $(shell gcc -dumpversion) -PKG_CONFIG_PATH = "/usr/local/lib/pkgconfig:/usr/lib/pkgconfig" - -all : server client test - -server : - @ echo "$(CC_VERSION)" - - @ echo "Start build server on $(SYSTEM) ..." - @ mkdir -p build/server/360SCVP - @ mkdir -p build/server/distributed_encoder - @ mkdir -p build/server/ffmpeg - @ mkdir -p build/server/VROmafPacking - - @ # Install 360SCVP - @ cd build/server/360SCVP && \ - cmake ../../../360SCVP && \ - make -j `nproc` && \ - sudo make install \ - - @ # Install distributed_encoder - @ cd build/server/distributed_encoder && \ - cmake ../../../distributed_encoder && \ - make -j `nproc` && \ - sudo make install \ - - @ # Install VROMAFPACKING - @ export PKG_CONFIG_PATH=${PKG_CONFIG_PATH} && \ - cd external && \ - cd ../build/server/VROmafPacking && \ - cmake ../../../VROmafPacking && \ - make -j `nproc` && \ - sudo make install - - @ # Install ffmpeg - @ export PKG_CONFIG_PATH=${PKG_CONFIG_PATH} && \ - cd build/server/ffmpeg && \ - ../../../ffmpeg/configure \ - --prefix=/usr --libdir=/usr/lib --enable-static --disable-shared \ - --enable-gpl --enable-nonfree --disable-optimizations --disable-vaapi \ - --enable-libDistributedEncoder --enable-libVROmafPacking && \ - make -j `nproc` - -client : - @ echo "Start build client on $(SYSTEM) ..." 
- - @ mkdir -p build/client/360SCVP - @ mkdir -p build/client/OmafDashAccess - @ mkdir -p build/client/player - @ mkdir -p build/client/ffmpeg - - @ # Install 360SCVP - @ cd build/client/360SCVP && \ - cmake ../../../360SCVP && \ - make -j `nproc` && \ - sudo make install - - @ # Install OmafDashAccess - @ cd build/client/OmafDashAccess && \ - cmake ../../../OmafDashAccess && \ - make -j `nproc` && \ - sudo make install - - @ # Install Player - @ cd player && cp ../utils/tinyxml2.h ./ && \ - cp ../utils/tinyxml2.cpp ./ && \ - cd ../build/client/player && \ - cmake -DUSE_OMAF=ON -DUSE_WEBRTC=OFF ../../../player && \ - make -j `nproc` - - @ # Install ffmpeg - @ export PKG_CONFIG_PATH=${PKG_CONFIG_PATH} && \ - cd build/client/ffmpeg && \ - ../../../ffmpeg/configure \ - --prefix=/usr --libdir=/usr/lib --enable-static --disable-shared \ - --enable-gpl --enable-nonfree --disable-optimizations \ - --enable-libOmafDashAccess --enable-ffplay && \ - make -j `nproc` - -test: - @ echo "Compiling and running unit test ..." - @ mkdir -p build/test/360SCVP - @ mkdir -p build/test/distributed_encoder - @ mkdir -p build/test/VROmafPacking - @ mkdir -p build/test/OmafDashAccess - - @ # Compile 360SCVP test - @ cd build/test/360SCVP && \ - g++ -I../../../google_test -std=c++11 -I../util/ -g -c \ - ../../../360SCVP/test/testI360SCVP.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -L/usr/local/lib testI360SCVP.o \ - ../googletest/googletest/build/libgtest.a -o \ - testI360SCVP -I/usr/local/include/ -l360SCVP \ - -lstdc++ -lpthread -lm -L/usr/local/lib - - @ # Compile distributed_encoder test - @ cd build/test/distributed_encoder && \ - g++ -I../../../google_test -std=c++11 \ - -I../../../distributed_encoder/util/ -g -c \ - ../../../distributed_encoder/test/testMainEncoder.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 \ - -I../../../distributed_encoder/util/ -g -c \ - ../../../distributed_encoder/test/testWorkSession.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 \ - -I../../../distributed_encoder/util/ -g -c \ - ../../../distributed_encoder/test/testDecoder.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 \ - -I../../../distributed_encoder/util/ -g -c \ - ../../../distributed_encoder/test/testSubEncoder.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 -I/usr/local/include/svt-hevc && \ - g++ -I../../../google_test -std=c++11 \ - -I../../../distributed_encoder/util/ -g -c \ - ../../../distributed_encoder/test/testSubEncoderManager.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 \ - -I../../../distributed_encoder/util/ -g -c \ - ../../../distributed_encoder/test/testEncoder.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -L/usr/local/lib testMainEncoder.o \ - ../googletest/googletest/build/libgtest.a -o testMainEncoder \ - -I/usr/local/include/thrift -I/usr/local/include/svt-hevc \ - -lDistributedEncoder -lEncoder -lstdc++ -lpthread -lthrift \ - -lSvtHevcEnc -lopenhevc -lthriftnb -levent -lglog -pthread \ - -lavdevice -lxcb -lxcb-shm -lxcb-shape -lxcb-xfixes -lavfilter \ - -lswscale -lavformat -lavcodec -llzma -lz -lswresample -lavutil \ - -lva-drm -lva-x11 -lm -lva -lXv -lX11 -lXext -l360SCVP \ - -L/usr/local/lib && \ - g++ -L/usr/local/lib testWorkSession.o \ - ../googletest/googletest/build/libgtest.a -o testWorkSession \ - -I/usr/local/include/thrift -I/usr/local/include/svt-hevc \ - -lDistributedEncoder -lEncoder -lstdc++ -lpthread -lthrift \ - -lSvtHevcEnc -lopenhevc -lthriftnb -levent -lglog 
-pthread \ - -lavdevice -lxcb -lxcb-shm -lxcb-shape -lxcb-xfixes -lavfilter \ - -lswscale -lavformat -lavcodec -llzma -lz -lswresample -lavutil \ - -lva-drm -lva-x11 -lm -lva -lXv -lX11 -lXext -l360SCVP \ - -L/usr/local/lib && \ - g++ -L/usr/local/lib testDecoder.o \ - ../googletest/googletest/build/libgtest.a -o testDecoder \ - -I/usr/local/include/thrift -I/usr/local/include/svt-hevc \ - -lDistributedEncoder -lEncoder -lstdc++ -lpthread -lthrift \ - -lSvtHevcEnc -lopenhevc -lthriftnb -levent -lglog -pthread \ - -lavdevice -lxcb -lxcb-shm -lxcb-shape -lxcb-xfixes -lavfilter \ - -lswscale -lavformat -lavcodec -llzma -lz -lswresample -lavutil \ - -lva-drm -lva-x11 -lm -lva -lXv -lX11 -lXext -l360SCVP \ - -L/usr/local/lib && \ - g++ -L/usr/local/lib testSubEncoder.o \ - ../googletest/googletest/build/libgtest.a -o testSubEncoder \ - -I/usr/local/include/thrift -I/usr/local/include/svt-hevc \ - -lDistributedEncoder -lEncoder -lstdc++ -lpthread -lthrift \ - -lSvtHevcEnc -lopenhevc -lthriftnb -levent -lglog -pthread \ - -lavdevice -lxcb -lxcb-shm -lxcb-shape -lxcb-xfixes -lavfilter \ - -lswscale -lavformat -lavcodec -llzma -lz -lswresample -lavutil \ - -lva-drm -lva-x11 -lm -lva -lXv -lX11 -lXext -l360SCVP \ - -L/usr/local/lib && \ - g++ -L/usr/local/lib testEncoder.o \ - ../googletest/googletest/build/libgtest.a -o testEncoder \ - -I/usr/local/include/thrift -I/usr/local/include/svt-hevc \ - -lDistributedEncoder -lEncoder -lstdc++ -lpthread -lthrift \ - -lSvtHevcEnc -lopenhevc -lthriftnb -levent -lglog -pthread \ - -lavdevice -lxcb -lxcb-shm -lxcb-shape -lxcb-xfixes -lavfilter \ - -lswscale -lavformat -lavcodec -llzma -lz -lswresample -lavutil \ - -lva-drm -lva-x11 -lm -lva -lXv -lX11 -lXext -l360SCVP \ - -L/usr/local/lib && \ - g++ -L/usr/local/lib testSubEncoderManager.o \ - ../googletest/googletest/build/libgtest.a -o testSubEncoderManager \ - -I/usr/local/include/thrift -I/usr/local/include/svt-hevc \ - -lDistributedEncoder -lEncoder -lstdc++ -lpthread -lthrift \ - -lSvtHevcEnc -lopenhevc -lthriftnb -levent -lglog -pthread \ - -lavdevice -lxcb -lxcb-shm -lxcb-shape -lxcb-xfixes -lavfilter \ - -lswscale -lavformat -lavcodec -llzma -lz -lswresample -lavutil \ - -lva-drm -lva-x11 -lm -lva -lXv -lX11 -lXext -l360SCVP \ - -L/usr/local/lib - - @ # Compile OmafDashAccess test - @ cd build/test/OmafDashAccess && \ - g++ -I../../../google_test -std=c++11 -I../util/ -g -c \ - ../../../OmafDashAccess/test/testMediaSource.cpp \ - -I../../../utils -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 -I../util/ -g -c \ - ../../../OmafDashAccess/test/testMPDParser.cpp \ - -I../../../utils -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 -I../util/ -g -c \ - ../../../OmafDashAccess/test/testOmafReader.cpp \ - -I../../../utils -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 -I../util/ -g -c \ - ../../../OmafDashAccess/test/testOmafReaderManager.cpp \ - -I../../../utils -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -L/usr/local/lib testMediaSource.o \ - ../googletest/googletest/build/libgtest.a -o \ - testMediaSource -I/usr/local/include/ -lOmafDashAccess \ - -lstdc++ -lpthread -lglog -l360SCVP -lm -L/usr/local/lib && \ - g++ -L/usr/local/lib testMPDParser.o \ - ../googletest/googletest/build/libgtest.a -o \ - testMPDParser -I/usr/local/include/ -lOmafDashAccess \ - -lstdc++ -lpthread -lglog -l360SCVP -lm -L/usr/local/lib && \ - g++ -L/usr/local/lib testOmafReader.o \ - ../googletest/googletest/build/libgtest.a -o \ - testOmafReader 
-I/usr/local/include/ -lOmafDashAccess \ - -lstdc++ -lpthread -lglog -l360SCVP -lm -L/usr/local/lib && \ - g++ -L/usr/local/lib testOmafReaderManager.o \ - ../googletest/googletest/build/libgtest.a -o \ - testOmafReaderManager -I/usr/local/include/ -lOmafDashAccess\ - -lstdc++ -lpthread -lglog -l360SCVP -lm -L/usr/local/lib - - @ # Compile VROmafPacking test - @ cd build/test/VROmafPacking && \ - g++ -I../../../google_test -std=c++11 -g -c \ - ../../../VROmafPacking/test/testHevcNaluParser.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 -g -c \ - ../../../VROmafPacking/test/testVideoStream.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 -g -c \ - ../../../VROmafPacking/test/testExtractorTrack.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -I../../../google_test -std=c++11 -g -c \ - ../../../VROmafPacking/test/testDefaultSegmentation.cpp \ - -D_GLIBCXX_USE_CXX11_ABI=0 && \ - g++ -L/usr/local/lib testHevcNaluParser.o \ - ../googletest/googletest/build/libgtest.a -o \ - testHevcNaluParser -I/usr/local/lib -lVROmafPacking \ - -l360SCVP -lstdc++ -lpthread -lm -L/usr/local/lib && \ - g++ -L/usr/local/lib testVideoStream.o \ - ../googletest/googletest/build/libgtest.a -o \ - testVideoStream -I/usr/local/lib -lVROmafPacking \ - -l360SCVP -lstdc++ -lpthread -lm -L/usr/local/lib && \ - g++ -L/usr/local/lib testExtractorTrack.o \ - ../googletest/googletest/build/libgtest.a -o \ - testExtractorTrack -I/usr/local/lib -lVROmafPacking \ - -l360SCVP -lstdc++ -lpthread -lm -L/usr/local/lib && \ - g++ -L/usr/local/lib testDefaultSegmentation.o \ - ../googletest/googletest/build/libgtest.a -o \ - testDefaultSegmentation -I/usr/local/lib -lVROmafPacking \ - -l360SCVP -lstdc++ -lpthread -lm -L/usr/local/lib - -clean : - @ rm -rf ./build - - -.PHONY : server client test clean diff --git a/src/OmafDashAccess/CMakeLists.txt b/src/OmafDashAccess/CMakeLists.txt index 942cd1a0..303d48bb 100644 --- a/src/OmafDashAccess/CMakeLists.txt +++ b/src/OmafDashAccess/CMakeLists.txt @@ -1,43 +1,85 @@ -cmake_minimum_required(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) -project(OmafDashAccess) +OPTION(USE_ANDROID_NDK + "Use android ndk" + OFF +) + +PROJECT(OmafDashAccess) AUX_SOURCE_DIRECTORY(. DIR_SRC) -AUX_SOURCE_DIRECTORY(../utils UTIL_SRC) AUX_SOURCE_DIRECTORY(OmafDashDownload/. DOWN_SRC) AUX_SOURCE_DIRECTORY(OmafDashParser/. PARSE_SRC) +AUX_SOURCE_DIRECTORY(OmafViewportPredict/. 
PREDICT_SRC) + +IF(NOT USE_ANDROID_NDK) + ADD_DEFINITIONS("-g -c -fPIC -lstdc++fs -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 + -z noexecstack -z relro -z now -fstack-protector-strong + -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat + -Wformat-security -Wl,-S -Wall -Werror") +ELSE() + ADD_DEFINITIONS("-g -c -fPIC -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 + -fPIE -fPIC -O2 -D_FORTIFY_SOURCE=2 -Wformat + -Wformat-security -Wall") +ENDIF() -ADD_DEFINITIONS("-g -c -fPIC -lstdc++fs -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 -z noexecstack -z relro -z now -fstack-protector-strong -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security -Wl,-S -Wall -Werror") +IF(USE_ANDROID_NDK) + ADD_DEFINITIONS("-D_ANDROID_NDK_OPTION_") +ENDIF() -INCLUDE_DIRECTORIES(/usr/local/include ../utils mp4lib/api/reader) -LINK_DIRECTORIES(/usr/local/lib) +IF(USE_TRACE) + ADD_DEFINITIONS("-D_USE_TRACE_") +ENDIF() + +IF(NOT USE_ANDROID_NDK) + IF(USE_TRACE) + INCLUDE_DIRECTORIES(/usr/local/include ../utils ../isolib ../trace) + LINK_DIRECTORIES(/usr/local/lib ../isolib/dash_parser ../trace) + ELSE() + INCLUDE_DIRECTORIES(/usr/local/include ../utils ../isolib) + LINK_DIRECTORIES(/usr/local/lib ../isolib/dash_parser) + ENDIF() +ELSE() + INCLUDE_DIRECTORIES(/usr/local/include ../utils ../isolib) + LINK_DIRECTORIES(/usr/local/lib ../isolib/dash_parser) +ENDIF() set(DIR_SRC ${DIR_SRC} - ${UTIL_SRC} ${DOWN_SRC} ${PARSE_SRC} + ${PREDICT_SRC} ) -ADD_LIBRARY(OmafDashAccess SHARED ${DIR_SRC}) +ADD_LIBRARY(OmafDashAccess SHARED ${DIR_SRC} ../utils/Log.cpp ../utils/tinyxml2.cpp) -TARGET_LINK_LIBRARIES(OmafDashAccess streamsegmenter_static_fpic) -TARGET_LINK_LIBRARIES(OmafDashAccess streamsegmenter_static) -TARGET_LINK_LIBRARIES(OmafDashAccess mp4vr_static_fpic) -TARGET_LINK_LIBRARIES(OmafDashAccess mp4vr_static) TARGET_LINK_LIBRARIES(OmafDashAccess glog) TARGET_LINK_LIBRARIES(OmafDashAccess curl) +TARGET_LINK_LIBRARIES(OmafDashAccess dashparser) TARGET_LINK_LIBRARIES(OmafDashAccess 360SCVP) +TARGET_LINK_LIBRARIES(OmafDashAccess safestring_shared) +IF(NOT USE_ANDROID_NDK) + IF(USE_TRACE) + TARGET_LINK_LIBRARIES(OmafDashAccess trace) + TARGET_LINK_LIBRARIES(OmafDashAccess lttng-ust) + ENDIF() + TARGET_LINK_LIBRARIES(OmafDashAccess dl) +ENDIF() +IF(USE_ANDROID_NDK) + TARGET_LINK_LIBRARIES(OmafDashAccess log) +ENDIF() -install(TARGETS OmafDashAccess +INSTALL(TARGETS OmafDashAccess RUNTIME DESTINATION bin LIBRARY DESTINATION lib ARCHIVE DESTINATION lib/static) -install(FILES ${PROJECT_SOURCE_DIR}/../utils/error.h DESTINATION include) -install(FILES ${PROJECT_SOURCE_DIR}/../utils/data_type.h DESTINATION include) -install(FILES ${PROJECT_SOURCE_DIR}/../utils/ns_def.h DESTINATION include) -install(FILES ${PROJECT_SOURCE_DIR}/../utils/OmafStructure.h DESTINATION include) -install(FILES ${PROJECT_SOURCE_DIR}/OmafDashAccessApi.h DESTINATION include) -install(FILES ${PROJECT_SOURCE_DIR}/OmafDashAccess.pc DESTINATION lib/pkgconfig) - +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/error.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/pose.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/data_type.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/ns_def.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/OmafStructure.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/common_data.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/Log.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/OmafDashAccessApi.h DESTINATION include) 
+INSTALL(FILES ${PROJECT_SOURCE_DIR}/OmafDashAccess.pc DESTINATION lib/pkgconfig) diff --git a/src/OmafDashAccess/DownloadManager.cpp b/src/OmafDashAccess/DownloadManager.cpp index 8fd391ab..744703eb 100644 --- a/src/OmafDashAccess/DownloadManager.cpp +++ b/src/OmafDashAccess/DownloadManager.cpp @@ -47,263 +47,210 @@ VCD_OMAF_BEGIN -typedef struct GatherStruct{ - int size; - std::string prefix; -}GatherStruct; - -DownloadManager::DownloadManager() -{ - mDownloadedBytes = 0; - mDownloadedFiles = 0; - mCacheDir = ""; - mMaxCacheSize = 200000000; - pthread_mutex_init(&mMutex, NULL); - mStartTime = 0; - mFilePrefix = ""; - // start count from 1 because 0 and 1 would generate - // same random file name, so ignore 0 - m_count = 1; - mUseCache = false; +typedef struct GatherStruct { + int size; + std::string prefix; +} GatherStruct; + +DownloadManager::DownloadManager() { + mDownloadedBytes = 0; + mDownloadedFiles = 0; + mCacheDir = ""; + mMaxCacheSize = 200000000; + mStartTime = 0; + mFilePrefix = ""; + // start count from 1 because 0 and 1 would generate + // same random file name, so ignore 0 + m_count = 1; + mUseCache = false; } -DownloadManager::~DownloadManager() -{ - pthread_mutex_destroy( &mMutex ); -} - -int DownloadManager::DeleteCacheFile(std::string url) -{ - if( remove(url.c_str())) - { +DownloadManager::~DownloadManager() { } - LOG(WARNING) << "Failed to delete file in cache ! Be cautious cache may exceed the storage limitation!"< lock(mMutex); + std::string file_name = GetRandomString(32); + return file_name; } /// get download bit rate -int DownloadManager::GetImmediateBitrate() -{ - return 0; -} +int DownloadManager::GetImmediateBitrate() { return 0; } -int DownloadManager::GetAverageBitrate() -{ - return 0; -} +int DownloadManager::GetAverageBitrate() { return 0; } -void DownloadManager::CleanCache() -{ - delete_all_cached_files( mCacheDir.c_str() ); -} +void DownloadManager::CleanCache() { delete_all_cached_files(mCacheDir.c_str()); } + +void DownloadManager::DeleteCacheByTime(uint64_t interval) { + if (mCacheMtx.try_lock()) { + uint64_t out_size = CacheGetSize(mCacheDir.c_str()); + if (out_size < mMaxCacheSize) { + mCacheMtx.unlock(); + return; + } -void DownloadManager::DeleteCacheByTime( uint64_t interval ) -{ - if(mCacheMtx.try_lock()) - { - uint64_t out_size = CacheGetSize( mCacheDir.c_str() ); - if (out_size < mMaxCacheSize) - { - mCacheMtx.unlock(); - return; - } - - auto cachePath = GetCacheFolder(); - DIR *dir = opendir(cachePath.c_str()); - if(!dir) - { - LOG(WARNING) << "Failed to open cache folder! Be cautious cache may exceed the storage limitation!" <d_name, &st)) - { - LOG(WARNING) << "Failed to get cache file time info! Be cautious cache may exceed the storage limitation!" <d_name)) - LOG(WARNING) << "Failed to delete file in cache ! Be cautious cache may exceed the storage limitation!"<d_name, &st)) { + OMAF_LOG(LOG_WARNING, "Failed to get cache file time info! Be cautious cache may exceed the storage limitation!\n"); mCacheMtx.unlock(); + return; + } + if ((uint64_t)(st.st_mtim.tv_nsec) < GetStartTime() + interval) { + if (remove(ent->d_name)) + OMAF_LOG(LOG_WARNING, "Failed to delete file in cache ! 
Be cautious cache may exceed the storage limitation!\n"); + } + + ent = readdir(dir); } - return; + closedir(dir); + + mCacheMtx.unlock(); + } + return; } -void DownloadManager::DeleteCacheBySize( ) -{ - uint64_t out_size = CacheGetSize( mCacheDir.c_str() ); - if (out_size >= mMaxCacheSize) { - delete_all_cached_files( mCacheDir.c_str() ); - } +void DownloadManager::DeleteCacheBySize() { + uint64_t out_size = CacheGetSize(mCacheDir.c_str()); + if (out_size >= mMaxCacheSize) { + delete_all_cached_files(mCacheDir.c_str()); + } } -static bool delete_cache_files( void *cbck, - std::string item_name, - std::string itemPath ) -{ - std::string* startPattern; - uint32_t sz; - if(NULL == cbck ) return false; - if( 0 == item_name.size() ) return false; - if( 0 == itemPath.size() ) return false; - - startPattern = (std::string *) cbck; - sz = (uint32_t) strlen( startPattern->c_str() ); - if(!strncmp(startPattern->c_str(), item_name.c_str(), sz)) { - if( remove(itemPath.c_str()) == 0) - return true; - } +static bool delete_cache_files(void *cbck, std::string item_name, std::string itemPath) { + std::string *startPattern; + uint32_t sz; + if (NULL == cbck) return false; + if (0 == item_name.size()) return false; + if (0 == itemPath.size()) return false; + + startPattern = (std::string *)cbck; + sz = (uint32_t)strlen(startPattern->c_str()); + if (!strncmp(startPattern->c_str(), item_name.c_str(), sz)) { + if (remove(itemPath.c_str()) == 0) return true; + } - return false; + return false; } -void DownloadManager::delete_all_cached_files( std::string directory ) -{ - enum_directory( directory.c_str(), false, delete_cache_files, (void*)&mFilePrefix , NULL); +void DownloadManager::delete_all_cached_files(std::string directory) { + enum_directory(directory.c_str(), false, delete_cache_files, (void *)&mFilePrefix, NULL); } -static bool GatherCacheSize( void *cbck, - std::string item_name, - std::string itemPath ) -{ - GatherStruct *out = (GatherStruct *)cbck; - if (!strncmp(out->prefix.c_str(), item_name.c_str(), out->prefix.size())) { - struct stat statbuf; - stat( itemPath.c_str(), &statbuf ); - out->size += statbuf.st_size; - return true; - } - return false; +static bool GatherCacheSize(void *cbck, std::string item_name, std::string itemPath) { + GatherStruct *out = (GatherStruct *)cbck; + if (!strncmp(out->prefix.c_str(), item_name.c_str(), out->prefix.size())) { + struct stat statbuf; + stat(itemPath.c_str(), &statbuf); + out->size += statbuf.st_size; + return true; + } + return false; } -uint64_t DownloadManager::CacheGetSize( std::string directory ) -{ - GatherStruct gat; - gat.prefix = mFilePrefix; - gat.size = 0; +uint64_t DownloadManager::CacheGetSize(std::string directory) { + GatherStruct gat; + gat.prefix = mFilePrefix; + gat.size = 0; - enum_directory(directory.c_str(), false, GatherCacheSize, (void*)&gat, NULL); - return gat.size; + enum_directory(directory.c_str(), false, GatherCacheSize, (void *)&gat, NULL); + return gat.size; } -int DownloadManager::enum_directory( const char *dir, - bool enum_directory, - enum_dir_item enumDirFct, - void *cbck, - const char *filter ) -{ - unsigned char itemPath[MAX_PATH_COUNT]; - unsigned char path[MAX_PATH_COUNT], *file; - DIR *currentDir; - struct dirent* currentFile; - struct stat st; - - if (!dir || !enumDirFct) - return ERROR_INVALID; - - strncpy((char*)path, dir, strlen(dir) + 1); - if (path[strlen((const char*)path)-1] != '\\') - strncat((char*)path, "\\", strlen("\\")); - - currentDir = opendir((char*)path); - if (currentDir == NULL) - return 
ERROR_INVALID; +int DownloadManager::enum_directory(const char *dir, bool enum_directory, enum_dir_item enumDirFct, void *cbck, + const char *filter) { + unsigned char itemPath[MAX_PATH_COUNT]; + unsigned char path[MAX_PATH_COUNT], *file; + DIR *currentDir; + struct dirent *currentFile; + struct stat st; - currentFile = readdir(currentDir); - while (currentFile) - { - if (!strcmp(currentFile->d_name, "..")) - goto end; - if (currentFile->d_name[0] == '.') - goto end; - - if (filter) - { - char *separate = strrchr(currentFile->d_name, '.'); - if (!separate) goto end; - if (!strstr(filter, separate+1)) goto end; - } - - strncpy((char*)itemPath, (const char*)path, strlen((const char*)path) + 1); - if(strlen(currentFile->d_name) < 1024 - strlen((const char*)itemPath)) strncat((char*)itemPath, currentFile->d_name, sizeof(currentFile->d_name)); - if (stat( (const char*)itemPath, &st ) != 0) - goto end; - if (enum_directory && ( (st.st_mode & S_IFMT) != S_IFDIR)) - { - goto end; - } - - if (!enum_directory && ((st.st_mode & S_IFMT) == S_IFDIR)) - goto end; - file = (unsigned char*)currentFile->d_name; - - if (enumDirFct(cbck, (char *)file, (char *)itemPath)) - { - break; - } -end: - currentFile = readdir(currentDir); + if (!dir || !enumDirFct) return ERROR_INVALID; + + strncpy((char *)path, dir, strlen(dir) + 1); + if (path[strlen((const char *)path) - 1] != '\\') strncat((char *)path, "\\", strlen("\\")); + + currentDir = opendir((char *)path); + if (currentDir == NULL) return ERROR_INVALID; + + currentFile = readdir(currentDir); + while (currentFile) { + if (!strcmp(currentFile->d_name, "..")) goto end; + if (currentFile->d_name[0] == '.') goto end; + + if (filter) { + char *separate = strrchr(currentFile->d_name, '.'); + if (!separate) goto end; + if (!strstr(filter, separate + 1)) goto end; } - return ERROR_NONE; + strncpy((char *)itemPath, (const char *)path, strlen((const char *)path) + 1); + if (strlen(currentFile->d_name) < 1024 - strlen((const char *)itemPath)) + strncat((char *)itemPath, currentFile->d_name, sizeof(currentFile->d_name)); + if (stat((const char *)itemPath, &st) != 0) goto end; + if (enum_directory && ((st.st_mode & S_IFMT) != S_IFDIR)) { + goto end; + } + + if (!enum_directory && ((st.st_mode & S_IFMT) == S_IFDIR)) goto end; + file = (unsigned char *)currentFile->d_name; + + if (enumDirFct(cbck, (char *)file, (char *)itemPath)) { + break; + } + end: + currentFile = readdir(currentDir); + } + + return ERROR_NONE; } -std::string DownloadManager::GetRandomString(int size) -{ - std::string str = ""; - const char CCH[] = "_0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"; +std::string DownloadManager::GetRandomString(int size) { + std::string str = ""; + const char CCH[] = "_0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"; - // generate seed for random number with count - srand(m_count % INT32_MAX); + // generate seed for random number with count + srand(m_count % INT32_MAX); - for (int i = 0; i < size; ++i){ - int x = rand() / (RAND_MAX / (sizeof(CCH) - 1)); - str += CCH[x]; - } - str = mFilePrefix + "_" + str; + for (int i = 0; i < size; ++i) { + int x = rand() / (RAND_MAX / (sizeof(CCH) - 1)); + str += CCH[x]; + } + str = mFilePrefix + "_" + str; - m_count++; - if(m_count == 0) m_count++; + m_count++; + if (m_count == 0) m_count++; - return str; + return str; } VCD_OMAF_END diff --git a/src/OmafDashAccess/DownloadManager.h b/src/OmafDashAccess/DownloadManager.h index e987bf04..77723f0a 100644 --- a/src/OmafDashAccess/DownloadManager.h +++ 
b/src/OmafDashAccess/DownloadManager.h @@ -147,7 +147,7 @@ class DownloadManager { int mDownloadedFiles; // m_nAllocSize, keep the data - //! in old buf to new buf; - //! - //! \return - //! size of new allocated packet - //! - int ReAllocatePacket(int size){ - if(NULL==m_pPayload) - return AllocatePacket(size); - - if( size < m_nAllocSize ) - return AllocatePacket(size); - - char* buf = m_pPayload; - - m_pPayload = (char*)malloc( size ); - - if(NULL == m_pPayload) return -1; - - memcpy(m_pPayload, buf, m_nAllocSize); - - free(buf); - - m_nAllocSize = size; - m_nRealSize = 0; - return 0; - }; - - //! - //! \brief Set the type for packet - //! - //! \return - //! - //! - void SetType(int type){m_type = type;}; - - //! - //! \brief Get the type of the payload - //! - //! \return - //! the type of the Packet payload - //! - int GetType(){return m_type;}; - - uint64_t GetPTS() { return mPts; }; - void SetPTS(uint64_t pts){ mPts = pts; }; - - void SetRealSize(uint64_t realSize) { m_nRealSize = realSize; }; - uint64_t GetRealSize() { return m_nRealSize; }; - - void SetRwpk(RegionWisePacking *rwpk) { m_rwpk = rwpk; }; - RegionWisePacking* GetRwpk() { return m_rwpk; }; +#include "iso_structure.h" + +#include + +extern "C" { +#include "safestringlib/safe_mem_lib.h" +} + +namespace VCD { +namespace OMAF { + +class MediaPacket : public VCD::NonCopyable { + public: + //! + //! \brief construct + //! + MediaPacket() = default; + + //! + //! \brief construct + //! + MediaPacket(char* buf, int size) { + if (buf && size > 0) { + m_nAllocSize = AllocatePacket(size); + memcpy_s(m_pPayload, size, buf, size); + } + }; + + //! + //! \brief de-construct + //! + virtual ~MediaPacket() { + if (nullptr != m_pPayload) { + free(m_pPayload); + m_pPayload = nullptr; + m_nAllocSize = 0; + m_type = -1; + mPts = 0; + m_nRealSize = 0; + m_segID = 0; + } + if (m_rwpk) deleteRwpk(); + }; + + MediaPacket* InsertParams(std::vector params) { + char* new_dest = nullptr; + // FIXME align size? + if (m_nAllocSize >= m_nRealSize + params.size()) { + new_dest = m_pPayload; + } else { + void* tmp = malloc(m_nRealSize + params.size()); + new_dest = reinterpret_cast(tmp); + m_nAllocSize = m_nRealSize + params.size(); + } + + // 1. move origin payload + memmove(new_dest + params.size(), m_pPayload, m_nRealSize); + memcpy_s(new_dest, m_nAllocSize - m_nRealSize, params.data(), params.size()); + m_nRealSize += params.size(); + + // this is a new buffer + if (new_dest != m_pPayload) { + free(m_pPayload); + m_pPayload = new_dest; + } + return this; + } + + MediaPacket* InsertADTSHdr() { + char* new_dest = nullptr; + // FIXME align size? + if (m_nAllocSize >= m_nRealSize + m_audioADTSHdr.size()) { + new_dest = m_pPayload; + } else { + void* tmp = malloc(m_nRealSize + m_audioADTSHdr.size()); + new_dest = reinterpret_cast(tmp); + m_nAllocSize = m_nRealSize + m_audioADTSHdr.size(); + } + + // 1. move origin payload + memmove(new_dest + m_audioADTSHdr.size(), m_pPayload, m_nRealSize); + memcpy_s(new_dest, m_nAllocSize - m_nRealSize, m_audioADTSHdr.data(), m_audioADTSHdr.size()); + m_nRealSize += m_audioADTSHdr.size(); + + // this is a new buffer + if (new_dest != m_pPayload) { + free(m_pPayload); + m_pPayload = new_dest; + } + return this; + } + + //! + //! \brief Allocate the packet buffer, and fill the buffer with fill + //! + //! \param [in] size + //! the buffer size to be allocated + //! \param [in] fill + //! the init value for the buffer + //! + //! \return + //! size of new allocated packet + //! 
+ int AllocatePacket(int size, char fill = 0) { + if (nullptr != m_pPayload) { + free(m_pPayload); + m_pPayload = nullptr; + m_nAllocSize = 0; + } + + m_pPayload = (char*)malloc(size); + + if (nullptr == m_pPayload) return -1; + + m_nAllocSize = size; + memset(m_pPayload, fill, m_nAllocSize); + m_nRealSize = 0; + return size; + }; + + //! + //! \brief get the buffer pointer of the packet + //! + //! \return + //! the buffer pointer + //! + char* Payload() { return m_pPayload; }; + char* MovePayload() { + char* tmp = m_pPayload; + m_pPayload = nullptr; + return tmp; + } + //! + //! \brief get the size of the buffer + //! + //! \return + //! size of the packet's payload + //! + uint64_t Size() { return m_nRealSize; }; + + //! + //! \brief reallocate the payload buffer. if size > m_nAllocSize, keep the data + //! in old buf to new buf; + //! + //! \return + //! size of new allocated packet + //! + int ReAllocatePacket(size_t size) { + if (nullptr == m_pPayload) return AllocatePacket(size); + + if (size < m_nAllocSize) return AllocatePacket(size); + + char* buf = m_pPayload; + + m_pPayload = (char*)malloc(size); + + if (nullptr == m_pPayload) return -1; + + memcpy_s(m_pPayload, m_nAllocSize, buf, m_nAllocSize); + + free(buf); + + m_nAllocSize = size; + m_nRealSize = 0; + return 0; + }; + + //! + //! \brief Set the type for packet + //! + //! \return + //! + //! + void SetType(int type) { m_type = type; }; + + //! + //! \brief Get the type of the payload + //! + //! \return + //! the type of the Packet payload + //! + int GetType() { return m_type; }; + + uint64_t GetPTS() { return mPts; }; + void SetPTS(uint64_t pts) { mPts = pts; }; + + void SetRealSize(uint64_t realSize) { m_nRealSize = realSize; }; + uint64_t GetRealSize() { return m_nRealSize; }; + // FIXME, refine and optimize + void SetRwpk(std::unique_ptr rwpk) { m_rwpk = std::move(rwpk); }; + // RegionWisePacking* GetRwpk() { return m_rwpk.get(); }; + const RegionWisePacking& GetRwpk() const { return *m_rwpk.get(); }; + void copyRwpk(RegionWisePacking* to) { + if (to && m_rwpk.get()) { + if (to->rectRegionPacking) { + delete[] to->rectRegionPacking; + } + *to = *m_rwpk.get(); + to->rectRegionPacking = new RectangularRegionWisePacking[m_rwpk->numRegions]; + memcpy_s(to->rectRegionPacking, m_rwpk->numRegions * sizeof(RectangularRegionWisePacking), + m_rwpk->rectRegionPacking, m_rwpk->numRegions * sizeof(RectangularRegionWisePacking)); + } + } + void moveRwpk(RegionWisePacking* to) { + if (to && m_rwpk.get()) { + if (to->rectRegionPacking) { + delete[] to->rectRegionPacking; + } + *to = *m_rwpk.get(); + m_rwpk->rectRegionPacking = nullptr; + } + } + int GetSegID() { return m_segID; }; + void SetSegID(int id) { m_segID = id; }; + + void SetQualityRanking(QualityRank qualityRanking) { m_qualityRanking = qualityRanking; }; + QualityRank GetQualityRanking() { return m_qualityRanking; }; + + void SetSRDInfo(const SRDInfo& srdInfo) { +#if 0 + m_srd.left = srdInfo.left; + m_srd.top = srdInfo.top; + m_srd.width = srdInfo.width; + m_srd.height = srdInfo.height; +#endif + m_srd = srdInfo; + }; + + SRDInfo GetSRDInfo() { return m_srd; }; + + void SetVideoID(uint32_t videoId) { m_videoID = videoId; }; + + uint32_t GetVideoID() { return m_videoID; }; + + void SetCodecType(Codec_Type codecType) { m_codecType = codecType; }; + + Codec_Type GetCodecType() { return m_codecType; }; + + void SetVideoWidth(int32_t videoWidth) { m_videoWidth = videoWidth; }; + + int32_t GetVideoWidth() { return m_videoWidth; }; + + void SetVideoHeight(int32_t videoHeight) { 
m_videoHeight = videoHeight; }; + + int32_t GetVideoHeight() { return m_videoHeight; }; + + int32_t SetQualityNum(int32_t numQty) { + if (numQty <= 0) { + return OMAF_ERROR_INVALID_DATA; + } + + m_qtyResolution.resize(numQty); + + return ERROR_NONE; + } + + int32_t GetQualityNum() { return m_qtyResolution.size(); }; + + int32_t SetSourceResolution(int32_t srcId, SourceResolution resolution) { + if (srcId >= 0 && static_cast(srcId) < m_qtyResolution.size()) { + m_qtyResolution[srcId] = resolution; + } else { + OMAF_LOG(LOG_ERROR, "Invalid source index %d !\n", srcId); + return OMAF_ERROR_INVALID_DATA; + } + + return ERROR_NONE; + }; + + SourceResolution* GetSourceResolutions() { return m_qtyResolution.data(); }; + + void SetVideoTileRowNum(uint32_t rowNum) { m_videoTileRows = rowNum; }; + + uint32_t GetVideoTileRowNum() { return m_videoTileRows; }; + + void SetVideoTileColNum(uint32_t colNum) { m_videoTileCols = colNum; }; + + uint32_t GetVideoTileColNum() { return m_videoTileCols; }; + + void SetEOS(bool isEOS) { m_bEOS = isEOS; }; + + bool GetEOS() { return m_bEOS; }; + + void SetHasVideoHeader(bool hasHeader) { m_hasVideoHeader = hasHeader; }; + + void SetVideoHeaderSize(uint32_t hrdSize) { + m_hrdSize = hrdSize; + m_hasVideoHeader = hrdSize > 0 ? true : false; + }; + + void SetVPSLen(uint32_t vpsLen) { m_VPSLen = vpsLen; }; + + void SetSPSLen(uint32_t spsLen) { m_SPSLen = spsLen; }; + + void SetPPSLen(uint32_t ppsLen) { m_PPSLen = ppsLen; }; + + bool GetHasVideoHeader() { return m_hasVideoHeader; }; + + uint32_t GetVideoHeaderSize() { return m_hrdSize; }; + + uint32_t GetVPSLen() { return m_VPSLen; }; + + uint32_t GetSPSLen() { return m_SPSLen; }; + + uint32_t GetPPSLen() { return m_PPSLen; }; + + void SetSegmentEnded(bool isEnded) { m_segmentEnded = isEnded; }; + + bool GetSegmentEnded() { return m_segmentEnded; }; + + void SetMediaType(MediaType mediaType) { m_mediaType = mediaType; }; + + MediaType GetMediaType() { return m_mediaType; }; + + void SetADTSHdr(std::vector audioParams) + { + m_audioADTSHdr = audioParams; + printf("m_audioADTSHdr size %ld\n", m_audioADTSHdr.size()); + }; private: - char* m_pPayload; //!rectRegionPacking != NULL) - { - delete []m_rwpk->rectRegionPacking; - m_rwpk->rectRegionPacking = NULL; - } - delete m_rwpk; - m_rwpk = NULL; - } + MediaPacket& operator=(const MediaPacket& other) { return *this; }; + MediaPacket(const MediaPacket& other) { /* do not create copies */ }; + + private: + char* m_pPayload = nullptr; //!< the payload buffer of the packet + size_t m_nAllocSize = 0; //!< the allocated size of packet + size_t m_nRealSize = 0; //!< real size of packet + int m_type = -1; //!< the type of the payload + uint64_t mPts = 0; + int m_segID = 0; + // RegionWisePacking* m_rwpk; + std::unique_ptr m_rwpk; + QualityRank m_qualityRanking = HIGHEST_QUALITY_RANKING; + SRDInfo m_srd; + + uint32_t m_videoID = 0; + Codec_Type m_codecType = VideoCodec_HEVC; + int32_t m_videoWidth = 0; + int32_t m_videoHeight = 0; + // int32_t m_numQuality = 0; + // SourceResolution* m_qtyResolution = nullptr_t; + std::vector m_qtyResolution; + uint32_t m_videoTileRows = 0; + uint32_t m_videoTileCols = 0; + bool m_bEOS = false; + + bool m_hasVideoHeader = false; //!< whether the media packet includes VPS/SPS/PPS + uint32_t m_hrdSize = 0; + uint32_t m_VPSLen = 0; + uint32_t m_SPSLen = 0; + uint32_t m_PPSLen = 0; + bool m_segmentEnded = false; + MediaType m_mediaType = MediaType_Video; + std::vector m_audioADTSHdr; + + void deleteRwpk() { + if (m_rwpk) { + if (m_rwpk->rectRegionPacking 
!= nullptr) { + delete[] m_rwpk->rectRegionPacking; + m_rwpk->rectRegionPacking = nullptr; + } + m_rwpk.reset(); } + } }; - -VCD_OMAF_END; +} // namespace OMAF +} // namespace VCD #endif /* MEDIAPACKET_H */ - diff --git a/src/OmafDashAccess/OmafAdaptationSet.cpp b/src/OmafDashAccess/OmafAdaptationSet.cpp index 5e40369e..a246a053 100644 --- a/src/OmafDashAccess/OmafAdaptationSet.cpp +++ b/src/OmafDashAccess/OmafAdaptationSet.cpp @@ -34,446 +34,544 @@ */ #include "OmafAdaptationSet.h" +#include "OmafReaderManager.h" + +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ +#include "../trace/E2E_latency_tp.h" +#endif +#endif #include -#include +//#include VCD_OMAF_BEGIN using namespace VCD::OMAF; -OmafAdaptationSet::OmafAdaptationSet() -{ - mAdaptationSet = NULL; - mRepresentation = NULL; - mInitSegment = NULL; - mSRD = NULL; - mPreselID = NULL; - mTwoDQuality = NULL; - mSrqr = NULL; - mCC = NULL; - mEnable = true; - m_bMain = false; - mActiveSegNum = 1; - mSegNum = 1; - mReEnable = false; - mPF = PF_UNKNOWN; - mSegmentDuration = 0; - mTrackNumber = 0; - mStartNumber = 0; - mID = 0; - mType = MediaType_NONE; - mFpt = FP_UNKNOWN; - mRwpkType = RWPK_UNKNOWN; - memset(&mVideoInfo, 0, sizeof(VideoInfo)); - memset(&mAudioInfo, 0, sizeof(AudioInfo)); - pthread_mutex_init(&mMutex, NULL); +OmafAdaptationSet::OmafAdaptationSet() { + mAdaptationSet = nullptr; + mRepresentation = nullptr; + mInitSegment = nullptr; + mSRD = nullptr; + mPreselID = nullptr; + mTwoDQuality = nullptr; + mSrqr = nullptr; + mCC = nullptr; + mEnable = true; + m_bMain = false; + mActiveSegNum = 1; + mSegNum = 1; + mReEnable = false; + mPF = PF_UNKNOWN; + mSegmentDuration = 0; + mTrackNumber = 0; + mStartNumber = 1; + mID = 0; + mType = MediaType_NONE; + mFpt = FP_UNKNOWN; + mRwpkType = RWPK_UNKNOWN; + mTileInfo = NULL; + mIsExtractorTrack = false; + memset(&mVideoInfo, 0, sizeof(VideoInfo)); + memset(&mAudioInfo, 0, sizeof(AudioInfo)); } -OmafAdaptationSet::~OmafAdaptationSet() -{ - this->ClearSegList(); - pthread_mutex_destroy(&mMutex); +OmafAdaptationSet::~OmafAdaptationSet() { + this->ClearSegList(); - SAFE_DELETE(mInitSegment); + // SAFE_DELETE(mInitSegment); - if(mBaseURL.size()) - { - mBaseURL.clear(); - } + if (mBaseURL.size()) { + mBaseURL.clear(); + } } -OmafAdaptationSet::OmafAdaptationSet( AdaptationSetElement* pAdaptationSet ):OmafAdaptationSet() +OmafAdaptationSet::OmafAdaptationSet( AdaptationSetElement* pAdaptationSet, ProjectionFormat pf, bool isExtractorTrack ):OmafAdaptationSet() { + mPF = pf; + mIsExtractorTrack = isExtractorTrack; Initialize(pAdaptationSet); } -int OmafAdaptationSet::Initialize(AdaptationSetElement* pAdaptationSet) -{ - mAdaptationSet = pAdaptationSet; - - SelectRepresentation( ); - - // if( ERROR_NOT_FOUND == OmafProperty::Get2DQualityRanking(mAdaptationSet->GetAdditionalSubNodes(), &mTwoDQuality) ){ - // OmafProperty::Get2DQualityRanking(mRepresentation->GetAdditionalSubNodes(), &mTwoDQuality); - // } - - // OmafProperty::GetFramePackingType(mAdaptationSet->GetAdditionalSubNodes(), this->mFpt); - // if(FP_UNKNOWN==mFpt) - // OmafProperty::GetFramePackingType(mRepresentation->GetAdditionalSubNodes(), this->mFpt); - - mSrqr = mAdaptationSet->GetSphereQuality(); - mSRD = mAdaptationSet->GetSRD(); - mPreselID = mAdaptationSet->GetPreselection(); - mRwpkType = mAdaptationSet->GetRwpkType(); - mPF = mAdaptationSet->GetProjectionFormat(); - mCC = mAdaptationSet->GetContentCoverage(); - mID = stoi(mAdaptationSet->GetId()); - - - for(auto it = mRepresentation->GetDependencyIDs().begin(); it != 
mRepresentation->GetDependencyIDs().end(); it++ ){ +int OmafAdaptationSet::Initialize(AdaptationSetElement* pAdaptationSet) { + mAdaptationSet = pAdaptationSet; + + SelectRepresentation(); + + mMimeType = mAdaptationSet->GetMimeType(); + std::string type = GetSubstr(mMimeType, '/', true); + + if (type == "video") + { + mSrqr = mAdaptationSet->GetSphereQuality(); + mSRD = mAdaptationSet->GetSRD(); + mPreselID = mAdaptationSet->GetPreselection(); + mRwpkType = mAdaptationSet->GetRwpkType(); + mCC = mAdaptationSet->GetContentCoverage(); + //mID = stoi(mAdaptationSet->GetId()); + + if ((mPF == ProjectionFormat::PF_CUBEMAP) && !IsExtractor()) + { + mTileInfo = new TileDef; + if (!mTileInfo) + return OMAF_ERROR_NULL_PTR; + if (NULL == mSRD) + { + OMAF_LOG(LOG_ERROR, "SRD information is invalid for track %d!\n", mID); + return OMAF_ERROR_NULL_PTR; + } + mTileInfo->x = mSRD->get_X(); + mTileInfo->y = mSRD->get_Y(); + } + + for (auto it = mRepresentation->GetDependencyIDs().begin(); it != mRepresentation->GetDependencyIDs().end(); it++) { std::string id = *it; mDependIDs.push_back(atoi(id.c_str())); - } - - SegmentElement* segment = mRepresentation->GetSegment(); - - if(NULL != segment){ - mStartNumber = segment->GetStartNumber(); - mSegmentDuration = segment->GetDuration() / segment->GetTimescale(); - } - - // mAudioInfo.sample_rate = parse_int( mRepresentation->GetAudioSamplingRate().c_str() ); - // mAudioInfo.channels = mRepresentation->GetAudioChannelConfiguration().size(); - // mAudioInfo.channel_bytes = 2; - - mVideoInfo.bit_rate = mRepresentation->GetBandwidth(); - mVideoInfo.height = mRepresentation->GetHeight(); - mVideoInfo.width = mRepresentation->GetWidth(); - mVideoInfo.frame_Rate.num = atoi(GetSubstr(mRepresentation->GetFrameRate(), '/', true).c_str()); - mVideoInfo.frame_Rate.den = atoi(GetSubstr(mRepresentation->GetFrameRate(), '/', false).c_str()); - mVideoInfo.sar.num = atoi(GetSubstr(mRepresentation->GetSar(), ':', true).c_str()); - mVideoInfo.sar.den = atoi(GetSubstr(mRepresentation->GetSar(), ':', false).c_str()); - - mMimeType = mAdaptationSet->GetMimeType(); - mCodec = mAdaptationSet->GetCodecs(); - - mType = MediaType_Video; - if( GetSubstr(mRepresentation->GetMimeType(), '/', true) == "audio") - mType = MediaType_Audio; - - JudgeMainAdaptationSet(); - - return ERROR_NONE; + } + } + + mID = stoi(mAdaptationSet->GetId()); + OMAF_LOG(LOG_INFO, "ID of AS %d\n", mID); + SegmentElement* segment = mRepresentation->GetSegment(); + + if (nullptr != segment) { + mStartNumber = segment->GetStartNumber(); + mSegmentDuration = segment->GetDuration() / segment->GetTimescale(); + OMAF_LOG(LOG_INFO, "Segment duration %ld\n", mSegmentDuration); + } + + // mAudioInfo.sample_rate = parse_int( + // mRepresentation->GetAudioSamplingRate().c_str() ); mAudioInfo.channels = + // mRepresentation->GetAudioChannelConfiguration().size(); + // mAudioInfo.channel_bytes = 2; + + if (type == "video") + { + mVideoInfo.bit_rate = mRepresentation->GetBandwidth(); + mVideoInfo.height = mRepresentation->GetHeight(); + mVideoInfo.width = mRepresentation->GetWidth(); + mVideoInfo.frame_Rate.num = atoi(GetSubstr(mRepresentation->GetFrameRate(), '/', true).c_str()); + mVideoInfo.frame_Rate.den = atoi(GetSubstr(mRepresentation->GetFrameRate(), '/', false).c_str()); + mVideoInfo.sar.num = atoi(GetSubstr(mRepresentation->GetSar(), ':', true).c_str()); + mVideoInfo.sar.den = atoi(GetSubstr(mRepresentation->GetSar(), ':', false).c_str()); + mType = MediaType_Video; + JudgeMainAdaptationSet(); + } + else if (type == 
"audio") + { + AudioChannelConfigurationElement* audioElement = mRepresentation->GetAudioChlCfg(); + if (!audioElement) + { + OMAF_LOG(LOG_ERROR, "Failed to get audio channel configuration element from MPD parsing!\n"); + return OMAF_ERROR_INVALID_DATA; + } + + mAudioInfo.channels = audioElement->GetChannelCfg(); + //mAudioInfo.channel_bytes = + mAudioInfo.sample_rate = mRepresentation->GetAudioSamplingRate(); + OMAF_LOG(LOG_INFO, "Audio sample rate %u and channel cfg %u\n", mAudioInfo.sample_rate, mAudioInfo.channels); + + mType = MediaType_Audio; + } + + mCodec = mAdaptationSet->GetCodecs(); + + return ERROR_NONE; } -int OmafAdaptationSet::SelectRepresentation( ) -{ - std::vector pRep = mAdaptationSet->GetRepresentations(); +int OmafAdaptationSet::SelectRepresentation() { + std::vector pRep = mAdaptationSet->GetRepresentations(); - ///FIX; so far choose the first rep in the Representation list - this->mRepresentation = pRep[0]; + /// FIX; so far choose the first rep in the Representation list + this->mRepresentation = pRep[0]; - return ERROR_NONE; + return ERROR_NONE; } -void OmafAdaptationSet::JudgeMainAdaptationSet() -{ - if(NULL == mAdaptationSet || !mSRD) return ; - - if( mType == MediaType_Video ){ - if( this->mSRD->get_H() == 0 && mSRD->get_Y() == 0){ - m_bMain = true; - return ; - } - }else{ - m_bMain = true; - return ; - } - m_bMain = false; -} +void OmafAdaptationSet::JudgeMainAdaptationSet() { + if (nullptr == mAdaptationSet || !mSRD) return; -int OmafAdaptationSet::LoadLocalInitSegment() -{ - int ret = ERROR_NONE; - - for (auto it = mBaseURL.begin(); it != mBaseURL.end(); it++) - { - BaseUrlElement *baseURL = *it; - std::string url = baseURL->GetPath(); + if (mType == MediaType_Video) { + if (this->mSRD->get_H() == 0 && mSRD->get_W() == 0) { + m_bMain = true; + return; } - - SegmentElement* seg = mRepresentation->GetSegment(); - - if( NULL == seg ){ - LOG(ERROR) << "Create Initial SegmentElement for AdaptationSet:" << this->mID - << " failed" << endl; - return ERROR_NULL_PTR; - } - - auto repID = mRepresentation->GetId(); - - mInitSegment = new OmafSegment(seg, mSegNum, true); - - if(NULL == mInitSegment ) { - LOG(ERROR) << "New Initial OmafSegment for AdaptationSet:" << this->mID - << " failed" << endl; - return ERROR_NULL_PTR; - } - - LOG(INFO)<<"Load Initial OmafSegment for AdaptationSet "<mID<GetSegment(); - - if( NULL == seg ){ - LOG(ERROR) << "Create Initial SegmentElement for AdaptationSet:" << this->mID - << " failed" << endl; - return ERROR_NULL_PTR; - } - - auto repID = mRepresentation->GetId(); - - OmafSegment* pSegment = new OmafSegment(seg, mSegNum, false); - - if(NULL == pSegment ) { - LOG(ERROR) << "Create OmafSegment for AdaptationSet: " << this->mID - <<" Number: " << mActiveSegNum - << " failed" << endl; - - return ERROR_NULL_PTR; - } - - pSegment->SetInitSegID(this->mInitSegment->GetInitSegID()); - - LOG(INFO)<<"Load OmafSegment for AdaptationSet "<mID<GetPath(); + } + + SegmentElement* seg = mRepresentation->GetSegment(); + if (nullptr == seg) { + OMAF_LOG(LOG_ERROR, "Create Initial SegmentElement for AdaptationSet: %d failed\n", this->mID); + return ERROR_NULL_PTR; + } + + auto repID = mRepresentation->GetId(); +#if 0 + mInitSegment = new OmafSegment(seg, mSegNum, true); +#else + DashSegmentSourceParams params; + params.dash_url_ = seg->GenerateCompleteURL(mBaseURL, repID, 0); + params.priority_ = TaskPriority::NORMAL; + params.timeline_point_ = static_cast(mSegNum); + mInitSegment = std::make_shared(params, mSegNum, true); +#endif + if (nullptr == 
mInitSegment) { + OMAF_LOG(LOG_ERROR, "New Initial OmafSegment for AdaptationSet: %d failed\n", this->mID); + return ERROR_NULL_PTR; + } + + OMAF_LOG(LOG_INFO, "Load Initial OmafSegment for AdaptationSet %d\n", this->mID ); + + return ret; +} - mSegments.push_back(pSegment); +int OmafAdaptationSet::LoadLocalSegment() { + int ret = ERROR_NONE; + if (!mEnable) { mActiveSegNum++; - + mSegNum++; return ret; + } + + SegmentElement* seg = mRepresentation->GetSegment(); + + if (nullptr == seg) { + OMAF_LOG(LOG_ERROR, "Create Initial SegmentElement for AdaptationSet: %d failed\n", this->mID); + return ERROR_NULL_PTR; + } + + auto repID = mRepresentation->GetId(); +#if 0 + OmafSegment* pSegment = new OmafSegment(seg, mSegNum, false); +#else + DashSegmentSourceParams params; + params.dash_url_ = seg->GenerateCompleteURL(mBaseURL, repID, 0); + params.priority_ = TaskPriority::NORMAL; + params.timeline_point_ = static_cast(mSegNum); + OmafSegment::Ptr pSegment = std::make_shared(params, mSegNum, false); +#endif + + if (pSegment.get() != nullptr) { + if (this->mInitSegment == nullptr) return ERROR_NULL_PTR; + pSegment->SetInitSegID(this->mInitSegment->GetInitSegID()); + pSegment->SetSegID(mSegNum); + pSegment->SetTrackId(this->mInitSegment->GetTrackId()); + pSegment->SetMediaType(mType); + + if ((mType == MediaType_Video) && (typeid(*this) != typeid(OmafExtractor))) { + auto qualityRanking = GetRepresentationQualityRanking(); + pSegment->SetQualityRanking(qualityRanking); + SRDInfo srdInfo; + srdInfo.left = mSRD->get_X(); + srdInfo.top = mSRD->get_Y(); + srdInfo.width = mSRD->get_W(); + srdInfo.height = mSRD->get_H(); + pSegment->SetSRDInfo(srdInfo); + } + else if (mType == MediaType_Audio) + { + pSegment->SetAudioChlNum(mAudioInfo.channels); + pSegment->SetAudioSampleRate(mAudioInfo.sample_rate); + } + + OMAF_LOG(LOG_INFO, "Load OmafSegment for AdaptationSet %d\n", this->mID ); + + mSegments.push_back(std::move(pSegment)); + + mActiveSegNum++; + mSegNum++; + return ret; + } + else { + OMAF_LOG(LOG_ERROR, "Create OmafSegment for AdaptationSet: %d Number: %d failed\n", this->mID, mActiveSegNum); + + return ERROR_NULL_PTR; + } } -int OmafAdaptationSet::LoadAssignedInitSegment(std::string assignedSegment) -{ - int ret = ERROR_NONE; +int OmafAdaptationSet::LoadAssignedInitSegment(std::string assignedSegment) { + int ret = ERROR_NONE; - ret = LoadLocalInitSegment(); - if (ret) - return ret; + ret = LoadLocalInitSegment(); + if (ret) return ret; - OmafSegment *initSeg = GetInitSegment(); - if (!initSeg) - { - LOG(ERROR) << "Failed to get local init segment" << endl; - return ERROR_NOT_FOUND; - } + OmafSegment::Ptr initSeg = GetInitSegment(); + if (!initSeg) { + OMAF_LOG(LOG_ERROR, "Failed to get local init segment\n"); + return ERROR_NOT_FOUND; + } - initSeg->SetSegmentCacheFile(assignedSegment); - initSeg->SetSegStored(); + initSeg->SetSegmentCacheFile(assignedSegment); + initSeg->SetSegStored(); - return ret; + return ret; } -OmafSegment* OmafAdaptationSet::LoadAssignedSegment(std::string assignedSegment) -{ - int ret = ERROR_NONE; - - ret = LoadLocalSegment(); - if (ret) - { - LOG(ERROR) << "Failed to load local segment " << endl; - return NULL; - } - - OmafSegment *newSeg = GetLocalNextSegment(); - if (!newSeg) - { - LOG(ERROR) << "Failed to get local segment" << endl; - return NULL; - } - - OmafSegment *initSeg = GetInitSegment(); - if (!initSeg) - { - LOG(ERROR) << "Failed to get local init segment" << endl; - return NULL; - } - - newSeg->SetSegmentCacheFile(assignedSegment); - newSeg->SetSegStored(); 
- - return newSeg; +OmafSegment::Ptr OmafAdaptationSet::LoadAssignedSegment(std::string assignedSegment) { + int ret = ERROR_NONE; + ret = LoadLocalSegment(); + if (ret) { + OMAF_LOG(LOG_ERROR, "Failed to load local segment\n"); + return nullptr; + } + + OmafSegment::Ptr newSeg = GetLocalNextSegment(); + if (!newSeg) { + OMAF_LOG(LOG_ERROR, "Failed to get local segment\n"); + return nullptr; + } + + OmafSegment::Ptr initSeg = GetInitSegment(); + if (!initSeg) { + OMAF_LOG(LOG_ERROR, "Failed to get local init segment\n"); + return nullptr; + } + + newSeg->SetSegmentCacheFile(assignedSegment); + newSeg->SetSegStored(); + + return newSeg; } /////Download relative methods -int OmafAdaptationSet::DownloadInitializeSegment() -{ - int ret = ERROR_NONE; +int OmafAdaptationSet::DownloadInitializeSegment() { + int ret = ERROR_NONE; - SegmentElement* seg = mRepresentation->GetSegment(); + if (omaf_reader_mgr_ == nullptr) { + OMAF_LOG(LOG_ERROR, "The omaf reader manager is empty!\n"); + return ERROR_NULL_PTR; + } - if( NULL == seg ){ - LOG(ERROR) << "Create Initial SegmentElement for AdaptationSet:" << this->mID - << " failed" << endl; - return ERROR_NULL_PTR; - } + SegmentElement* seg = mRepresentation->GetSegment(); + if (nullptr == seg) { + OMAF_LOG(LOG_ERROR, "Create Initial SegmentElement for AdaptationSet: %d failed\n", this->mID); + return ERROR_NULL_PTR; + } - auto repID = mRepresentation->GetId(); + auto repID = mRepresentation->GetId(); +#if 0 ret = seg->InitDownload(mBaseURL, repID, 0); if( ERROR_NONE != ret ){ SAFE_DELETE(seg); - LOG(ERROR) << "Fail to Init OmafSegment Download for AdaptationSet:" << this->mID - << endl; + OMAF_LOG(LOG_ERROR, "Fail to Init OmafSegment Download for AdaptationSet: %d\n", this->mID); } - mInitSegment = new OmafSegment(seg, mSegNum, true); +#else - if(NULL == mInitSegment ) { - LOG(ERROR) << "New Initial OmafSegment for AdaptationSet:" << this->mID - << " failed" << endl; - return ERROR_NULL_PTR; - } + DashSegmentSourceParams params; + params.dash_url_ = seg->GenerateCompleteURL(mBaseURL, repID, 0); + params.priority_ = TaskPriority::NORMAL; + params.timeline_point_ = static_cast(mSegNum); - ret = mInitSegment->Open(); + mInitSegment = std::make_shared(params, mSegNum, true); - if( ERROR_NONE != ret ){ - SAFE_DELETE(mInitSegment); - LOG(ERROR) << "Fail to Download Initial OmafSegment for AdaptationSet:" << this->mID - << endl; - } +#endif - LOG(INFO)<<"Download Initial OmafSegment for AdaptationSet "<mID<mID); + return ERROR_NULL_PTR; + } - return ret; + ret = omaf_reader_mgr_->OpenInitSegment(mInitSegment); + + if (ERROR_NONE != ret) { + mInitSegment.reset(); + OMAF_LOG(LOG_ERROR, "Fail to Download Initial OmafSegment for AdaptationSet: %d\n", this->mID); + } + + OMAF_LOG(LOG_INFO, "Download Initial OmafSegment for AdaptationSet %d\n", this->mID); + + return ret; } -int OmafAdaptationSet::DownloadSegment( ) -{ - int ret = ERROR_NONE; +int OmafAdaptationSet::DownloadSegment() { + int ret = ERROR_NONE; - if(!mEnable){ - mActiveSegNum++; - mSegNum++; - return ret; - } + if (!mEnable) { + mActiveSegNum++; + mSegNum++; + return ret; + } + OMAF_LOG(LOG_INFO, "Download OmafSegment id %d for AdaptationSet: %d\n", mSegNum, this->mID); - SegmentElement* seg = mRepresentation->GetSegment(); + if (omaf_reader_mgr_ == nullptr) { + OMAF_LOG(LOG_ERROR, "The omaf reader manager is empty!\n"); + return ERROR_NULL_PTR; + } - if( NULL == seg ){ - LOG(ERROR) << "Create Initial SegmentElement for AdaptationSet:" << this->mID - << " failed" << endl; - return ERROR_NULL_PTR; - } + 
SegmentElement* seg = mRepresentation->GetSegment(); - auto repID = mRepresentation->GetId(); - ret = seg->InitDownload(mBaseURL, repID, mActiveSegNum); + if (nullptr == seg) { + OMAF_LOG(LOG_ERROR, "Create Initial SegmentElement for AdaptationSet: %d failed\n", this->mID); + return ERROR_NULL_PTR; + } - if( ERROR_NONE != ret ){ - SAFE_DELETE(seg); - LOG(ERROR) << "Fail to Init OmafSegment Download for AdaptationSet:" << this->mID - << endl; - } + auto repID = mRepresentation->GetId(); +#if 0 + ret = seg->InitDownload(mBaseURL, repID, mActiveSegNum); - OmafSegment* pSegment = new OmafSegment(seg, mSegNum, false, mReEnable); + if (ERROR_NONE != ret) { + SAFE_DELETE(seg); + OMAF_LOG(LOG_ERROR, "Fail to Init OmafSegment Download for AdaptationSet: %d\n", this->mID); + } - // reset the re-enable flag, since it will be updated with different viewport - if(mReEnable) mReEnable = false; + OmafSegment* pSegment = new OmafSegment(seg, mSegNum, false, mReEnable); +#else + DashSegmentSourceParams params; - if(NULL == pSegment ) { - LOG(ERROR) << "Create OmafSegment for AdaptationSet: " << this->mID - <<" Number: " << mActiveSegNum - << " failed" << endl; + params.dash_url_ = seg->GenerateCompleteURL(mBaseURL, repID, mActiveSegNum); + params.priority_ = TaskPriority::NORMAL; + params.timeline_point_ = static_cast(mSegNum); - return ERROR_NULL_PTR; - } + OmafSegment::Ptr pSegment = std::make_shared(params, mSegNum, false); + +#endif + // reset the re-enable flag, since it will be updated with different viewport + if (mReEnable) mReEnable = false; + if (pSegment.get() != nullptr) { + if (this->mInitSegment.get() == nullptr) return ERROR_NULL_PTR; pSegment->SetInitSegID(this->mInitSegment->GetInitSegID()); + pSegment->SetMediaType(mType); + if ((mType == MediaType_Video) && (typeid(*this) != typeid(OmafExtractor))) { + auto qualityRanking = GetRepresentationQualityRanking(); + pSegment->SetQualityRanking(qualityRanking); + SRDInfo srdInfo; + srdInfo.left = mSRD->get_X(); + srdInfo.top = mSRD->get_Y(); + srdInfo.width = mSRD->get_W(); + srdInfo.height = mSRD->get_H(); + pSegment->SetSRDInfo(srdInfo); + } + else if (mType == MediaType_Audio) + { + pSegment->SetAudioChlNum(mAudioInfo.channels); + pSegment->SetAudioSampleRate(mAudioInfo.sample_rate); + } - ret = pSegment->Open(); + pSegment->SetSegID(mSegNum); + pSegment->SetTrackId(this->mInitSegment->GetTrackId()); + ret = omaf_reader_mgr_->OpenSegment(std::move(pSegment), IsExtractor()); - if( ERROR_NONE != ret ){ - SAFE_DELETE(pSegment); - LOG(ERROR) << "Fail to Download OmafSegment for AdaptationSet:" << this->mID - << endl; + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Fail to Download OmafSegment for AdaptationSet: %d\n", this->mID); } - LOG(INFO)<<"Download OmafSegment for AdaptationSet "<mID<mID, mActiveSegNum); - gTime = now.tv_sec; - t = gmtime(&gTime); + return ERROR_NULL_PTR; + } +} - uint64_t current = timegm(t); - current *= 1000; +std::string OmafAdaptationSet::GetUrl(const SegmentSyncNode& node) const { + SegmentElement* seg = mRepresentation->GetSegment(); - if (current < nAvailableStartTime) - { - LOG(ERROR) << "Unreasonable current time " << current - << "which is earlier than available time " << nAvailableStartTime; + if (nullptr == seg) { + OMAF_LOG(LOG_ERROR, "Create Initial SegmentElement for AdaptationSet: %d failed\n", this->mID); + return std::string(); + } - return -1; - } - mActiveSegNum = (current - nAvailableStartTime) / (mSegmentDuration * 1000) + mStartNumber; + auto repID = mRepresentation->GetId(); + return 
seg->GenerateCompleteURL(mBaseURL, repID, static_cast(node.segment_value.number_)); +} - LOG(INFO) << "current " << current << " and available time " << nAvailableStartTime << " Start segment index " << mActiveSegNum << endl; - return mActiveSegNum; +/////read relative methods +int OmafAdaptationSet::UpdateStartNumberByTime(uint64_t nAvailableStartTime) { + time_t gTime; + struct tm* t; + struct timeval now; + struct timezone tz; + gettimeofday(&now, &tz); + // struct timeb timeBuffer; + // ftime(&timeBuffer); + // now.tv_sec = (long)(timeBuffer.time); + // now.tv_usec = timeBuffer.millitm * 1000; + gTime = now.tv_sec; + t = gmtime(&gTime); + + uint64_t current = timegm(t); + current *= 1000; + + if (current < nAvailableStartTime) { + OMAF_LOG(LOG_ERROR, "Unreasonable current time %lld which is earlier than available time %lld\n", current, nAvailableStartTime); + + return -1; + } + mActiveSegNum = (current - nAvailableStartTime) / (mSegmentDuration * 1000) + mStartNumber; + + OMAF_LOG(LOG_INFO, "Current time= %lld and available time= %lld.\n", current, nAvailableStartTime); + OMAF_LOG(LOG_INFO, "Set start segment index= %d\n", mActiveSegNum); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + tracepoint(E2E_latency_tp_provider, + da_ssi_info, + mActiveSegNum); +#endif +#endif + return mActiveSegNum; } -OmafSegment* OmafAdaptationSet::GetNextSegment() -{ - OmafSegment* seg = NULL; +OmafSegment::Ptr OmafAdaptationSet::GetNextSegment() { + OmafSegment::Ptr seg; - pthread_mutex_lock(&mMutex); - seg = (OmafSegment*) mSegments.front(); - mSegments.pop_front(); - pthread_mutex_unlock(&mMutex); + std::lock_guard lock(mMutex); + seg = std::move(mSegments.front()); + mSegments.pop_front(); - return seg; + return seg; } -OmafSegment* OmafAdaptationSet::GetLocalNextSegment() -{ - OmafSegment* seg = NULL; +OmafSegment::Ptr OmafAdaptationSet::GetLocalNextSegment() { + OmafSegment::Ptr seg; - seg = (OmafSegment*) mSegments.front(); - mSegments.pop_front(); + seg = std::move(mSegments.front()); + mSegments.pop_front(); - return seg; + return seg; } -void OmafAdaptationSet::ClearSegList() -{ - std::list::iterator it; - pthread_mutex_lock(&mMutex); - for(auto it = mSegments.begin(); it != mSegments.end(); it++){ - delete *it; - *it = NULL; - } - mSegments.clear(); - pthread_mutex_unlock(&mMutex); +void OmafAdaptationSet::ClearSegList() { + std::list::iterator it; + std::lock_guard lock(mMutex); + for (auto it = mSegments.begin(); it != mSegments.end(); it++) { + // delete *it; + *it = nullptr; + } + mSegments.clear(); } -int OmafAdaptationSet::SeekTo( int seg_num ) -{ - mActiveSegNum = seg_num; - ClearSegList(); - return ERROR_NONE; +int OmafAdaptationSet::SeekTo(int seg_num) { + mActiveSegNum = seg_num; + ClearSegList(); + return ERROR_NONE; } VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafAdaptationSet.h b/src/OmafDashAccess/OmafAdaptationSet.h index 9fe2c87d..6b784127 100644 --- a/src/OmafDashAccess/OmafAdaptationSet.h +++ b/src/OmafDashAccess/OmafAdaptationSet.h @@ -37,191 +37,240 @@ #define OMAFADAPTATIONSET_H #include "general.h" -#include "OmafSegment.h" -#include "OmafDashParser/BaseUrlElement.h" #include "OmafDashParser/AdaptationSetElement.h" +#include "OmafDashParser/BaseUrlElement.h" #include "OmafDashParser/DescriptorElement.h" #include "OmafDashParser/RepresentationElement.h" -VCD_OMAF_BEGIN +#include "OmafSegment.h" + +#include "OmafDashRangeSync.h" + +#include +#include +#include using namespace VCD::VRVideo; -using namespace VCD::OMAF; + +namespace VCD { +namespace OMAF { const static 
uint8_t recordSize = 3; +class OmafReaderManager; +class OmafDashSegmentClient; //! //! \class: OmafAdaptationSet //! \brief: //! class OmafAdaptationSet { -public: - //! - //! \brief construct - //! - OmafAdaptationSet(); - - //! - //! \brief construct from AdaptationSetElement - //! - OmafAdaptationSet( AdaptationSetElement* pAdaptationSet ); - - //! - //! \brief de-construct - //! - virtual ~OmafAdaptationSet(); - -public: - - int LoadLocalInitSegment(); - - int LoadLocalSegment(); - - int LoadAssignedInitSegment(std::string assignedSegment); - - OmafSegment* LoadAssignedSegment(std::string assignedSegment); - - //! - //! \brief Get InitializeSegment for reading. - //! - int DownloadInitializeSegment(); - - //! - //! \brief Download Segment for reading. - //! - int DownloadSegment( ); - - //! - //! \brief Select representation from - //! - int SelectRepresentation( ); - - //! - //! \brief update start number for download based on stream start time - //! \param nAvailableStartTime : the start time for live stream in mpd - //! - int UpdateStartNumberByTime(uint64_t nAvailableStartTime); - - //! - //! \brief Initialize the AdaptationSet - //! - int Initialize(AdaptationSetElement* pAdaptationSet); - - //! - //! \brief Get head segment in segment list which have been downloaded - //! - OmafSegment* GetNextSegment(); - - OmafSegment* GetLocalNextSegment(); - - //! - //! \brief Seek to special segment and is is valid in static mode - //! - int SeekTo( int seg_num ); - - //! - //! \brief The following methods are basic Get/Set for fields - //! - void SetTrackNumber(int nTrackNumber) { mTrackNumber = nTrackNumber; }; - void SetMediaType(MediaType type) { mType = type; }; - void SetProjectionFormat(ProjectionFormat format) { mPF = format; }; - int GetTrackNumber() { return mTrackNumber; }; - void SetBaseURL(std::vector url ) { mBaseURL = url; }; - OmafSrd* GetSRD() { return mSRD; }; - int GetID() { return mID; }; - std::vector GetDependIDs() { return mDependIDs; }; - std::string GetMimeType() { return mMimeType; }; - std::vector GetCodec() { return mCodec; }; - OmafSegment* GetInitSegment() { return mInitSegment; }; - bool IsMain() { return m_bMain; }; - RwpkType GetRegionWisePacking() { return mRwpkType; }; - SphereQuality* GetQualityRanking() { return mSrqr; }; - PreselValue* GetPreselection() { return mPreselID; }; - TwoDQualityRanking* GetTwoDQualityRanking() { return mTwoDQuality; }; - ContentCoverage* GetContentCoverage() { return mCC; }; - FramePackingType GetFramePackingType() { return mFpt; }; - ProjectionFormat GetProjectionFormat() { return mPF; }; - VideoInfo GetVideoInfo() { return mVideoInfo; }; - AudioInfo GetAudioInfo() { return mAudioInfo; }; - MediaType GetMediaType() { return mType; }; - uint64_t GetSegmentDuration() { return mSegmentDuration; }; - uint32_t GetStartNumber() { return mStartNumber; }; - std::string GetRepresentationId() { return mRepresentation->GetId(); }; - uint32_t GetRepresentationQualityRanking() { return stoi(mRepresentation->GetQualityRanking());}; - int Enable( bool bEnable ) + public: + //! + //! \brief construct + //! + OmafAdaptationSet(); + + //! + //! \brief construct from AdaptationSetElement + //! + OmafAdaptationSet( AdaptationSetElement* pAdaptationSet, ProjectionFormat pf, bool isExtractorTrack ); + + //! + //! \brief de-construct + //! 
+ virtual ~OmafAdaptationSet(); + + public: + void SetOmafReaderMgr(std::shared_ptr mgr) noexcept { omaf_reader_mgr_ = std::move(mgr); } + int LoadLocalInitSegment(); + + int LoadLocalSegment(); + + int LoadAssignedInitSegment(std::string assignedSegment); + + OmafSegment::Ptr LoadAssignedSegment(std::string assignedSegment); + + //! + //! \brief Get InitializeSegment for reading. + //! + int DownloadInitializeSegment(); + + //! + //! \brief Download Segment for reading. + //! + int DownloadSegment(); + + //! + //! \brief Select representation from + //! + int SelectRepresentation(); + + //! + //! \brief update start number for download based on stream start time + //! \param nAvailableStartTime : the start time for live stream in mpd + //! + int UpdateStartNumberByTime(uint64_t nAvailableStartTime); + + void UpdateSegmentNumber(int64_t segnum) { mActiveSegNum = segnum; }; + int64_t GetSegmentNumber(void) const { return mActiveSegNum; }; + std::string GetUrl(const SegmentSyncNode& node) const; + //! + //! \brief Initialize the AdaptationSet + //! + int Initialize(AdaptationSetElement* pAdaptationSet); + + //! + //! \brief Get head segment in segment list which have been downloaded + //! + // OmafSegment* GetNextSegment(); + + // OmafSegment* GetLocalNextSegment(); + OmafSegment::Ptr GetNextSegment(); + + OmafSegment::Ptr GetLocalNextSegment(); + + //! + //! \brief Seek to special segment and is is valid in static mode + //! + int SeekTo(int seg_num); + + //! + //! \brief The following methods are basic Get/Set for fields + //! + void SetTrackNumber(int nTrackNumber) { mTrackNumber = nTrackNumber; }; + void SetMediaType(MediaType type) { mType = type; }; + void SetProjectionFormat(ProjectionFormat format) { mPF = format; }; + int GetTrackNumber() { return mTrackNumber; }; + void SetBaseURL(std::vector url) { mBaseURL = url; }; + OmafSrd* GetSRD() { return mSRD; }; + int GetID() { return mID; }; + std::vector GetDependIDs() { return mDependIDs; }; + std::string GetMimeType() { return mMimeType; }; + std::vector GetCodec() { return mCodec; }; + // OmafSegment* GetInitSegment() { return mInitSegment; }; + OmafSegment::Ptr GetInitSegment() { return mInitSegment; }; + bool IsMain() { return m_bMain; }; + RwpkType GetRegionWisePacking() { return mRwpkType; }; + SphereQuality* GetQualityRanking() { return mSrqr; }; + PreselValue* GetPreselection() { return mPreselID; }; + TwoDQualityRanking* GetTwoDQualityRanking() { return mTwoDQuality; }; + ContentCoverage* GetContentCoverage() { return mCC; }; + FramePackingType GetFramePackingType() { return mFpt; }; + ProjectionFormat GetProjectionFormat() { return mPF; }; + VideoInfo GetVideoInfo() { return mVideoInfo; }; + AudioInfo GetAudioInfo() { return mAudioInfo; }; + MediaType GetMediaType() { return mType; }; + uint64_t GetSegmentDuration() { return mSegmentDuration; }; + uint32_t GetStartNumber() { return mStartNumber; }; + std::string GetRepresentationId() { return mRepresentation->GetId(); }; + QualityRank GetRepresentationQualityRanking() { + try { + int rank = stoi(mRepresentation->GetQualityRanking()); + + switch (rank) { + case 1: + return HIGHEST_QUALITY_RANKING; + case 2: + return SECOND_QUALITY_RANKING; + case 3: + return THIRD_QUALITY_RANKING; + default: + return INVALID_QUALITY_RANKING; + } + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when parse the quality ranking. 
ex: %s\n", ex.what()); + return INVALID_QUALITY_RANKING; + } + }; + + void SetTwoDQualityInfos() { + if ((mType == MediaType_Video) && + (mPF == ProjectionFormat::PF_PLANAR) && m_bMain) { - mEnableRecord.push_back(bEnable); - while(mEnableRecord.size() > recordSize) - mEnableRecord.pop_front(); - - // Set mReEnable true if mEnableRecord is {false, false, true}, it's because - // everytime Adaption Set is set as true, it must be set as false before. - // check OmafMediaStream::UpdateEnabledExtractors for more details - if(bEnable && !mEnable && mEnableRecord.size() == recordSize && mEnableRecord.front() == false) - mReEnable = true; - - mEnable = bEnable; - return 0; - }; - bool IsEnabled() { return mEnable; }; - - virtual OmafAdaptationSet* GetClassType(){ - return this; - }; - -private: - - //! - //! \brief Decide whether this adaption set is main Adaption Set; - //! the Main AS will no depend any other AS and has initial seg - //! - void JudgeMainAdaptationSet(); - - void ClearSegList(); - -friend class RepresentationSelector; - -protected: - AdaptationSetElement *mAdaptationSet; // mSegments; // mDependIDs; // mBaseURL; // mSampleList; // mCodec; // mEnableRecord; //GetTwoDQuality(); + } + } + + map GetTwoDQualityInfos() { return mTwoDQualityInfos; }; + + int Enable(bool bEnable) { + mEnableRecord.push_back(bEnable); + while (mEnableRecord.size() > recordSize) mEnableRecord.pop_front(); + + // Set mReEnable true if mEnableRecord is {false, false, true}, it's because + // everytime Adaption Set is set as true, it must be set as false before. + // check OmafMediaStream::UpdateEnabledExtractors for more details + if (bEnable && !mEnable && mEnableRecord.size() == recordSize && mEnableRecord.front() == false) mReEnable = true; + + mEnable = bEnable; + return 0; + }; + bool IsEnabled() { return mEnable; }; + + virtual OmafAdaptationSet* GetClassType() { return this; }; + TileDef* GetTileInfo() { return mTileInfo; }; + virtual bool IsExtractor() { return mIsExtractorTrack; } + + private: + //! + //! \brief Decide whether this adaption set is main Adaption Set; + //! the Main AS will no depend any other AS and has initial seg + //! 
+ void JudgeMainAdaptationSet(); + + void ClearSegList(); + + friend class RepresentationSelector; + + protected: + AdaptationSetElement* mAdaptationSet; // mSegments; // mSegments; + OmafSegment::Ptr mInitSegment; + OmafSrd* mSRD; // mDependIDs; // mBaseURL; // mSampleList; // mCodec; // mEnableRecord; // omaf_reader_mgr_; + bool mIsExtractorTrack; + + std::map mTwoDQualityInfos; // for all planar video sources }; -VCD_OMAF_END; +} // namespace OMAF +} // namespace VCD #endif /* OMAFAdaptationSet_H */ - diff --git a/src/OmafDashAccess/OmafDashAccess.pc b/src/OmafDashAccess/OmafDashAccess.pc index 35326bcd..d898624d 100644 --- a/src/OmafDashAccess/OmafDashAccess.pc +++ b/src/OmafDashAccess/OmafDashAccess.pc @@ -7,4 +7,4 @@ Name:OmafDashAccess Description: OMAF DASH Access library Version:0.0.1-DEV Cflags: -I${prefix}/include -Libs: -L${libdir} -lOmafDashAccess -lstdc++ -l360SCVP -lmp4vr_static -lmp4vr_static_fpic -lstreamsegmenter_static -lstreamsegmenter_static_fpic -lglog -lpthread -lcurl -L/usr/lib64 -L/usr/local/lib64 +Libs: -L${libdir} -lOmafDashAccess -static-libstdc++ -l360SCVP -lglog -lpthread -lcurl -L/usr/lib64 -L/usr/local/lib64 diff --git a/src/OmafDashAccess/OmafDashAccessApi.h b/src/OmafDashAccess/OmafDashAccessApi.h index 67d8b5c6..44e44ef2 100644 --- a/src/OmafDashAccess/OmafDashAccessApi.h +++ b/src/OmafDashAccess/OmafDashAccessApi.h @@ -24,7 +24,6 @@ * POSSIBILITY OF SUCH DAMAGE. */ - /* * File: OmafDashAccessApi.h * Author: Zhang, Andrew @@ -36,9 +35,10 @@ #define OMAFDASHACCESSAPI_H #include -#include "error.h" -#include "data_type.h" + #include "360SCVPAPI.h" +#include "data_type.h" +#include "error.h" #ifdef __cplusplus extern "C" { @@ -46,13 +46,59 @@ extern "C" { typedef void* Handler; +typedef struct _omafHttpProxy { + char* http_proxy; + char* https_proxy; + char* no_proxy; + char* proxy_user; + char* proxy_passwd; +} OmafHttpProxy; + +typedef struct _omafHttpParams { + long conn_timeout; + long total_timeout; + int32_t retry_times; + int ssl_verify_peer; + int ssl_verify_host; +} OmafHttpParams; + +typedef struct _omafStatisticsParams { + int32_t window_size_ms; + int enable; +} OmafStatisticsParams; + +typedef struct _omafSynchronizerParams { + int32_t segment_range_size; + int enable; +} OmafSynchronizerParams; + +typedef struct _omafPredictorParams { + char* name; + char* libpath; + int enable; +} OmafPredictorParams; + +typedef struct _omafDashParams { + //for download + OmafHttpProxy proxy; + OmafHttpParams http_params; + OmafStatisticsParams statistic_params; + OmafSynchronizerParams synchronizer_params; + OmafPredictorParams predictor_params; + long max_parallel_transfers; + int segment_open_timeout_ms; + //for stitch + uint32_t max_decode_width; + uint32_t max_decode_height; +} OmafParams; + /* * the enum for source type; more type will be added in future */ -typedef enum{ - DefaultSource = 0, - MultiResSource, - Reserved +typedef enum { + DefaultSource = 0, + MultiResSource, + Reserved, } SourceType; /* @@ -60,11 +106,20 @@ typedef enum{ * source_type : the structure of videos in the mpd to be processed * cache_path : the directory to store cached downloaded files; a default path * will be used if it is "" + * enable_extractor: whether to enable extractor track mode for packed sub-picture + * log_callback: external logging callback function pointer. 
Glog will be used + * if it is NULL + * plugin_def: 360SCVP library plugin set, now used for tiles selection for + * planar video */ -typedef struct DASHSTREAMINGCLIENT{ - const char* media_url; - SourceType source_type; - const char* cache_path; +typedef struct DASHSTREAMINGCLIENT { + SourceType source_type; + OmafParams omaf_params; + const char* media_url; + const char* cache_path; + bool enable_extractor; + void* log_callback; + PluginDef plugin_def; } DashStreamingClient; /* @@ -72,31 +127,41 @@ typedef struct DASHSTREAMINGCLIENT{ * params: pCtx - [in] the structure for the necessary parameters to handle an dash stream * return: the handle created for the API */ -Handler OmafAccess_Init( DashStreamingClient* pCtx); +Handler OmafAccess_Init(DashStreamingClient* pCtx); /* * description: API to open a dash stream * params: hdl - [in] handler created with DashStreaming_Init * pCtx - [in] the structure for the necessary parameters to handle an dash stream * enablePredictor - [in] flag for use predictor or not + * predictPluginName - [in] name of predict plugin + * libPath - [in] plugin library path * return: the error return from the API */ -int OmafAccess_OpenMedia( Handler hdl, DashStreamingClient* pCtx, bool enablePredictor); +int OmafAccess_OpenMedia(Handler hdl, DashStreamingClient* pCtx, bool enablePredictor, char* predictPluginName, + char* libPath); +/* + * description: API to start a dash stream + * params: hdl - [in] handler created with DashStreaming_Init + * + * return: the error return from the API + */ +int OmafAccess_StartStreaming(Handler hdl); /* * description: API to seek a stream. only work with static mode. not implement yet. * params: hdl - [in] handler created with DashStreaming_Init * time - [in] the position to be seek * return: the error return from the API */ -int OmafAccess_SeekMedia( Handler hdl, uint64_t time ); +int OmafAccess_SeekMedia(Handler hdl, uint64_t time); /* * description: API to close a dash stream * params: hdl - [in]handler created with DashStreaming_Init * return: the error return from the API */ -int OmafAccess_CloseMedia( Handler hdl ); +int OmafAccess_CloseMedia(Handler hdl); /* * description: API to get information of opened dashed stream @@ -104,7 +169,7 @@ int OmafAccess_CloseMedia( Handler hdl ); * info - [out] the media info of opened dash media * return: the error return from the API */ -int OmafAccess_GetMediaInfo( Handler hdl, DashMediaInfo* info ); +int OmafAccess_GetMediaInfo(Handler hdl, DashMediaInfo* info); /* * description: API to get packets according to stream id in the dash media. As for viewport-based @@ -120,7 +185,8 @@ int OmafAccess_GetMediaInfo( Handler hdl, DashMediaInfo* info ); * return: the error return from the API, ERROR_EOS means reach end of * stream for static source */ -int OmafAccess_GetPacket( Handler hdl, int stream_id, DashPacket* packet, int* size, uint64_t* pts, bool needParams, bool clearBuf ); +int OmafAccess_GetPacket(Handler hdl, int stream_id, DashPacket* packet, int* size, uint64_t* pts, bool needParams, + bool clearBuf); /* * description: API to set InitViewport before downloading segment. 
@@ -128,7 +194,7 @@ int OmafAccess_GetPacket( Handler hdl, int stream_id, DashPacket* packet, int* s * clientInfo - [in] the headset info which is needed to calculate viewport * return: the error return from the API */ -int OmafAccess_SetupHeadSetInfo( Handler hdl, HeadSetInfo* clientInfo); +int OmafAccess_SetupHeadSetInfo(Handler hdl, HeadSetInfo* clientInfo); /* * description: API to update Viewport when input shows that viewport is changed @@ -136,7 +202,7 @@ int OmafAccess_SetupHeadSetInfo( Handler hdl, HeadSetInfo* clientInfo); * pose - [in] changed pose info * return: the error return from the API */ -int OmafAccess_ChangeViewport( Handler hdl, HeadPose* pose); +int OmafAccess_ChangeViewport(Handler hdl, HeadPose* pose); /* * description: API to get statistic data such as bandwith etc. @@ -144,7 +210,7 @@ int OmafAccess_ChangeViewport( Handler hdl, HeadPose* pose); * info - [out] the information current statistic data * return: the error return from the API */ -int OmafAccess_Statistic( Handler hdl, DashStatisticInfo* info ); +int OmafAccess_Statistic(Handler hdl, DashStatisticInfo* info); /* * description: API to Close the Handle and release relative resources after dealing with @@ -152,13 +218,10 @@ int OmafAccess_Statistic( Handler hdl, DashStatisticInfo* info ); * params: hdl - [in] handler created with DashStreaming_Init * return: the error return from the API */ -int OmafAccess_Close( Handler hdl ); - +int OmafAccess_Close(Handler hdl); #ifdef __cplusplus } #endif #endif /* VRDASHSTREAMINGAPI_H */ - - diff --git a/src/OmafDashAccess/OmafDashAccessApi_impl.cpp b/src/OmafDashAccess/OmafDashAccessApi_impl.cpp index c4cb0a0e..f7040b33 100644 --- a/src/OmafDashAccess/OmafDashAccessApi_impl.cpp +++ b/src/OmafDashAccess/OmafDashAccessApi_impl.cpp @@ -24,7 +24,6 @@ * POSSIBILITY OF SUCH DAMAGE. 
*/ - /* * File: VRDashStreamingAPI.h * Author: Zhang, Andrew @@ -32,130 +31,252 @@ * Created on January 15, 2019, 1:11 PM */ -#include "OmafDashAccessApi.h" - #include -#include "general.h" -#include "OmafMediaSource.h" +#include + +//#include "../utils/GlogWrapper.h" +#include "OmafDashAccessApi.h" #include "OmafDashSource.h" -#include "../utils/GlogWrapper.h" +#include "OmafMediaSource.h" +#include "OmafTypes.h" +#include "general.h" +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ +#include "../trace/Bandwidth_tp.h" +#include "../trace/E2E_latency_tp.h" +#endif +#endif using namespace std; VCD_USE_VROMAF; VCD_USE_VRVIDEO; -Handler OmafAccess_Init( DashStreamingClient* pCtx) -{ - OmafMediaSource* pSource = new OmafDashSource(); +Handler OmafAccess_Init(DashStreamingClient *pCtx) { + if (pCtx == nullptr) { + return nullptr; + } + OmafMediaSource *pSource = new OmafDashSource(); + + VCD::OMAF::OmafDashParams omaf_dash_params; + const OmafParams &omaf_params = pCtx->omaf_params; + // for download + if (omaf_params.proxy.http_proxy) { + omaf_dash_params.http_proxy_.http_proxy_ = std::string(omaf_params.proxy.http_proxy); + } + if (omaf_params.proxy.https_proxy) { + omaf_dash_params.http_proxy_.https_proxy_ = std::string(omaf_params.proxy.https_proxy); + } + if (omaf_params.proxy.no_proxy) { + omaf_dash_params.http_proxy_.no_proxy_ = std::string(omaf_params.proxy.no_proxy); + } + if (omaf_params.proxy.proxy_user) { + omaf_dash_params.http_proxy_.proxy_user_ = std::string(omaf_params.proxy.proxy_user); + } + if (omaf_params.proxy.proxy_passwd) { + omaf_dash_params.http_proxy_.proxy_passwd_ = std::string(omaf_params.proxy.proxy_passwd); + } + + if (omaf_params.http_params.conn_timeout > 0) { + omaf_dash_params.http_params_.conn_timeout_ = omaf_params.http_params.conn_timeout; + } + if (omaf_params.http_params.total_timeout > 0) { + omaf_dash_params.http_params_.total_timeout_ = omaf_params.http_params.total_timeout; + } + + if (omaf_params.http_params.retry_times > 0) { + omaf_dash_params.http_params_.retry_times_ = omaf_params.http_params.retry_times; + } + + omaf_dash_params.http_params_.bssl_verify_peer_ = omaf_params.http_params.ssl_verify_peer == 0 ? false : true; + + omaf_dash_params.http_params_.bssl_verify_host_ = omaf_params.http_params.ssl_verify_host == 0 ? false : true; + + omaf_dash_params.prediector_params_.enable_ = omaf_params.predictor_params.enable == 0 ? false : true; + + if (omaf_params.predictor_params.name) { + omaf_dash_params.prediector_params_.name_ = std::string(omaf_params.predictor_params.name); + } + if (omaf_params.predictor_params.libpath) { + omaf_dash_params.prediector_params_.libpath_ = std::string(omaf_params.predictor_params.libpath); + } + + omaf_dash_params.stats_params_.enable_ = omaf_params.statistic_params.enable == 0 ? false : true; + if (omaf_dash_params.stats_params_.enable_) { + omaf_dash_params.stats_params_.window_size_ms_ = omaf_params.statistic_params.window_size_ms; + } + + omaf_dash_params.syncer_params_.enable_ = omaf_params.synchronizer_params.enable == 0 ? 
false : true; + if (omaf_dash_params.syncer_params_.enable_) { + omaf_dash_params.syncer_params_.segment_range_size_ = omaf_params.synchronizer_params.segment_range_size; + } + + if (omaf_params.max_parallel_transfers > 0) { + omaf_dash_params.max_parallel_transfers_ = omaf_params.max_parallel_transfers; + } - return (Handler)((long)pSource); + if (omaf_params.segment_open_timeout_ms > 0) { + omaf_dash_params.segment_open_timeout_ms_ = omaf_params.segment_open_timeout_ms; + } + // for stitch + if (omaf_params.max_decode_width > 0) { + omaf_dash_params.max_decode_width_ = omaf_params.max_decode_width; + } + if (omaf_params.max_decode_height > 0) { + omaf_dash_params.max_decode_height_ = omaf_params.max_decode_height; + } + OMAF_LOG(LOG_INFO,"Dash parameter %s\n", omaf_dash_params.to_string().c_str()); + pSource->SetOmafDashParams(omaf_dash_params); + + return (Handler)((long)pSource); } -int OmafAccess_OpenMedia( Handler hdl, DashStreamingClient* pCtx, bool enablePredictor) -{ - OmafMediaSource* pSource = (OmafMediaSource*)hdl; - pSource->SetLoop(false); - return pSource->OpenMedia(pCtx->media_url, pCtx->cache_path, enablePredictor); +int OmafAccess_OpenMedia(Handler hdl, DashStreamingClient *pCtx, bool enablePredictor, char *predictPluginName, + char *libPath) { + if (hdl == nullptr || pCtx == nullptr) { + return ERROR_INVALID; + } + OmafMediaSource *pSource = (OmafMediaSource *)hdl; + pSource->SetLoop(false); + // for android ndk compile, transform char* to string is mandatory + string media_url = pCtx->media_url; + string cache_path = pCtx->cache_path; + string s_predictPluginName = predictPluginName; + string s_libPath = libPath; + return pSource->OpenMedia(media_url, cache_path, pCtx->log_callback, pCtx->plugin_def, pCtx->enable_extractor, enablePredictor, + s_predictPluginName, s_libPath); } -int OmafAccess_CloseMedia( Handler hdl ) +int OmafAccess_StartStreaming(Handler hdl) { - OmafMediaSource* pSource = (OmafMediaSource*)hdl; - - return pSource->CloseMedia(); + OmafMediaSource *pSource = (OmafMediaSource *)hdl; + return pSource->StartStreaming(); } -int OmafAccess_SeekMedia( Handler hdl, uint64_t time ) -{ - OmafMediaSource* pSource = (OmafMediaSource*)hdl; +int OmafAccess_CloseMedia(Handler hdl) { + OmafMediaSource *pSource = (OmafMediaSource *)hdl; + + return pSource->CloseMedia(); +} - pSource->SeekTo(time); +int OmafAccess_SeekMedia(Handler hdl, uint64_t time) { + OmafMediaSource *pSource = (OmafMediaSource *)hdl; - return 0; - //return pSource->SeekTo(time); + return pSource->SeekTo(time); } -int OmafAccess_GetMediaInfo( Handler hdl, DashMediaInfo* info ) -{ - OmafMediaSource* pSource = (OmafMediaSource*)hdl; - pSource->GetMediaInfo(info); - return ERROR_NONE; +int OmafAccess_GetMediaInfo(Handler hdl, DashMediaInfo *info) { + OmafMediaSource *pSource = (OmafMediaSource *)hdl; + pSource->GetMediaInfo(info); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + const char *dash_mode = (info->streaming_type == 1) ? 
"static" : "dynamic"; + int32_t frameNum = round(float(info->duration) / 1000 * ((float)info->stream_info[0].framerate_num / info->stream_info[0].framerate_den)); + tracepoint(bandwidth_tp_provider, segmentation_info, (char *)dash_mode, (int)info->stream_info[0].segmentDuration, (float)info->stream_info[0].framerate_num / info->stream_info[0].framerate_den, (uint32_t)info->stream_count, (uint64_t*)&(info->stream_info[0].bit_rate), frameNum, (int32_t)(info->duration / 1000)); +#endif +#endif + return ERROR_NONE; } -int OmafAccess_GetPacket( - Handler hdl, - int stream_id, - DashPacket* packet, - int* size, - uint64_t* pts, - bool needParams, - bool clearBuf ) -{ - OmafMediaSource* pSource = (OmafMediaSource*)hdl; - std::list pkts; - pSource->GetPacket(stream_id, &pkts, needParams, clearBuf); +int OmafAccess_GetPacket(Handler hdl, int stream_id, DashPacket *packet, int *size, uint64_t *pts, bool needParams, + bool clearBuf) { + OmafMediaSource *pSource = (OmafMediaSource *)hdl; + std::list pkts; + pSource->GetPacket(stream_id, &pkts, needParams, clearBuf); - if( 0 == pkts.size()) { - return ERROR_NULL_PACKET; - } + if (0 == pkts.size()) { + return ERROR_NULL_PACKET; + } + + *size = pkts.size(); - *size = pkts.size(); - - int i = 0; - for(auto it=pkts.begin(); it!=pkts.end(); it++){ - MediaPacket* pPkt = (MediaPacket*)(*it); - if(!pPkt) - { - *size -= 1; - continue; - } - int outSize = pPkt->Size(); - char* buf = (char*)malloc(outSize * sizeof(char)); - memcpy(buf, pPkt->Payload(), outSize); - RegionWisePacking *newRwpk = new RegionWisePacking; - RegionWisePacking *pRwpk = pPkt->GetRwpk(); - *newRwpk = *pRwpk; - newRwpk->rectRegionPacking = new RectangularRegionWisePacking[newRwpk->numRegions]; - memcpy(newRwpk->rectRegionPacking, pRwpk->rectRegionPacking, pRwpk->numRegions * sizeof(RectangularRegionWisePacking)); - packet[i].rwpk = newRwpk; - packet[i].buf = buf; - packet[i].size = outSize; - i++; - - delete pPkt; - pPkt = NULL; + int i = 0; + for (auto it = pkts.begin(); it != pkts.end(); it++) { + MediaPacket *pPkt = (MediaPacket *)(*it); + if (!pPkt) { + *size -= 1; + continue; + } + if (!(pPkt->GetEOS())) { + if (pPkt->GetMediaType() == MediaType_Video) + { + RegionWisePacking *newRwpk = new RegionWisePacking; + const RegionWisePacking &pRwpk = pPkt->GetRwpk(); + *newRwpk = pRwpk; + newRwpk->rectRegionPacking = new RectangularRegionWisePacking[newRwpk->numRegions]; + memcpy_s(newRwpk->rectRegionPacking, pRwpk.numRegions * sizeof(RectangularRegionWisePacking), + pRwpk.rectRegionPacking, pRwpk.numRegions * sizeof(RectangularRegionWisePacking)); + SourceResolution *srcRes = new SourceResolution[pPkt->GetQualityNum()]; + memcpy_s(srcRes, pPkt->GetQualityNum() * sizeof(SourceResolution), pPkt->GetSourceResolutions(), + pPkt->GetQualityNum() * sizeof(SourceResolution)); + packet[i].rwpk = newRwpk; + packet[i].buf = pPkt->MovePayload(); + packet[i].size = pPkt->Size(); + packet[i].segID = pPkt->GetSegID(); + packet[i].videoID = pPkt->GetVideoID(); + packet[i].video_codec = pPkt->GetCodecType(); + packet[i].pts = pPkt->GetPTS(); + packet[i].height = pPkt->GetVideoHeight(); + packet[i].width = pPkt->GetVideoWidth(); + packet[i].numQuality = pPkt->GetQualityNum(); + packet[i].qtyResolution = srcRes; + packet[i].tileRowNum = pPkt->GetVideoTileRowNum(); + packet[i].tileColNum = pPkt->GetVideoTileColNum(); + packet[i].bEOS = pPkt->GetEOS(); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + string tag = "sgmtIdx:" + to_string(pPkt->GetSegID()); + tag += ";videoIdx:" + to_string(pPkt->GetVideoID()); 
+ tracepoint(E2E_latency_tp_provider, + post_da_info, + pPkt->GetPTS(), + tag.c_str()); +#endif +#endif + } + else if (pPkt->GetMediaType() == MediaType_Audio) + { + packet[i].buf = pPkt->MovePayload(); + packet[i].size = pPkt->Size(); + packet[i].segID = pPkt->GetSegID(); + packet[i].pts = pPkt->GetPTS(); + packet[i].bEOS = pPkt->GetEOS(); + } + } else { + packet[i].bEOS = true; } - return ERROR_NONE; -} + i++; -int OmafAccess_SetupHeadSetInfo( Handler hdl, HeadSetInfo* clientInfo) -{ - OmafMediaSource* pSource = (OmafMediaSource*)hdl; + delete pPkt; + pPkt = NULL; + } - return pSource->SetupHeadSetInfo(clientInfo); + return ERROR_NONE; } -int OmafAccess_ChangeViewport( Handler hdl, HeadPose* pose) -{ - OmafMediaSource* pSource = (OmafMediaSource*)hdl; +int OmafAccess_SetupHeadSetInfo(Handler hdl, HeadSetInfo *clientInfo) { + OmafMediaSource *pSource = (OmafMediaSource *)hdl; - return pSource->ChangeViewport(pose); + return pSource->SetupHeadSetInfo(clientInfo); } -int OmafAccess_Statistic( Handler hdl, DashStatisticInfo* info ) -{ - OmafMediaSource* pSource = (OmafMediaSource*)hdl; - - return pSource->GetStatistic(info); +int OmafAccess_ChangeViewport(Handler hdl, HeadPose *pose) { + OmafMediaSource *pSource = (OmafMediaSource *)hdl; + return pSource->ChangeViewport(pose); } -int OmafAccess_Close( Handler hdl ) -{ - OmafMediaSource* pSource = (OmafMediaSource*)hdl; - delete pSource; - return ERROR_NONE; +int OmafAccess_Statistic(Handler hdl, DashStatisticInfo *info) { + OmafMediaSource *pSource = (OmafMediaSource *)hdl; + + return pSource->GetStatistic(info); } +int OmafAccess_Close(Handler hdl) { + OmafMediaSource *pSource = (OmafMediaSource *)hdl; + delete pSource; + // FIXME, when and where to do resource release + // OmafCurlDownloader::releaseCurlModule(); + return ERROR_NONE; +} diff --git a/src/player/Common.h b/src/OmafDashAccess/OmafDashAccessLog.cpp similarity index 80% rename from src/player/Common.h rename to src/OmafDashAccess/OmafDashAccessLog.cpp index ea68019c..9686cc24 100644 --- a/src/player/Common.h +++ b/src/OmafDashAccess/OmafDashAccessLog.cpp @@ -22,27 +22,16 @@ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. - - * */ - //! -//! \file: common.h -//! \brief: Include the common system and data type header files that needed +//! \file: OmafDashAccessLog.cpp +//! \brief: Include the log function implementation //! //! Created on April 30, 2019, 6:04 AM //! -#ifndef _COMMON_H_ -#define _COMMON_H_ +#include "OmafDashAccessLog.h" -#include "../utils/ns_def.h" -#include "RenderType.h" -#ifndef LOW_LATENCY_USAGE -#include "data_type.h" -#endif -#include "../utils/OmafStructure.h" -#include "../utils/GlogWrapper.h" -#endif /* _COMMON_H_ */ +LogFunction logCallBack = GlogFunction; diff --git a/src/VROmafPacking/AudioStream.h b/src/OmafDashAccess/OmafDashAccessLog.h similarity index 71% rename from src/VROmafPacking/AudioStream.h rename to src/OmafDashAccess/OmafDashAccessLog.h index c1ebe98f..f149aa17 100644 --- a/src/VROmafPacking/AudioStream.h +++ b/src/OmafDashAccess/OmafDashAccessLog.h @@ -25,43 +25,26 @@ */ //! -//! \file: AudioStream.h -//! \brief: Audio stream class definition -//! \detail: Define the audio stream data and operation +//! \file: OmafDashAccessLog.h +//! \brief: Include the log function declaration //! //! Created on April 30, 2019, 6:04 AM //! 
-#ifndef _AUDIOSTREAM_H_ -#define _AUDIOSTREAM_H_ +#ifndef _DASHACCESSLOG_H_ +#define _DASHACCESSLOG_H_ -#include "MediaStream.h" +#include "../utils/Log.h" -VCD_NS_BEGIN +//global logging callback function +extern LogFunction logCallBack; -//! -//! \class AudioStream -//! \brief Audio stream data definition and operation -//! - -class AudioStream : public MediaStream -{ -public: - //! - //! \brief Constructor - //! - AudioStream() - { - m_mediaType = AUDIOTYPE; - }; +#define FILE_NAME(x) (strrchr(x, '/') ? strrchr(x, '/')+1:x) - //! - //! \brief Destructor - //! - virtual ~AudioStream() {}; +#define PRINT_LOG(logLevel, source, line, fmt, args...) \ + logCallBack(logLevel, source, line, fmt, ##args); \ -private: -}; +#define OMAF_LOG(logLevel, fmt, args...) \ + PRINT_LOG(logLevel, FILE_NAME(__FILE__), __LINE__, fmt, ##args) \ -VCD_NS_END; -#endif /* _AUDIOSTREAM_H_ */ +#endif /* _DASHACCESSLOG_H_ */ diff --git a/src/OmafDashAccess/OmafDashDownload/OmafCurlDownloader.cpp b/src/OmafDashAccess/OmafDashDownload/OmafCurlDownloader.cpp deleted file mode 100644 index 1f3740dd..00000000 --- a/src/OmafDashAccess/OmafDashDownload/OmafCurlDownloader.cpp +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - */ - -//! -//! \file: OmafCurlDownloader.cpp -//! \brief: downloader class with libcurl -//! 
- -#include "OmafCurlDownloader.h" - -VCD_OMAF_BEGIN - -OmafCurlDownloader::OmafCurlDownloader() -{ - m_status = NOT_START; - m_downloadRate = 0; - m_endTime = 0; - m_startTime = 0; - m_curlHandler = NULL; -} - -OmafCurlDownloader::OmafCurlDownloader(string url):OmafCurlDownloader() -{ - m_url = url; -} - -OmafCurlDownloader::~OmafCurlDownloader() -{ - this->CleanUp(); - - if(m_observers.size()) - { - m_observers.clear(); - } -} - -ODStatus OmafCurlDownloader::InitCurl() -{ - curl_global_init(CURL_GLOBAL_ALL); - - m_curlHandler = curl_easy_init(); - CheckNullPtr_PrintLog_ReturnStatus(m_curlHandler, "failed to init curl library.", ERROR, OD_STATUS_OPERATION_FAILED); - - curl_easy_setopt(m_curlHandler, CURLOPT_URL, m_url.c_str()); - curl_easy_setopt(m_curlHandler, CURLOPT_SSL_VERIFYPEER, 0L); - curl_easy_setopt(m_curlHandler, CURLOPT_SSL_VERIFYHOST, 0L); - curl_easy_setopt(m_curlHandler, CURLOPT_WRITEFUNCTION, CallBackForCurl); - curl_easy_setopt(m_curlHandler, CURLOPT_WRITEDATA, (void*)this); - return OD_STATUS_SUCCESS; -} - - -ODStatus OmafCurlDownloader::Start() -{ - ODStatus st = OD_STATUS_SUCCESS; - - if(GetStatus() != NOT_START) - return OD_STATUS_INVALID; - - st = InitCurl(); - CheckAndReturn(st); - - StartThread(true); - - m_startTime = chrono::duration_cast(m_clock.now().time_since_epoch()).count(); - - SetStatus(DOWNLOADING); - - return st; -} - -ODStatus OmafCurlDownloader::Stop() -{ - this->SetStatus(STOPPING); - this->Join(); - return OD_STATUS_SUCCESS; -} - -ODStatus OmafCurlDownloader::Read(uint8_t* data, size_t size) -{ - return m_stream.GetStream((char*)data, size); -} - -ODStatus OmafCurlDownloader::Peek(uint8_t* data, size_t size) -{ - return m_stream.PeekStream((char*)data, size); -} - -ODStatus OmafCurlDownloader::Peek(uint8_t* data, size_t size, size_t offset) -{ - return m_stream.PeekStream((char*)data, size, offset); -} - -void* OmafCurlDownloader::Download(void* downloader) -{ - OmafCurlDownloader* curlDownloader = static_cast(downloader); - LOG(INFO)<<"now download "<m_url<m_curlHandler); - curl_easy_cleanup(curlDownloader->m_curlHandler); - curl_global_cleanup(); - - if(curlDownloader->GetStatus() == STOPPING) - curlDownloader->SetStatus(STOPPED); - else - { - curlDownloader->SetStatus(DOWNLOADED); - } - - m_stream.ReachedEOS(); - - return nullptr; -} - -void OmafCurlDownloader::Run() -{ - Download(this); -} - -ODStatus OmafCurlDownloader::ObserverAttach(OmafDownloaderObserver *observer) -{ - m_observerLock.lock(); - - m_observers.insert(observer); - - m_observerLock.unlock(); - - return OD_STATUS_SUCCESS; -} - -ODStatus OmafCurlDownloader::ObserverDetach(OmafDownloaderObserver* observer) -{ - ScopeLock tmplock(m_observerLock); - - if(!m_observers.size() || !observer) - return OD_STATUS_INVALID; - - if(m_observers.find(observer) == m_observers.end()) - { - //LOG(WARNING)<<"The observer is not recorded in this downloader!"<DownloadStatusNotify(this->GetStatus()); - } - m_observerLock.unlock(); - - return OD_STATUS_SUCCESS; -} - -ODStatus OmafCurlDownloader::NotifyDownloadedData() -{ - m_observerLock.lock(); - for(auto observer: m_observers) - { - observer->DownloadDataNotify(this->m_stream.GetTotalStreamLength()); - } - m_observerLock.unlock(); - - return OD_STATUS_SUCCESS; -} - -ODStatus OmafCurlDownloader::CleanUp() -{ - // make sure downloader is stopped or wait time is more than 10 mins - int64_t waitTime = 0; - while(m_status != STOPPED && waitTime < 6000000) - { - usleep(100); - waitTime++; - } - - return OD_STATUS_SUCCESS; -} - -size_t 
OmafCurlDownloader::CallBackForCurl(void* downloadedData, size_t dataSize, size_t typeSize, void* handle) -{ - OmafCurlDownloader* curlDownloder = (OmafCurlDownloader*) handle; - // return 0 directly if the downloader status is stopping - if(curlDownloder->GetStatus() == STOPPING) - return 0; - - size_t size = dataSize * typeSize; - char* data = new char[size]; - memcpy(data, downloadedData, size); - curlDownloder->m_stream.AddSubStream(data, size); - - // notify all the observers that more data is downloaded - curlDownloder->NotifyDownloadedData(); - - //calculate the download rate - uint64_t endTime = chrono::duration_cast(curlDownloder->m_clock.now().time_since_epoch()).count(); - - double downloadRate = curlDownloder->m_stream.GetTotalStreamLength() / ((endTime - curlDownloder->m_startTime) * 1000.0); - curlDownloder->SetDownloadRate(downloadRate); - - return size; -} - -ODStatus OmafCurlDownloader::SetStatus(DownloaderStatus status) -{ - ODStatus ret = OD_STATUS_SUCCESS; - - if(m_stream.IsEOS() && status == STOPPING) - status = STOPPED; - - m_statusLock.lock(); - m_status = status; - m_statusLock.unlock(); - - // notify all the observers that status has been changed - ret = NotifyStatus(); - - return ret; -} - -DownloaderStatus OmafCurlDownloader::GetStatus() -{ - DownloaderStatus status = NOT_START; - - m_statusLock.lock(); - status = m_status; - m_statusLock.unlock(); - - return status; -} - -double OmafCurlDownloader::GetDownloadRate() -{ - m_rateLock.lock(); - double rate = m_downloadRate; - m_rateLock.unlock(); - - return rate; -} - -void OmafCurlDownloader::SetDownloadRate(double rate) -{ - m_rateLock.lock(); - m_downloadRate = rate; - m_rateLock.unlock(); -} - -VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafDashDownload/OmafCurlDownloader.h b/src/OmafDashAccess/OmafDashDownload/OmafCurlDownloader.h deleted file mode 100644 index 1b1d6752..00000000 --- a/src/OmafDashAccess/OmafDashDownload/OmafCurlDownloader.h +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - */ - -//! -//! \file: OmafCurlDownloader.h -//! \brief: downloader class with libcurl -//! 
- -#ifndef OMAFCURLDOWNLOADER_H -#define OMAFCURLDOWNLOADER_H - -#include -#include "OmafDownloader.h" -#include "Stream.h" -#include "../OmafDashParser/SegmentElement.h" - -VCD_USE_VRVIDEO; - -VCD_OMAF_BEGIN - -//! -//! \class: OmafCurlDownloader -//! \brief: downloader with libcurl -//! -class OmafCurlDownloader: public OmafDownloader, ThreadLock, Threadable -{ -public: - - //! - //! \brief Constructor - //! - OmafCurlDownloader(); - - //! - //! \brief Constructor with parameter - //! - OmafCurlDownloader(string url); - - //! - //! \brief Destructor - //! - virtual ~OmafCurlDownloader(); - - //! - //! \brief Stop download - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Stop(); - - //! - //! \brief start download - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Start(); - - //! - //! \brief Read given size stream to data pointer - //! - //! \param [in] size - //! size of stream that should read - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Read(uint8_t* data, size_t size); - - //! - //! \brief Peek given size stream to data pointer - //! - //! \param [in] size - //! size of stream that should read - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Peek(uint8_t* data, size_t size); - - //! - //! \brief Peek given size stream to data pointer start from offset - //! - //! \param [in] size - //! size of stream that should read - //! \param [in] offset - //! stream offset that read should start - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Peek(uint8_t* data, size_t size, size_t offset); - - //! - //! \brief Attach download observer - //! - //! \param [in] observer - //! observer need to be attached - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus ObserverAttach(OmafDownloaderObserver *observer); - - //! - //! \brief Dettach download observer - //! - //! \param [in] observer - //! observer need to be dettached - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus ObserverDetach(OmafDownloaderObserver* observer); - - //! - //! \brief Get download rate - //! - //! \return double - //! download rate - //! - virtual double GetDownloadRate(); - - //! - //! \brief Interface implementation from base class: Threadable - //! - virtual void Run(); - -private: - - //! - //! \brief Notify observers the status has changed - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus NotifyStatus(); - - //! - //! \brief Notify observers there is new downloaded data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus NotifyDownloadedData(); - - //! - //! \brief Initialize curl library - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus InitCurl(); - - //! - //! \brief Clean up curl related resources - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus CleanUp(); - - //! - //! \brief Callback function for curl - //! - //! \param [in] downloadedData - //! 
pointer to downloaded data - //! \param [in] dataSize - //! size of data - //! \param [in] typeSize - //! size of data type - //! \param [in] handle - //! handle for this class - //! - //! \return size_t - //! the downloaded size - //! - static size_t CallBackForCurl(void* downloadedData, size_t dataSize, size_t typeSize, void* handle); - - //! - //! \brief Download run in thread - //! - //! \param [in] downloader - //! the downloader handle - //! - //! \return void - //! - void* Download(void* downloader); - - //! - //! \brief Set download status - //! - //! \param [in] status - //! download status - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus SetStatus(DownloaderStatus status); - - //! - //! \brief Set download rate - //! - //! \param [in] rate - //! download rate - //! - //! \return void - //! - void SetDownloadRate(double rate); - - //! - //! \brief Get download status - //! - //! \return DownloaderStatus - //! status - //! - DownloaderStatus GetStatus(); - - unordered_set m_observers; //!< attached downloader observers - DownloaderStatus m_status; //!< download status - ThreadLock m_statusLock; //!< locker for status - ThreadLock m_observerLock; //!< locker for observers - Stream m_stream; //!< download stream - CURL* m_curlHandler; //!< curl handle - string m_url; //!< download url - - chrono::high_resolution_clock m_clock; //!< clock for calculating rate - uint64_t m_startTime; //!< download start time - uint64_t m_endTime; //!< download end time - double m_downloadRate; //!< real-time download rate (bytes/s) - ThreadLock m_rateLock; //!< lock for download rate -}; - -VCD_OMAF_END; - -#endif //OMAFCURLDOWNLOADER_H \ No newline at end of file diff --git a/src/OmafDashAccess/OmafDashDownload/OmafCurlEasyHandler.cpp b/src/OmafDashAccess/OmafDashDownload/OmafCurlEasyHandler.cpp new file mode 100644 index 00000000..0027a363 --- /dev/null +++ b/src/OmafDashAccess/OmafDashDownload/OmafCurlEasyHandler.cpp @@ -0,0 +1,471 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: OmafCurlEasyHandler.cpp +//! \brief: curl easy handlers class implementation +//! 
+#include "OmafCurlEasyHandler.h"
+
+#include
+
+//#include "../../utils/GlogWrapper.h" // GLOG
+
+namespace VCD {
+namespace OMAF {
+
+HttpHeader OmafCurlEasyHelper::header(CURL *easy_curl) noexcept {
+  try {
+    HttpHeader header;
+    if (easy_curl == nullptr) {
+      return header;
+    }
+
+    long ldata = -1;
+    CURLcode res;
+    res = curl_easy_getinfo(easy_curl, CURLINFO_RESPONSE_CODE, &ldata);
+    if (CURLE_OK == res) {
+      header.http_status_code_ = ldata;
+    }
+    curl_off_t cl;
+    res = curl_easy_getinfo(easy_curl, CURLINFO_CONTENT_LENGTH_DOWNLOAD_T, &cl);
+    if (CURLE_OK == res) {
+      header.content_length_ = static_cast<int64_t>(cl);
+    }
+    return header;
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when set params for curl easy handler, ex: %s\n", ex.what());
+    return HttpHeader();
+  }
+}
+
+double OmafCurlEasyHelper::speed(CURL *easy_curl) noexcept {
+  try {
+    if (easy_curl == nullptr) {
+      return 0.0f;
+    }
+    curl_off_t speed;
+    CURLcode res = curl_easy_getinfo(easy_curl, CURLINFO_SPEED_DOWNLOAD_T, &speed);
+    if (CURLE_OK == res) {
+      return static_cast<double>(speed) * 8;
+    } else {
+      return 0.0f;
+    }
+
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when get speed for curl easy handler, ex: %s\n", ex.what());
+    return 0.0f;
+  }
+}
+
+OMAF_STATUS OmafCurlEasyHelper::setParams(CURL *easy_curl, CurlParams params) noexcept {
+  try {
+    if (easy_curl == nullptr) {
+      return ERROR_NULL_PTR;
+    }
+
+    if (!params.http_params_.bssl_verify_peer_) {
+      curl_easy_setopt(easy_curl, CURLOPT_SSL_VERIFYPEER, 0L);
+    }
+    if (!params.http_params_.bssl_verify_host_) {
+      curl_easy_setopt(easy_curl, CURLOPT_SSL_VERIFYHOST, 0L);
+    }
+
+    if (params.http_params_.conn_timeout_ > 0) {
+      curl_easy_setopt(easy_curl, CURLOPT_CONNECTTIMEOUT_MS, params.http_params_.conn_timeout_);
+    }
+    if (params.http_params_.total_timeout_ > 0) {
+      curl_easy_setopt(easy_curl, CURLOPT_TIMEOUT_MS, params.http_params_.total_timeout_);
+    }
+
+    if (!params.http_proxy_.http_proxy_.empty()) {
+      curl_easy_setopt(easy_curl, CURLOPT_PROXY, params.http_proxy_.http_proxy_.c_str());
+      curl_easy_setopt(easy_curl, CURLOPT_PROXYTYPE, CURLPROXY_HTTP);
+    }
+    if (!params.http_proxy_.https_proxy_.empty()) {
+      curl_easy_setopt(easy_curl, CURLOPT_PROXY, params.http_proxy_.https_proxy_.c_str());
+      curl_easy_setopt(easy_curl, CURLOPT_PROXYTYPE, CURLPROXY_HTTPS);
+    }
+    if (!params.http_proxy_.no_proxy_.empty()) {
+      curl_easy_setopt(easy_curl, CURLOPT_NOPROXY, params.http_proxy_.no_proxy_.c_str());
+    }
+    if (!params.http_proxy_.proxy_user_.empty()) {
+      curl_easy_setopt(easy_curl, CURLOPT_USERNAME, params.http_proxy_.proxy_user_.c_str());
+    }
+    if (!params.http_proxy_.proxy_passwd_.empty()) {
+      curl_easy_setopt(easy_curl, CURLOPT_PASSWORD, params.http_proxy_.proxy_passwd_.c_str());
+    }
+    return ERROR_NONE;
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when set params for curl easy handler, ex: %s\n", ex.what());
+    return ERROR_INVALID;
+  }
+}
+
+inline long OmafCurlEasyHelper::namelookupTime(CURL *easy_curl) noexcept {
+  curl_off_t timev = 0;
+  curl_easy_getinfo(easy_curl, CURLINFO_NAMELOOKUP_TIME_T, &timev);
+
+  return static_cast<long>(timev);
+}
+inline long OmafCurlEasyHelper::connectTime(CURL *easy_curl) noexcept {
+  curl_off_t timev = 0;
+  curl_easy_getinfo(easy_curl, CURLINFO_CONNECT_TIME_T, &timev);
+
+  return static_cast<long>(timev);
+}
+inline long OmafCurlEasyHelper::appConnectTime(CURL *easy_curl) noexcept {
+  curl_off_t timev = 0;
+  curl_easy_getinfo(easy_curl, CURLINFO_APPCONNECT_TIME_T, &timev);
+
+  return static_cast<long>(timev);
+}
+inline long OmafCurlEasyHelper::preTransferTime(CURL *easy_curl) noexcept {
+  curl_off_t timev = 0;
+  curl_easy_getinfo(easy_curl, CURLINFO_PRETRANSFER_TIME_T, &timev);
+
+  return static_cast<long>(timev);
+}
+inline long OmafCurlEasyHelper::startTransferTime(CURL *easy_curl) noexcept {
+  curl_off_t timev = 0;
+  curl_easy_getinfo(easy_curl, CURLINFO_STARTTRANSFER_TIME_T, &timev);
+
+  return static_cast<long>(timev);
+}
+
+inline long OmafCurlEasyHelper::redirectTime(CURL *easy_curl) noexcept {
+  curl_off_t timev = 0;
+  curl_easy_getinfo(easy_curl, CURLINFO_REDIRECT_TIME_T, &timev);
+
+  return static_cast<long>(timev);
+}
+void OmafCurlEasyHelper::curlTime(CURL *easy_curl, CurlTimes &curl_time) noexcept {
+  curl_time.nameloopup_ = namelookupTime(easy_curl);
+  curl_time.connect_ = connectTime(easy_curl);
+  curl_time.app_connect_ = appConnectTime(easy_curl);
+  curl_time.pre_transfer_ = preTransferTime(easy_curl);
+  curl_time.start_transfer_ = startTransferTime(easy_curl);
+  curl_time.total_ = totalTime(easy_curl);
+  curl_time.redirect_ = redirectTime(easy_curl);
+}
+
+OmafCurlEasyDownloader::~OmafCurlEasyDownloader() { close(); }
+
+OMAF_STATUS OmafCurlEasyDownloader::init(const CurlParams &params) noexcept {
+  try {
+    OMAF_STATUS ret = ERROR_NONE;
+    std::lock_guard<std::mutex> lock(easy_curl_mutex_);
+    if (easy_curl_) {
+      ret = close();
+      if (ret != ERROR_NONE) {
+        OMAF_LOG(LOG_ERROR, "Failed to clean older easy handle! err=%d\n", ret);
+        return ret;
+      }
+    }
+
+    easy_curl_ = curl_easy_init();
+    if (easy_curl_ == nullptr) {
+      OMAF_LOG(LOG_ERROR, "Failed to create the curl easy handler!\n");
+      return ERROR_NULL_PTR;
+    }
+    curl_params_ = params;
+    return ERROR_NONE;
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when create the curl easy handler, ex: %s\n", ex.what());
+    return ERROR_INVALID;
+  }
+}
+
+OMAF_STATUS OmafCurlEasyDownloader::open(const std::string &url) noexcept {
+  try {
+    std::lock_guard<std::mutex> lock(easy_curl_mutex_);
+    url_ = url;
+    OMAF_LOG(LOG_INFO, "To open the url: %s\n", url.c_str());
+    if (easy_curl_ == nullptr) {
+      OMAF_LOG(LOG_ERROR, "curl easy handler is invalid!\n");
+      return ERROR_NULL_PTR;
+    }
+
+    curl_easy_reset(easy_curl_);
+    OMAF_STATUS ret = OmafCurlEasyHelper::setParams(easy_curl_, curl_params_);
+    if (ERROR_NONE != ret) {
+      OMAF_LOG(LOG_ERROR, "Failed to set params for easy curl handler!\n");
+      return ret;
+    }
+
+    curl_easy_setopt(easy_curl_, CURLOPT_URL, url.c_str());
+    curl_easy_setopt(easy_curl_, CURLOPT_PRIVATE, url.c_str());
+    curl_easy_setopt(easy_curl_, CURLOPT_WRITEFUNCTION, curlBodyCallback);
+    curl_easy_setopt(easy_curl_, CURLOPT_WRITEDATA, (void *)this);
+
+    return ERROR_NONE;
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when set options for curl easy handler, ex: %s\n", ex.what());
+    return ERROR_INVALID;
+  }
+}
+
+OMAF_STATUS OmafCurlEasyDownloader::start(int64_t offset, int64_t size, onData dcb, onState scb) noexcept {
+  try {
+    std::lock_guard<std::mutex> lock(easy_curl_mutex_);
+    if (offset > 0 || size > 0) {
+      std::stringstream ss;
+      if (offset > 0) {
+        ss << offset;
+      }
+      ss << "-";
+      if (size > 0) {
+        ss << size;
+      }
+      // OMAF_LOG(LOG_INFO, "To download the range: %s\n", ss.str());
+      curl_easy_setopt(easy_curl_, CURLOPT_RANGE, ss.str().c_str());
+    }
+    dcb_ = dcb;
+    scb_ = scb;
+
+    // TODO, easy mode
+    if (work_mode_ == CurlWorkMode::EASY_MODE) {
+      CURLcode res = curl_easy_perform(easy_curl_);
+
+      if (res != CURLE_OK) {
+        OMAF_LOG(LOG_ERROR, "Failed to download the url: %s\n", url_.c_str());
+        if (scb_) {
+          scb_(OmafCurlEasyDownloader::State::FAILED);
+        }
+        return ERROR_DOWNLOAD_FAIL;
+      } else {
+        if (scb_) {
+          scb_(OmafCurlEasyDownloader::State::SUCCESS);
+        }
+        return ERROR_NONE;
+      }
+    }
+
+    return ERROR_NONE;
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when start curl easy handler, ex: %s\n", ex.what());
+    return ERROR_INVALID;
+  }
+}
+
+OMAF_STATUS OmafCurlEasyDownloader::stop() noexcept {
+  {
+    std::lock_guard<std::mutex> lock(cb_mutex_);
+    dcb_ = nullptr;
+    scb_ = nullptr;
+  }
+
+  return ERROR_NONE;
+}
+
+OMAF_STATUS OmafCurlEasyDownloader::close() noexcept {
+  try {
+    stop();
+
+    std::lock_guard<std::mutex> lock(easy_curl_mutex_);
+
+    if (easy_curl_) {
+      curl_easy_cleanup(easy_curl_);
+      easy_curl_ = nullptr;
+    }
+    return ERROR_NONE;
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when close curl easy handler, ex: %s\n", ex.what());
+    return ERROR_INVALID;
+  }
+}
+
+HttpHeader OmafCurlEasyDownloader::header() noexcept {
+  try {
+    std::lock_guard<std::mutex> lock(easy_curl_mutex_);
+    return OmafCurlEasyHelper::header(this->easy_curl_);
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when read header, ex: %s\n", ex.what());
+    return HttpHeader();
+  }
+}
+
+double OmafCurlEasyDownloader::speed() noexcept {
+  try {
+    std::lock_guard<std::mutex> lock(easy_curl_mutex_);
+    return OmafCurlEasyHelper::speed(this->easy_curl_);
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when read average speed, ex: %s\n", ex.what());
+    return 0.0f;
+  }
+}
+
+void OmafCurlEasyDownloader::receiveSB(std::unique_ptr<StreamBlock> sb) noexcept {
+  try {
+    {
+      std::lock_guard<std::mutex> lock(cb_mutex_);
+      if (dcb_ != nullptr) {
+        dcb_(std::move(sb));
+      }
+    }
+
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when process stream block! ex: %s\n", ex.what());
+  }
+}
+
+size_t OmafCurlEasyDownloader::curlBodyCallback(char *ptr, size_t size, size_t nmemb, void *userdata) noexcept {
+  size_t bsize = size * nmemb;
+
+  try {
+    //OMAF_LOG(LOG_INFO, "Receive bytes size= %lld\n", bsize);
+    if (ptr == nullptr || bsize <= 0) {
+      OMAF_LOG(LOG_ERROR, "The buffer from curl handler is empty!\n");
+      return bsize;
+    }
+    OmafCurlEasyDownloader *phandler = reinterpret_cast<OmafCurlEasyDownloader *>(userdata);
+    if (phandler == nullptr) {
+      OMAF_LOG(LOG_ERROR, "The OmafCurlEasyDownloader invalid handler!\n");
+      return bsize;
+    }
+    std::unique_ptr<StreamBlock> sb = make_unique_vcd<StreamBlock>();
+    if (!sb->resize(bsize)) {
+      OMAF_LOG(LOG_ERROR, "Failed to allocate the target buffer for curl download data!\n");
+      return bsize;
+    }
+    // FIXME, use security memcpy_s
+    memcpy_s(sb->buf(), sb->capacity(), ptr, bsize);
+    sb->size(bsize);
+
+    phandler->receiveSB(std::move(sb));
+    return bsize;
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when receive data from curl easy handler, ex: %s\n", ex.what());
+    return bsize;
+  }
+}
+OmafCurlEasyDownloaderPool::~OmafCurlEasyDownloaderPool() {
+  try {
+    std::lock_guard<std::mutex> lock(easy_downloader_pool_mutex_);
+    while (easy_downloader_pool_.size()) {
+      auto downloader = std::move(easy_downloader_pool_.front());
+      downloader->close();
+      easy_downloader_pool_.pop();
+    }
+  } catch (const std::exception &ex) {
+    OMAF_LOG(LOG_ERROR, "Exception when release all easy downloader! ex=%s\n", ex.what());
+  }
+}
+
+OmafCurlEasyDownloader::Ptr OmafCurlEasyDownloaderPool::pop() noexcept {
+  try {
+    // 1.
check and get the downloader from the pool + { + std::lock_guard lock(easy_downloader_pool_mutex_); + if (easy_downloader_pool_.size()) { + OmafCurlEasyDownloader::Ptr downloader = std::move(easy_downloader_pool_.front()); + downloader->params(curl_params_); + easy_downloader_pool_.pop(); + return std::move(downloader); + } + if (downloader_count_ >= max_downloader_) { + return nullptr; + } + } + // 2. create a new downloader + OmafCurlEasyDownloader::Ptr downloader = std::make_shared(); + if (ERROR_NONE == downloader->init(curl_params_)) { + return std::move(downloader); + } + return nullptr; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when request one easy downloader! ex=%s\n", ex.what()); + return nullptr; + } +} +void OmafCurlEasyDownloaderPool::push(OmafCurlEasyDownloader::Ptr downloader) noexcept { + try { + std::lock_guard lock(easy_downloader_pool_mutex_); + downloader->stop(); + easy_downloader_pool_.push(downloader); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when return one easy downloader! ex=%s\n", ex.what()); + } +} + +OmafCurlChecker::~OmafCurlChecker() { close(); } + +OMAF_STATUS OmafCurlChecker::init(const CurlParams ¶ms) noexcept { + try { + easy_curl_ = curl_easy_init(); + if (easy_curl_ == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to create the curl easy handler!\n"); + return ERROR_NULL_PTR; + } + OMAF_STATUS ret = OmafCurlEasyHelper::setParams(easy_curl_, params); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to set params for easy curl handler!\n"); + return ret; + } + curl_easy_setopt(easy_curl_, CURLOPT_NOBODY, 1L); + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when create the curl easy hanlder, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} +OMAF_STATUS OmafCurlChecker::check(const std::string &url) noexcept { + try { + if (easy_curl_ == nullptr) { + OMAF_LOG(LOG_ERROR, "curl easy handler is invalid!\n"); + return ERROR_INVALID; + } + + curl_easy_setopt(easy_curl_, CURLOPT_URL, url.c_str()); + + CURLcode res = curl_easy_perform(easy_curl_); + if (CURLE_OK == res) { + long response_code = 0; + curl_easy_getinfo(easy_curl_, CURLINFO_RESPONSE_CODE, &response_code); + return OmafCurlEasyHelper::success(response_code); + } + return ERROR_INVALID; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when call curl easy handler, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} +OMAF_STATUS OmafCurlChecker::close() noexcept { + try { + OMAF_LOG(LOG_INFO, "To close the curl checker!\n"); + if (easy_curl_) { + curl_easy_cleanup(easy_curl_); + easy_curl_ = nullptr; + } + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when close curl easy hanlder, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} +} // namespace OMAF +} // namespace VCD diff --git a/src/OmafDashAccess/OmafDashDownload/OmafCurlEasyHandler.h b/src/OmafDashAccess/OmafDashDownload/OmafCurlEasyHandler.h new file mode 100644 index 00000000..bd042120 --- /dev/null +++ b/src/OmafDashAccess/OmafDashDownload/OmafCurlEasyHandler.h @@ -0,0 +1,228 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: OmafCurlEasyDownloader.h +//! \brief: curl easy handlers class +//! +#ifndef OMAFCURLEASYWRAPPER_H +#define OMAFCURLEASYWRAPPER_H +#include + +#include "../common.h" // VCD::NonCopyable +#include "../../utils/ns_def.h" // namespace VCD::OMAF +#include "../OmafTypes.h" +#include "OmafDownloader.h" // VCD::OMAF::State, VCD::OMAF::DownloadPolicy +#include "Stream.h" // VCD::OMAF::StreamBlock + +#include +#include +#include +#include +#include + +// namespace +namespace VCD { +namespace OMAF { + +struct _curlParams { + OmafDashHttpProxy http_proxy_; + OmafDashHttpParams http_params_; + std::string to_string() { + std::stringstream ss; + ss << "curl params: {" << std::endl; + ss << "\t" << http_proxy_.to_string() << std::endl; + ss << "\t" << http_params_.to_string() << std::endl; + ss << "}"; + return ss.str(); + } +}; + +using CurlParams = struct _curlParams; + +struct _httpHeader { + long http_status_code_ = -1; + int64_t content_length_ = -1; +}; + +using HttpHeader = struct _httpHeader; + +struct _curlTimes { + long nameloopup_ = 0; + long connect_ = 0; + long app_connect_ = 0; + long pre_transfer_ = 0; + long start_transfer_ = 0; + long total_ = 0; + long redirect_ = 0; +}; +using CurlTimes = struct _curlTimes; + +class OmafCurlEasyDownloaderPool; + +class OmafCurlEasyHelper : public NonCopyable { + protected: + OmafCurlEasyHelper() = default; + + public: + virtual ~OmafCurlEasyHelper(){}; + + public: + static HttpHeader header(CURL *easy_curl) noexcept; + static double speed(CURL *easy_curl) noexcept; + static OMAF_STATUS setParams(CURL *easy_curl, CurlParams parmas) noexcept; + static bool success(const long http_status_code_) noexcept { + return http_status_code_ >= 200 && http_status_code_ < 300; + } + static long namelookupTime(CURL *easy_curl) noexcept; + static long connectTime(CURL *easy_curl) noexcept; + static long appConnectTime(CURL *easy_curl) noexcept; + static long preTransferTime(CURL *easy_curl) noexcept; + static long startTransferTime(CURL *easy_curl) noexcept; + + static long redirectTime(CURL *easy_curl) noexcept; + static void curlTime(CURL *easy_curl, CurlTimes &) noexcept; + static long totalTime(CURL *easy_curl) noexcept { + curl_off_t timev = 0; + curl_easy_getinfo(easy_curl, CURLINFO_TOTAL_TIME_T, &timev); + + return static_cast(timev); + }; +}; + +class OmafCurlEasyDownloader : public VCD::NonCopyable { + friend OmafCurlEasyDownloaderPool; + + public: + enum class CurlWorkMode { + MULTI_MODE = 0, + EASY_MODE = 1, + }; + + enum class 
State { + DOWNLOADING = 0, + SUCCESS = 1, + STOPPED = 2, + FAILED = 3, + }; + + public: + using onData = std::function)>; + using onState = std::function; + using Ptr = std::shared_ptr; + + public: + OmafCurlEasyDownloader(CurlWorkMode work_mode = CurlWorkMode::MULTI_MODE) : work_mode_(work_mode){}; + + virtual ~OmafCurlEasyDownloader(); + + public: + OMAF_STATUS init(const CurlParams ¶ms) noexcept; + OMAF_STATUS open(const std::string &url) noexcept; + OMAF_STATUS start(int64_t offset, int64_t size, onData scb, onState fcb) noexcept; + OMAF_STATUS stop() noexcept; + OMAF_STATUS close() noexcept; + HttpHeader header() noexcept; + double speed() noexcept; + + public: + static size_t curlBodyCallback(char *ptr, size_t size, size_t nmemb, void *userdata) noexcept; + + public: + inline CURL *handler() noexcept { + std::lock_guard lock(easy_curl_mutex_); + return easy_curl_; + }; + inline CurlTimes curlTimes() { + CurlTimes curl_times; + if (easy_curl_) { + OmafCurlEasyHelper::curlTime(easy_curl_, curl_times); + } + return curl_times; + } + inline long downloadTime() { + if (easy_curl_) { + return OmafCurlEasyHelper::totalTime(easy_curl_); + } + return 0; + } + + private: + void receiveSB(std::unique_ptr) noexcept; + void params(const CurlParams ¶ms) noexcept { curl_params_ = params; } + + private: + CurlWorkMode work_mode_ = CurlWorkMode::MULTI_MODE; + CurlParams curl_params_; + std::mutex easy_curl_mutex_; + CURL *easy_curl_ = nullptr; + std::string url_; + std::mutex cb_mutex_; + onData dcb_ = nullptr; + onState scb_ = nullptr; + State state_ = State::DOWNLOADING; +}; + +class OmafCurlEasyDownloaderPool : public VCD::NonCopyable { + public: + OmafCurlEasyDownloaderPool(int32_t max_downloader) : max_downloader_(max_downloader){}; + virtual ~OmafCurlEasyDownloaderPool(); + + public: + OmafCurlEasyDownloader::Ptr pop() noexcept; + void push(OmafCurlEasyDownloader::Ptr) noexcept; + void params(const CurlParams ¶ms) noexcept { curl_params_ = params; } + + private: + int32_t max_downloader_; + int32_t downloader_count_ = 0; + CurlParams curl_params_; + std::mutex easy_downloader_pool_mutex_; + std::queue easy_downloader_pool_; +}; + +class OmafCurlChecker : public VCD::NonCopyable { + public: + using Ptr = std::shared_ptr; + + public: + OmafCurlChecker(){}; + ~OmafCurlChecker(); + + public: + OMAF_STATUS init(const CurlParams ¶ms) noexcept; + OMAF_STATUS check(const std::string &url) noexcept; + OMAF_STATUS close() noexcept; + + private: + CURL *easy_curl_ = nullptr; +}; + +} // namespace OMAF +} // namespace VCD + +#endif // OMAFCURLEASYWRAPPER_H \ No newline at end of file diff --git a/src/OmafDashAccess/OmafDashDownload/OmafCurlMultiHandler.cpp b/src/OmafDashAccess/OmafDashDownload/OmafCurlMultiHandler.cpp new file mode 100644 index 00000000..7149a7f3 --- /dev/null +++ b/src/OmafDashAccess/OmafDashDownload/OmafCurlMultiHandler.cpp @@ -0,0 +1,452 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: OmafCurlMultiHandler.cpp +//! \brief: downloader class with libcurl multi handler +//! + +#include "OmafCurlMultiHandler.h" + +namespace VCD { +namespace OMAF { + +std::atomic_size_t OmafDownloadTask::TASK_ID(0); + +OmafCurlMultiDownloader::OmafCurlMultiDownloader(long max_parallel_transfers) + : max_parallel_transfers_(max_parallel_transfers) {} +OmafCurlMultiDownloader::~OmafCurlMultiDownloader() { close(); } + +OMAF_STATUS OmafCurlMultiDownloader::init(const CurlParams& p, OmafDownloadTask::TaskDoneCB taskcb) noexcept { + try { + // 1. params + curl_params_ = p; + task_done_cb_ = taskcb; + + // 2. create the multi handle + curl_multi_ = curl_multi_init(); + max_parallel_ = (max_parallel_transfers_ > 0) ? max_parallel_transfers_ : DEFAULT_MAX_PARALLER_TRANSFERS; + OMAF_LOG(LOG_INFO, "Set max transfer to %ld\n", max_parallel_); + curl_multi_setopt(curl_multi_, CURLMOPT_MAXCONNECTS, max_parallel_ << 1); + + // 3. create the easy downloader pool + downloader_pool_ = std::move(make_unique_vcd(max_parallel_ << 1)); + if (downloader_pool_ == NULL) { + OMAF_LOG(LOG_ERROR, "Failed to create the downloader pool!\n"); + return ERROR_NULL_PTR; + } + downloader_pool_->params(curl_params_); + + // 4. create thread for multi runner + bworking_ = true; + worker_ = std::thread(&OmafCurlMultiDownloader::threadRunner, this); + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when init curl multi handler, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::close() noexcept { + try { + OMAF_LOG(LOG_INFO, "To close the curl multi downloader!\n"); + // 1. mark Stop + bworking_ = false; + + // 2. detach the thread + if (worker_.joinable()) { + bworking_ = false; + worker_.join(); + } + // 3. 
clean up + if (curl_multi_) { + curl_multi_cleanup(curl_multi_); + curl_multi_ = nullptr; + } + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when close curl multi handler, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::addTask(OmafDownloadTask::Ptr task) noexcept { + try { + if (task.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Try to add empty task!\n"); + return ERROR_INVALID; + } + OMAF_LOG(LOG_INFO, "01-task id %lld, task count=%d\n", task->id(), task.use_count()); + task->state(OmafDownloadTask::State::READY); + { + std::lock_guard lock(ready_task_list_mutex_); + ready_task_list_.push_back(task); + } + task_size_.fetch_add(1); + OMAF_LOG(LOG_INFO, "02-task id %lld, task count=%d\n", task->id(), task.use_count()); + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when add task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} +OMAF_STATUS OmafCurlMultiDownloader::removeTask(OmafDownloadTask::Ptr task) noexcept { + try { + if (task.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Try to remove empty task!\n"); + return ERROR_INVALID; + } + + // FIXME, the task state may change when processing, skip this issue now + if (task->state() == OmafDownloadTask::State::READY) { + removeReadyTask(task); + } + + if (task->state() == OmafDownloadTask::State::RUNNING) { + removeRunningTask(task); + } + task_size_.fetch_sub(1); + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when remove task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::createTransfer(OmafDownloadTask::Ptr task) noexcept { + try { + if (task.get() == nullptr || downloader_pool_.get() == nullptr) { + return ERROR_INVALID; + } + + OmafCurlEasyDownloader::Ptr downloader = std::move(downloader_pool_->pop()); + if (downloader.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to create the curl easy downloader!\n"); + return ERROR_NULL_PTR; + } + + OMAF_STATUS ret = downloader->open(task->url()); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to open the curl easy downloader, err=%d\n", ret); + // return the downloader to pool + downloader_pool_->push(std::move(downloader)); + return ret; + } + + task->easy_downloader_ = std::move(downloader); + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when create curl transfer, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::startTransfer(OmafDownloadTask::Ptr task) noexcept { + try { + if (task.get() == nullptr) { + return ERROR_INVALID; + } + + OMAF_LOG(LOG_INFO, "To start transfer for url: %s\n", task->url_.c_str()); + auto downloader = task->easy_downloader_; + if (downloader == nullptr) { + OMAF_LOG(LOG_ERROR, "The curl easy downloader is empty!\n"); + return ERROR_NULL_PTR; + } + + OMAF_STATUS ret = ERROR_NONE; + auto offset = task->streamSize(); + // multi hanlder will manager the life cycle of curl easy hanlder, + // so, we won't send the state callback to downloader. 
+ ret = downloader->start( + offset, -1, + [task](std::unique_ptr sb) { + if (task->dcb_) { + task->stream_size_ += sb->size(); + task->dcb_(std::move(sb)); + } + }, + nullptr); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to start the curl easy downloader!\n"); + return ERROR_INVALID; + } + + { + std::lock_guard lock(run_task_map_mutex_); + auto handler = downloader->handler(); + if (handler) { + OMAF_LOG(LOG_INFO, "Add to multi handler transfer for url: %s, handler: %ld\n", task->url_.c_str(), reinterpret_cast(task->easy_downloader_->handler())); + curl_multi_add_handle(curl_multi_, handler); + + task->state(OmafDownloadTask::State::RUNNING); + run_task_map_[handler] = task; + task->transfer_times_ += 1; + } + } + + int still_alive = 0; + curl_multi_perform(curl_multi_, &still_alive); + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when start curl transfer task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::removeTransfer(OmafDownloadTask::Ptr task) noexcept { + try { + if (task.get() == nullptr) { + return ERROR_INVALID; + } + OMAF_LOG(LOG_INFO, "Remove transfer for url: %s, handler: %ld\n", task->url_.c_str(), reinterpret_cast(task->easy_downloader_->handler())); + CURLMcode code = curl_multi_remove_handle(curl_multi_, task->easy_downloader_->handler()); + if (code != CURLM_OK) { + OMAF_LOG(LOG_ERROR, "Failed to remove curl easy handle from multi handler!, code= %d\n", code); + return ERROR_INVALID; + } + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when remove curl transfer task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::removeReadyTask(OmafDownloadTask::Ptr task) noexcept { + try { + std::lock_guard lock(ready_task_list_mutex_); + std::list::iterator it = ready_task_list_.begin(); + for (; it != ready_task_list_.end(); ++it) { + if ((*it)->url() == task->url()) { + break; + } + } + + if (it != ready_task_list_.end()) { + ready_task_list_.erase(it); + task->state(OmafDownloadTask::State::STOPPED); + } + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when remove waitting task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::removeRunningTask(OmafDownloadTask::Ptr task) noexcept { + try { + moveTaskFromRun(task, OmafDownloadTask::State::STOPPED); + + removeTransfer(task); + + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when remove running task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::markTaskFinish(OmafDownloadTask::Ptr task) noexcept { + try { + OMAF_LOG(LOG_INFO, "Task finish, url=%s\n", task->url().c_str()); + + moveTaskFromRun(task, OmafDownloadTask::State::FINISH); + // other logic + processTaskDone(std::move(task)); + + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when process finished task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafCurlMultiDownloader::markTaskTimeout(OmafDownloadTask::Ptr task) noexcept { + try { + OMAF_LOG(LOG_INFO, "Task timeout, url=%s\n", task->url().c_str()); + moveTaskFromRun(task, OmafDownloadTask::State::TIMEOUT); + // other logic + // restart the trasfer when timeout + if (task->transfer_times_ < curl_params_.http_params_.retry_times_) { + startTransfer(task); + } else { + 
processTaskDone(std::move(task)); + } + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when process timeout task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +void OmafCurlMultiDownloader::processTaskDone(OmafDownloadTask::Ptr task) noexcept { + try { + if (task.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Try to process empty task!\n"); + } else { + task_size_.fetch_sub(1); + + if (task->easy_downloader_) { + // return the downloader to pool + if (downloader_pool_) { + task->easy_downloader_->stop(); + downloader_pool_->push(std::move(task->easy_downloader_)); + } else { + task->easy_downloader_->close(); + task->easy_downloader_.reset(); + } + } + if (task_done_cb_) { + task_done_cb_(std::move(task)); + } + } + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when process task done, ex: %s\n", ex.what()); + } +} + +OMAF_STATUS OmafCurlMultiDownloader::moveTaskFromRun(OmafDownloadTask::Ptr task, + OmafDownloadTask::State to_state) noexcept { + try { + std::lock_guard lock(run_task_map_mutex_); + auto handler = task->easy_downloader_->handler(); + auto find = run_task_map_.find(handler); + if (find != run_task_map_.end()) { + run_task_map_.erase(find); + task->state(to_state); + } + + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when task state from run to %d, ex: %s\n", static_cast(to_state), ex.what()); + return ERROR_INVALID; + } +} + +void OmafCurlMultiDownloader::threadRunner(void) noexcept { + try { + while (bworking_) { + startTaskDownload(); + + int still_alive = 0; + curl_multi_perform(curl_multi_, &still_alive); + + if (still_alive == max_parallel_) { + int numfds; + curl_multi_wait(curl_multi_, nullptr, 0, 100, &numfds); + } + + retriveDoneTask(); + } + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception in the multi thread worker, ex: %s\n", ex.what()); + } +} + +OMAF_STATUS OmafCurlMultiDownloader::startTaskDownload(void) noexcept { + try { + // start a new task + if ((run_task_map_.size() < static_cast(max_parallel_transfers_)) && (ready_task_list_.size() > 0)) { + OmafDownloadTask::Ptr task = NULL; + { + std::lock_guard lock(ready_task_list_mutex_); + task = std::move(ready_task_list_.front()); + OMAF_LOG(LOG_INFO, "1-task id %lld, task count=%d\n", task->id(), task.use_count()); + ready_task_list_.pop_front(); + } + OMAF_STATUS ret = ERROR_NONE; + if (task != NULL) { + ret = createTransfer(task); + if (ret == ERROR_NONE) { + ret = startTransfer(task); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to start the transfer!\n"); + removeRunningTask(task); + } + } else { + OMAF_LOG(LOG_ERROR, "Failed to create the transfer!\n"); + } + } + else + { + OMAF_LOG(LOG_ERROR, "Download task failed to create!\n"); + return ERROR_NULL_PTR; + } + OMAF_LOG(LOG_INFO, "2-task id %lld, task count=%d\n", task->id(), task.use_count()); + } + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when start a new task, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +size_t OmafCurlMultiDownloader::retriveDoneTask(int msgNum) noexcept { + try { + UNUSED(msgNum); + + struct CURLMsg* msg; + size_t num = 0; + do { + int msgq = 0; + msg = curl_multi_info_read(curl_multi_, &msgq); + if (msg && (msg->msg == CURLMSG_DONE)) { + auto handler = msg->easy_handle; + OmafDownloadTask::Ptr task; + { + std::lock_guard lock(run_task_map_mutex_); + auto find = run_task_map_.find(handler); + if (find != run_task_map_.end()) 
{ + task = find->second; + } + } + + if (task.get() != nullptr) { + OMAF_LOG(LOG_INFO, "3-task id %lld, task count=%d\n", task->id(), task.use_count()); + removeTransfer(task); + auto header = task->easy_downloader_->header(); + OMAF_LOG(LOG_INFO, "Header content length=%lld\n", header.content_length_); + if (OmafCurlEasyHelper::success(header.http_status_code_) && (header.content_length_ == task->streamSize())) { + markTaskFinish(std::move(task)); + } else { + // FIXME how to check timeout + markTaskTimeout(std::move(task)); + } + } + } + + num++; + } while (msg); + + return num; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception in the process multi handler information, ex: %s\n", ex.what()); + return 0; + } +} // namespace OMAF +} // namespace OMAF +} // namespace VCD diff --git a/src/OmafDashAccess/OmafDashDownload/OmafCurlMultiHandler.h b/src/OmafDashAccess/OmafDashDownload/OmafCurlMultiHandler.h new file mode 100644 index 00000000..3666ae7f --- /dev/null +++ b/src/OmafDashAccess/OmafDashDownload/OmafCurlMultiHandler.h @@ -0,0 +1,275 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: OmafCurlMultiDownloader.h +//! \brief: downloader class with libcurl multi handler +//! 
+ +#ifndef OMAFCURLMULTIWRAPPER_H +#define OMAFCURLMULTIWRAPPER_H + +#include "../common.h" // VCD::NonCopyable +#include "OmafCurlEasyHandler.h" +#include "OmafDownloader.h" +#include "performance.h" + +#include +#include +#include +#include +#include +#include +#include + +namespace VCD { +namespace OMAF { + +class OmafCurlMultiDownloader; +class OmafDownloadTask; + +const int DEFAULT_MAX_PARALLER_TRANSFERS = 50; + +class OmafDownloadTaskPerfCounter : public VCD::NonCopyable { + public: + using UPtr = std::unique_ptr; + using Ptr = std::shared_ptr; + + public: + OmafDownloadTaskPerfCounter() { create_time_ = std::chrono::steady_clock::now(); }; + virtual ~OmafDownloadTaskPerfCounter(){}; + inline void markStart() { start_transfer_ = std::chrono::steady_clock::now(); }; + inline void markStop() { stop_transfer_ = std::chrono::steady_clock::now(); }; + inline std::chrono::milliseconds transferDuration() { + return std::chrono::duration_cast(stop_transfer_ - start_transfer_); + }; + inline std::chrono::milliseconds duration() { + return std::chrono::duration_cast(stop_transfer_ - create_time_); + }; + inline long downloadTime() const { return download_time_; } + inline void downloadTime(long t) { download_time_ = t; } + inline double downloadSpeed() const { return download_speed_; } + inline void downloadSpeed(double s) { download_speed_ = s; } + + private: + std::chrono::steady_clock::time_point create_time_; + std::chrono::steady_clock::time_point start_transfer_; + std::chrono::steady_clock::time_point stop_transfer_; + long download_time_ = 0; + double download_speed_ = 0.0; +}; + +class OmafDownloadTask : public VCD::NonCopyable { + friend OmafCurlMultiDownloader; + + public: + using Ptr = std::shared_ptr; + + enum class State { + CREATE = 0, + READY = 1, + RUNNING = 2, + STOPPED = 3, + TIMEOUT = 4, + FINISH = 5, + }; + + using TaskDoneCB = std::function; + + public: + OmafDownloadTask(const std::string &url, OmafDashSegmentClient::OnData dcb, OmafDashSegmentClient::OnState scb) + : url_(url), dcb_(dcb), scb_(scb) { + id_ = TASK_ID.fetch_add(1); + }; + + public: + virtual ~OmafDownloadTask() { + dcb_ = nullptr; + scb_ = nullptr; + OMAF_LOG(LOG_INFO, "Release the %s\n", this->to_string().c_str()); + } + + public: + static Ptr createTask(const std::string &url, OmafDashSegmentClient::OnData dcb, OmafDashSegmentClient::OnState scb) { + Ptr task = std::make_shared(url, dcb, scb); + return task; + } + + public: + inline const std::string &url() const noexcept { return url_; } + inline int64_t streamSize(void) const noexcept { return stream_size_; } + inline size_t id() const noexcept { return id_; } + std::string to_string() const noexcept { + std::stringstream ss; + ss << "task, id=" << id_; + ss << ", url=" << url_; + ss << ", stream_size=" << stream_size_; + ss << ", state=" << static_cast(state_); + return ss.str(); + } + + public: + inline void state(State s) noexcept { + switch (s) { + case State::CREATE: + case State::READY: + break; + case State::RUNNING: + if (perf_counter_) perf_counter_->markStart(); + break; + case State::STOPPED: + case State::TIMEOUT: + case State::FINISH: + if (perf_counter_) { + perf_counter_->downloadTime(easy_downloader_->downloadTime()); + perf_counter_->downloadSpeed(easy_downloader_->speed()); + } + break; + default: + break; + } + state_ = s; + }; + inline State state() noexcept { return state_; } + + inline void taskDoneCallback(State state) noexcept { + if (perf_counter_) perf_counter_->markStop(); + if (scb_) { + if (state == 
OmafDownloadTask::State::FINISH) { + scb_(OmafDashSegmentClient::State::SUCCESS); + } else { + if (state == OmafDownloadTask::State::STOPPED) { + scb_(OmafDashSegmentClient::State::STOPPED); + } else if (state == OmafDownloadTask::State::TIMEOUT) { + scb_(OmafDashSegmentClient::State::TIMEOUT); + } else { + scb_(OmafDashSegmentClient::State::FAILURE); + } + } + } + } + std::chrono::milliseconds transferDuration() const { + if (perf_counter_) return perf_counter_->transferDuration(); + return std::chrono::milliseconds(0); + } + std::chrono::milliseconds duration() const { + if (perf_counter_) return perf_counter_->duration(); + return std::chrono::milliseconds(0); + } + long downloadTime() const { + if (perf_counter_) return perf_counter_->downloadTime(); + return 0; + } + double downloadSpeed() const { + if (perf_counter_) return perf_counter_->downloadSpeed(); + return 0.0f; + } + + public: + void perfCounter(OmafDownloadTaskPerfCounter::Ptr s) { perf_counter_ = s; } + OmafDownloadTaskPerfCounter::Ptr perfCounter() { return perf_counter_; }; + + private: + std::string url_; + OmafDashSegmentClient::OnData dcb_; + OmafDashSegmentClient::OnState scb_; + State state_ = State::CREATE; + OmafCurlEasyDownloader::Ptr easy_downloader_; + int transfer_times_ = 0; + size_t id_ = 0; + size_t stream_size_ = 0; + OmafDownloadTaskPerfCounter::Ptr perf_counter_; + + private: + static std::atomic_size_t TASK_ID; +}; // namespace OMAF + +class OmafCurlMultiDownloader : public VCD::NonCopyable { + public: + using Ptr = std::shared_ptr; + + public: + OmafCurlMultiDownloader(long max_parallel_transfers = DEFAULT_MAX_PARALLEL_TRANSFERS); + virtual ~OmafCurlMultiDownloader(); + + public: + OMAF_STATUS init(const CurlParams &p, OmafDownloadTask::TaskDoneCB) noexcept; + OMAF_STATUS close() noexcept; + void setParams(const CurlParams &p) noexcept { + curl_params_ = p; + if (downloader_pool_) { + downloader_pool_->params(p); + } + }; + + public: + OMAF_STATUS addTask(OmafDownloadTask::Ptr task) noexcept; + OMAF_STATUS removeTask(OmafDownloadTask::Ptr task) noexcept; + + inline size_t size() const noexcept { // return ready_task_list_.size() + run_task_map_.size(); + int size = task_size_.load(); + if (size < 0) { + OMAF_LOG(LOG_FATAL, "The task size is in invalid state!\n"); + } + return static_cast(size); + } + + private: + OMAF_STATUS removeReadyTask(OmafDownloadTask::Ptr task) noexcept; + OMAF_STATUS removeRunningTask(OmafDownloadTask::Ptr task) noexcept; + OMAF_STATUS moveTaskFromRun(OmafDownloadTask::Ptr task, OmafDownloadTask::State to_state) noexcept; + OMAF_STATUS markTaskFinish(OmafDownloadTask::Ptr task) noexcept; + OMAF_STATUS markTaskTimeout(OmafDownloadTask::Ptr task) noexcept; + + private: + void threadRunner(void) noexcept; + OMAF_STATUS startTaskDownload(void) noexcept; + size_t retriveDoneTask(int msgNum = -1) noexcept; + OMAF_STATUS createTransfer(OmafDownloadTask::Ptr task) noexcept; + OMAF_STATUS startTransfer(OmafDownloadTask::Ptr task) noexcept; + OMAF_STATUS removeTransfer(OmafDownloadTask::Ptr task) noexcept; + void processTaskDone(OmafDownloadTask::Ptr task) noexcept; + + private: + const long max_parallel_transfers_; + CURLM *curl_multi_ = nullptr; + CurlParams curl_params_; + std::unique_ptr downloader_pool_; + OmafDownloadTask::TaskDoneCB task_done_cb_; + std::mutex ready_task_list_mutex_; + std::list ready_task_list_; + std::mutex run_task_map_mutex_; + std::map run_task_map_; + std::thread worker_; + std::atomic_int32_t task_size_{0}; + long max_parallel_ = 
DEFAULT_MAX_PARALLER_TRANSFERS; + bool bworking_ = false; +}; + +} // namespace OMAF +} // namespace VCD +#endif // !OMAFCURLMULTIWRAPPER_H diff --git a/src/OmafDashAccess/OmafDashDownload/OmafDownloader.cpp b/src/OmafDashAccess/OmafDashDownload/OmafDownloader.cpp new file mode 100644 index 00000000..e12058bc --- /dev/null +++ b/src/OmafDashAccess/OmafDashDownload/OmafDownloader.cpp @@ -0,0 +1,592 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: OmafDownloader.cpp +//! \brief: downloader class +//! 
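The header above declares the task/downloader pair: `OmafDownloadTask` carries the URL, the data/state callbacks, and an optional per-task performance counter, while `OmafCurlMultiDownloader` owns the CURLM handle, the ready list, and the worker thread. The sketch below shows one plausible way that interface is driven; it is an illustration, not code from this patch. The angle-bracketed template arguments of `OnData`/`OnState` are stripped in this diff view, so the callback parameters shown (a `StreamBlock` chunk and a terminal `OmafDashSegmentClient::State`) are assumptions, the URL is a placeholder, and `CurlParams` is assumed to default-construct to "no proxy".

```cpp
#include "OmafCurlMultiHandler.h"

using namespace VCD::OMAF;

// Hedged usage sketch for the downloader declared above. Assumptions: OnData
// delivers a std::unique_ptr<StreamBlock> chunk, OnState reports the terminal
// OmafDashSegmentClient::State, and CurlParams default-constructs sensibly.
void exampleSegmentDownload() {
  auto downloader = std::make_shared<OmafCurlMultiDownloader>();

  CurlParams params;  // proxy / timeout fields left at their defaults
  downloader->init(params, [](OmafDownloadTask::Ptr task) {
    // invoked by the worker thread once a task reaches FINISH/TIMEOUT/STOPPED
  });

  OmafDownloadTask::Ptr task = OmafDownloadTask::createTask(
      "http://server.example/tile_0/segment_1.m4s",  // placeholder URL
      [](std::unique_ptr<StreamBlock> block) { /* append block to the segment stream */ },
      [](OmafDashSegmentClient::State s) { /* SUCCESS, TIMEOUT, FAILURE, ... */ });

  downloader->addTask(task);  // queued, then started once a transfer slot frees up
  // ...
  downloader->close();        // stops the worker and releases the CURLM handle
}
```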
+ +#include "OmafDownloader.h" +#include "OmafCurlMultiHandler.h" +#include "performance.h" + +#include +#include +#include +#include +#include +#include +#include + +namespace VCD { +namespace OMAF { + +#define PRIORITYTASKSIZE static_cast(TaskPriority::END) + +struct _taskList { + using Ptr = std::shared_ptr; + + int64_t timeline_point_ = -1; + std::list tasks_[PRIORITYTASKSIZE]; +}; +using TaskList = struct _taskList; + +class OmafDashSegmentHttpClientPerf; + +/****************************************************************************** + * + * class define + * + * ****************************************************************************/ +class OmafDashSegmentHttpClientImpl : public OmafDashSegmentHttpClient { + public: + OmafDashSegmentHttpClientImpl(long max_parallel_transfers = DEFAULT_MAX_PARALLEL_TRANSFERS) + : max_parallel_transfers_(max_parallel_transfers) + { + tmpMultiDownloader_ = nullptr; + }; + + virtual ~OmafDashSegmentHttpClientImpl() { stop(); }; + + public: + OMAF_STATUS start() noexcept override; + OMAF_STATUS stop() noexcept override; + + public: + OMAF_STATUS open(const SourceParams &ds_params, OnData dcb, OnState scb) noexcept override; + OMAF_STATUS remove(const SourceParams &ds_params) noexcept override; + OMAF_STATUS check(const SourceParams &ds_params) noexcept override; + inline void setStatisticsWindows(int32_t time_window) noexcept override; + inline std::unique_ptr statistics(void) noexcept override; + + public: + // set policy based on the priority + void setProxy(OmafDashHttpProxy proxy) noexcept override { + curl_params_.http_proxy_ = proxy; + if (segment_downloader_) { + segment_downloader_->setParams(curl_params_); + } + }; + void setParams(OmafDashHttpParams params) noexcept override { + curl_params_.http_params_ = params; + if (segment_downloader_) { + segment_downloader_->setParams(curl_params_); + } + }; + + private: + void threadRunner(void) noexcept; + OmafDownloadTask::Ptr fetchReadyTask(void) noexcept; + void processDoneTask(OmafDownloadTask::Ptr task) noexcept; + + private: + const long max_parallel_transfers_; + OmafCurlMultiDownloader::Ptr segment_downloader_; + CurlParams curl_params_; + OmafCurlChecker::Ptr url_checker_; + std::mutex task_queue_mutex_; + std::condition_variable task_queue_cv_; + std::list task_queue_; + std::mutex downloading_task_mutex_; + std::map downloading_tasks_; + std::thread download_worker_; + bool bworking_ = false; + + std::unique_ptr perf_stats_; + OmafCurlMultiDownloader *tmpMultiDownloader_; +}; + +class OmafDashSegmentHttpClientPerf : public VCD::NonCopyable { + public: + OmafDashSegmentHttpClientPerf() = default; + virtual ~OmafDashSegmentHttpClientPerf() {} + + public: + void setStatisticsWindows(int32_t time_window) noexcept; + std::unique_ptr statistics(void) noexcept; + + void addTime(OmafDownloadTask::State, const std::chrono::milliseconds &) noexcept; + void addTransfer(OmafDownloadTask::State, size_t) noexcept; + void addDownloadTime(OmafDownloadTask::State, long) noexcept; + void add(OmafDownloadTask::State state, const std::chrono::milliseconds &duration, size_t transfer_size, + long download, double network_spped); + + private: + void copyPerf(WindowCounter &time_counter, WindowCounter &transfer_counter, + WindowCounter &download_counter, OmafDashSegmentClient::PerfNode &to_node) noexcept; + + private: + WindowCounter success_task_time_counter_; + WindowCounter timeout_task_time_counter_; + WindowCounter failure_task_time_counter_; + WindowCounter success_task_transfer_counter_; + 
WindowCounter timeout_task_transfer_counter_; + WindowCounter failure_task_transfer_counter_; + WindowCounter success_task_download_counter_; + WindowCounter timeout_task_download_counter_; + WindowCounter failure_task_download_counter_; + WindowCounter network_speed_counter_; +}; + +/****************************************************************************** + * + * class implementation + * + * ****************************************************************************/ +OmafDashSegmentHttpClient::Ptr OmafDashSegmentHttpClient::create(long max_parallel_transfers) noexcept { + try { + return std::make_shared(max_parallel_transfers); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when create the dash segment http source, ex:%s\n", ex.what()); + return nullptr; + } +} + +OMAF_STATUS OmafDashSegmentHttpClientImpl::start() noexcept { + try { + OMAF_LOG(LOG_INFO, "Start the dash source http client!\n"); + curl_global_init(CURL_GLOBAL_ALL); + + // 1. create the multi downloader + tmpMultiDownloader_ = new OmafCurlMultiDownloader; + if (tmpMultiDownloader_ == NULL) return ERROR_INVALID; + segment_downloader_.reset(tmpMultiDownloader_); + if (segment_downloader_.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to create the curl multi downloader!\n"); + return ERROR_INVALID; + } + OMAF_STATUS ret = segment_downloader_->init( + curl_params_, [this](OmafDownloadTask::Ptr task) { this->processDoneTask(std::move(task)); }); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to init the multi downloader!\n"); + return ERROR_INVALID; + } + + // 2. create the checker + url_checker_ = std::make_shared(); + if (url_checker_ == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to create the curl checker downloader!\n"); + return ERROR_NULL_PTR; + } + ret = url_checker_->init(curl_params_); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to init the curl easy downloader!\n"); + return ERROR_INVALID; + } + + // 3. 
start the worker + bworking_ = true; + download_worker_ = std::thread(&OmafDashSegmentHttpClientImpl::threadRunner, this); + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when start the dash source, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafDashSegmentHttpClientImpl::stop() noexcept { + try { + OMAF_LOG(LOG_INFO, "Stop the dash source http client!\n"); + bworking_ = false; + if (segment_downloader_.get() != nullptr) { + segment_downloader_->close(); + } + + if (url_checker_.get() != nullptr) { + url_checker_->close(); + } + + { + std::lock_guard lock(task_queue_mutex_); + task_queue_.clear(); + task_queue_cv_.notify_all(); + } + + { + std::lock_guard lock(downloading_task_mutex_); + downloading_tasks_.clear(); + } + + if (download_worker_.joinable()) { + bworking_ = false; + download_worker_.join(); + } + + curl_global_cleanup(); + tmpMultiDownloader_ = nullptr; + OMAF_LOG(LOG_INFO, "Success to stop the dash client!\n"); + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when Stop the dash source, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafDashSegmentHttpClientImpl::open(const SourceParams &ds_params, OnData dcb, OnState scb) noexcept { + try { + OmafDownloadTask::Ptr task = OmafDownloadTask::createTask(ds_params.dash_url_, dcb, scb); + if (task.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to create the task\n"); + return ERROR_INVALID; + } + if (perf_stats_.get() != nullptr) { + OmafDownloadTaskPerfCounter::Ptr t_perf = std::make_shared(); + task->perfCounter(std::move(t_perf)); + } + + OMAF_LOG(LOG_INFO, "Open the task count=%d. %s\n", task.use_count(), task->to_string().c_str()); + bool new_timeline = true; + + std::lock_guard lock(task_queue_mutex_); + for (auto &tl : task_queue_) { + if (ds_params.timeline_point_ == tl->timeline_point_) { + new_timeline = false; + auto &tasks = tl->tasks_[static_cast(ds_params.priority_)]; + tasks.push_back(task); + break; + } + } + + // this is a download request with new timeline + if (new_timeline) { + TaskList::Ptr tl = std::make_shared(); + if (tl.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Task list create failed!\n"); + return ERROR_NULL_PTR; + } + tl->timeline_point_ = ds_params.timeline_point_; + int priority = static_cast(ds_params.priority_); + if (priority < PRIORITYTASKSIZE){ + tl->tasks_[priority].push_back(task); + }else + { + OMAF_LOG(LOG_ERROR, "Priority %d is invalid, the max task size is %d\n", priority, PRIORITYTASKSIZE); + return ERROR_INVALID; + } + + if (!task_queue_.empty()) { + auto &tail_tasks = task_queue_.back(); + if (tail_tasks->timeline_point_ >= ds_params.timeline_point_) { + OMAF_LOG(LOG_FATAL, "Invalid timeline point happen! < %ld, %ld>\n", tail_tasks->timeline_point_, ds_params.timeline_point_); + } + } + + task_queue_.push_back(tl); + } + task_queue_cv_.notify_all(); + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when open the dash source, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} +OMAF_STATUS OmafDashSegmentHttpClientImpl::remove(const SourceParams &ds_params) noexcept { + try { + OmafDownloadTask::Ptr to_remove_task; + + // 1. 
find it from create state list + { + std::lock_guard lock(task_queue_mutex_); + for (auto &tl : task_queue_) { + if (tl->timeline_point_ == ds_params.timeline_point_) { + int priority = static_cast(ds_params.priority_); + auto &tasks = tl->tasks_[priority]; + + std::list::iterator it = tasks.begin(); + while (it != tasks.end()) { + auto &task = *it; + if (task->url() == ds_params.dash_url_) { + if (task->state() == OmafDownloadTask::State::CREATE) { + to_remove_task = *it; + tasks.erase(it); + break; + } + } + } + break; + } + } + } + + // 2. remove it from downloading task list + if (to_remove_task.get() == nullptr) { + { + std::lock_guard lock(downloading_task_mutex_); + auto it = downloading_tasks_.find(ds_params.dash_url_); + if (it != downloading_tasks_.end()) { + to_remove_task = std::move(it->second); + downloading_tasks_.erase(it); + } + } + if (to_remove_task.get() != nullptr) { + segment_downloader_->removeTask(to_remove_task); + } + } + + if (to_remove_task.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to remove the task for the dash: %s\n", ds_params.dash_url_.c_str()); + return ERROR_INVALID; + } + + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when Stop the dash source, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} +OMAF_STATUS OmafDashSegmentHttpClientImpl::check(const SourceParams &ds_params) noexcept { + try { + return url_checker_->check(ds_params.dash_url_); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when check the dash source, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +inline void OmafDashSegmentHttpClientImpl::setStatisticsWindows(int32_t time_window) noexcept { + if (perf_stats_ == nullptr) { + perf_stats_.reset(new OmafDashSegmentHttpClientPerf()); + } + perf_stats_->setStatisticsWindows(time_window); +}; +inline std::unique_ptr OmafDashSegmentHttpClientImpl::statistics(void) noexcept { + if (perf_stats_ != nullptr) { + return std::move(perf_stats_->statistics()); + } + return nullptr; +}; + +void OmafDashSegmentHttpClientImpl::threadRunner(void) noexcept { + try { + const size_t max_queue_size = static_cast(max_parallel_transfers_ << 1); + while (bworking_) { + // 1.1. check + // too many task in dash downloader + if (segment_downloader_->size() > max_queue_size) { + OMAF_LOG(LOG_INFO, "The size of downloading is: %lld\n", segment_downloader_->size()); + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + continue; + } + // 2. fetch ready task + + OmafDownloadTask::Ptr task = fetchReadyTask(); + if (task.get() != nullptr) { + // 2.1 add to downloader + OMAF_LOG(LOG_INFO, "downloader-0-task id %lld, task count=%d\n", task->id(), task.use_count()); + segment_downloader_->addTask(task); + { + // 2.1.2 cache in the downloading list to support remove + + std::lock_guard lock(downloading_task_mutex_); + downloading_tasks_[task->url()] = task; + } + OMAF_LOG(LOG_INFO, "Start download for task count=%d. 
%s\n", task.use_count(), task->to_string().c_str()); + } + } + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception in the dash source thread runner, ex: %s\n", ex.what()); + } +} + +OmafDownloadTask::Ptr OmafDashSegmentHttpClientImpl::fetchReadyTask(void) noexcept { + try { + std::unique_lock lock(task_queue_mutex_); + + while (task_queue_.size()) { + auto &tl = task_queue_.front(); + for (int i = 0; i < PRIORITYTASKSIZE; i++) { + if (tl->tasks_[i].size()) { + auto task = std::move(tl->tasks_[i].front()); + tl->tasks_[i].pop_front(); + return std::move(task); + } + } + // no new task list with new timeline ready, then wait + if (task_queue_.size() <= 1) { + task_queue_cv_.wait(lock); + } else { + // drop the oldest task list of queue and move next + task_queue_.pop_front(); + } + } + + return nullptr; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when fetch the avaiable task, ex: %s\n", ex.what()); + return nullptr; + } +} + +void OmafDashSegmentHttpClientImpl::processDoneTask(OmafDownloadTask::Ptr task) noexcept { + try { + if (task.get() == nullptr) { + return; + } + OmafDownloadTask::State state = task->state(); + task->taskDoneCallback(state); + + // remove from downloading list + { + std::lock_guard lock(downloading_task_mutex_); + auto it = downloading_tasks_.find(task->url()); + if (it != downloading_tasks_.end()) { + downloading_tasks_.erase(it); + OMAF_LOG(LOG_INFO, "Done the task count=%d. %s\n", task.use_count(), task->to_string().c_str()); + } + } + + if (perf_stats_) { + auto duration = task->transferDuration(); + auto transfer_size = task->streamSize(); + auto download_time = task->downloadTime(); + auto network_speed = task->downloadSpeed(); + perf_stats_->add(state, duration, transfer_size, download_time, network_speed); + } + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when process the done task, ex: %s\n", ex.what()); + } +} + +void OmafDashSegmentHttpClientPerf::addTime(OmafDownloadTask::State state, + const std::chrono::milliseconds &duration) noexcept { + switch (state) { + case OmafDownloadTask::State::STOPPED: + failure_task_time_counter_.add(duration.count()); + + break; + case OmafDownloadTask::State::TIMEOUT: + timeout_task_time_counter_.add(duration.count()); + + break; + case OmafDownloadTask::State::FINISH: + success_task_time_counter_.add(duration.count()); + + break; + default: + break; + } +} +void OmafDashSegmentHttpClientPerf::addTransfer(OmafDownloadTask::State state, size_t transfer_size) noexcept { + switch (state) { + case OmafDownloadTask::State::STOPPED: + + failure_task_transfer_counter_.add(transfer_size); + break; + case OmafDownloadTask::State::TIMEOUT: + + timeout_task_transfer_counter_.add(transfer_size); + break; + case OmafDownloadTask::State::FINISH: + + success_task_transfer_counter_.add(transfer_size); + break; + default: + break; + } +} +void OmafDashSegmentHttpClientPerf::addDownloadTime(OmafDownloadTask::State state, long download) noexcept { + switch (state) { + case OmafDownloadTask::State::STOPPED: + failure_task_download_counter_.add(download); + break; + case OmafDownloadTask::State::TIMEOUT: + timeout_task_download_counter_.add(download); + break; + case OmafDownloadTask::State::FINISH: + success_task_download_counter_.add(download); + break; + default: + break; + } +} +void OmafDashSegmentHttpClientPerf::add(OmafDownloadTask::State state, const std::chrono::milliseconds &duration, + size_t transfer_size, long download_time, double network_speed) { + switch 
(state) { + case OmafDownloadTask::State::STOPPED: + failure_task_time_counter_.add(duration.count()); + failure_task_transfer_counter_.add(transfer_size); + failure_task_download_counter_.add(download_time); + break; + case OmafDownloadTask::State::TIMEOUT: + timeout_task_time_counter_.add(duration.count()); + timeout_task_transfer_counter_.add(transfer_size); + timeout_task_download_counter_.add(download_time); + break; + case OmafDownloadTask::State::FINISH: + success_task_time_counter_.add(duration.count()); + success_task_transfer_counter_.add(transfer_size); + success_task_download_counter_.add(download_time); + break; + default: + break; + } + network_speed_counter_.add(network_speed); +} + +void OmafDashSegmentHttpClientPerf::setStatisticsWindows(int32_t time_window) noexcept { + try { + success_task_time_counter_.setWindow(time_window); + timeout_task_time_counter_.setWindow(time_window); + failure_task_time_counter_.setWindow(time_window); + success_task_transfer_counter_.setWindow(time_window); + timeout_task_transfer_counter_.setWindow(time_window); + failure_task_transfer_counter_.setWindow(time_window); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when set time window, ex: %s\n", ex.what()); + } +} + +std::unique_ptr OmafDashSegmentHttpClientPerf::statistics(void) noexcept { + std::unique_ptr perf = + make_unique_vcd(); + perf->check_time_ = std::chrono::system_clock::now(); + copyPerf(success_task_time_counter_, success_task_transfer_counter_, success_task_download_counter_, perf->success_); + copyPerf(timeout_task_time_counter_, timeout_task_transfer_counter_, timeout_task_download_counter_, perf->timeout_); + copyPerf(failure_task_time_counter_, failure_task_transfer_counter_, failure_task_download_counter_, perf->failure_); + + perf->download_speed_bps_ = network_speed_counter_.count().avr_value_window_; + return perf; +} + +inline void OmafDashSegmentHttpClientPerf::copyPerf(WindowCounter &time_counter, + WindowCounter &transfer_counter, + WindowCounter &download_counter, + OmafDashSegmentClient::PerfNode &to_node) noexcept { + auto time_v = time_counter.count(); + auto transfer_v = transfer_counter.count(); + auto download_v = download_counter.count(); + to_node.count_total_ = time_v.points_size_total_; + to_node.transfer_bytes_total_ = transfer_v.sum_value_total_; + to_node.count_ = time_v.points_size_window_; + to_node.total_transfer_time_ms_ = time_v.sum_value_total_; + to_node.avr_transfer_time_ms_ = time_v.avr_value_window_; + to_node.transfer_bytes_ = transfer_v.sum_value_total_; + to_node.download_time_ms_ = download_v.avr_value_window_ / 1000.0; // to ms +} + +} // namespace OMAF +} // namespace VCD diff --git a/src/OmafDashAccess/OmafDashDownload/OmafDownloader.h b/src/OmafDashAccess/OmafDashDownload/OmafDownloader.h index 1361f5dd..0d98a79e 100644 --- a/src/OmafDashAccess/OmafDashDownload/OmafDownloader.h +++ b/src/OmafDashAccess/OmafDashDownload/OmafDownloader.h @@ -34,116 +34,120 @@ #define OMAFDOWNLOADER_H #include "../OmafDashParser/Common.h" -#include "OmafDownloaderObserver.h" +#include "../OmafTypes.h" +#include "Stream.h" + +#include +#include +#include +#include +#include + +namespace VCD { +namespace OMAF { +class OmafDashSegmentClient : public VCD::NonCopyable { + public: + enum class State { + SUCCESS = 0, + STOPPED = 1, + TIMEOUT = 2, + FAILURE = 3, + }; + + struct _perfNode { + size_t count_total_ = 0; + size_t transfer_bytes_total_ = 0; + size_t count_ = 0; + size_t transfer_bytes_ = 0; + double download_time_ms_ = 0; 
+ size_t total_transfer_time_ms_ = 0; + double avr_transfer_time_ms_ = 0; + std::string to_string() { + std::stringstream ss; + ss << "{ total: { count=" << count_total_ << ", transfer=" << transfer_bytes_total_ << " bytes}, "; + ss << " sliding window: { count=" << count_; + ss << ", curl download time=" << download_time_ms_; + ss << ", transfer=" << transfer_bytes_ << " bytes"; + ss << ", transfer time=" << total_transfer_time_ms_ << " ms"; + ss << ", average transfer time=" << avr_transfer_time_ms_ << " ms"; + ss << "}}"; + return ss.str(); + } + }; + using PerfNode = struct _perfNode; + + struct _perfStatistics { + std::chrono::system_clock::time_point check_time_; + PerfNode success_; + PerfNode timeout_; + PerfNode failure_; + float download_speed_bps_ = 0.0f; + + std::string serializeTimePoint(const std::chrono::system_clock::time_point &time) { + auto t_sec = std::chrono::time_point_cast(time); + auto t_ms = std::chrono::time_point_cast(time); + auto tt = std::chrono::system_clock::to_time_t(t_sec); + auto ms = (t_ms - t_sec).count(); + auto tm = *std::gmtime(&tt); + std::stringstream ss; + ss << std::put_time(&tm, "%Y-%m-%d %H:%M:%S") << ":" << ms; + return ss.str(); + } + + std::string to_string() { + std::stringstream ss; + ss << std::endl; + ss << "time: " << serializeTimePoint(check_time_) << std::endl; + ss << "per segment transfer speed: " << download_speed_bps_ / 1000.0 << " kbps" << std::endl; + ss << "success segment transfer: " << success_.to_string() << std::endl; + ss << "timeout segment transfer: " << timeout_.to_string() << std::endl; + ss << "failure segment transfer: " << failure_.to_string() << std::endl; + return ss.str(); + } + }; + + using SourceParams = DashSegmentSourceParams; + using PerfStatistics = struct _perfStatistics; + using OnData = std::function)>; + using OnState = std::function; + + protected: + OmafDashSegmentClient() = default; + + public: + virtual ~OmafDashSegmentClient(){}; + + public: + virtual OMAF_STATUS start() noexcept = 0; + virtual OMAF_STATUS stop() noexcept = 0; + + public: + virtual OMAF_STATUS open(const SourceParams &dash_source, OnData scb, OnState fcb) noexcept = 0; + virtual OMAF_STATUS remove(const SourceParams &dash_source) noexcept = 0; + virtual OMAF_STATUS check(const SourceParams &dash_source) noexcept = 0; + virtual void setStatisticsWindows(int32_t time_window) noexcept = 0; + virtual std::unique_ptr statistics(void) noexcept = 0; +}; -VCD_OMAF_BEGIN +class OmafDashSegmentHttpClient : public OmafDashSegmentClient { + public: + using Ptr = std::shared_ptr; -//! -//! \class: OmafDownloader -//! \brief: downloader base -//! -class OmafDownloader -{ -public: - - //! - //! \brief Constructor - //! - OmafDownloader(){}; - - //! - //! \brief Destructor - //! - virtual ~OmafDownloader(){}; - - //! - //! \brief Stop download - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Stop() = 0; - - //! - //! \brief start download - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Start() = 0; - - //! - //! \brief Read given size stream to data pointer - //! - //! \param [in] size - //! size of stream that should read - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Read(uint8_t* data, size_t size) = 0; - - //! - //! \brief Peek given size stream to data pointer - //! - //! \param [in] size - //! 
size of stream that should read - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Peek(uint8_t* data, size_t size) = 0; - - //! - //! \brief Peek given size stream to data pointer start from offset - //! - //! \param [in] size - //! size of stream that should read - //! \param [in] offset - //! stream offset that read should start - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus Peek(uint8_t* data, size_t size, size_t offset) = 0; - - //! - //! \brief Attach download observer - //! - //! \param [in] observer - //! observer need to be attached - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus ObserverAttach(OmafDownloaderObserver *observer) = 0; - - //! - //! \brief Dettach download observer - //! - //! \param [in] observer - //! observer need to be dettached - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - virtual ODStatus ObserverDetach(OmafDownloaderObserver* observer) = 0; - - //! - //! \brief Get download rate - //! - //! \return double - //! download rate - //! - virtual double GetDownloadRate() = 0; + protected: + OmafDashSegmentHttpClient() : OmafDashSegmentClient(){}; + + public: + virtual ~OmafDashSegmentHttpClient(){}; + + public: + virtual void setProxy(OmafDashHttpProxy proxy) noexcept = 0; + virtual void setParams(OmafDashHttpParams params) noexcept = 0; + + public: + static OmafDashSegmentHttpClient::Ptr create(long max_parallel_transfers) noexcept; }; -VCD_OMAF_END; +} // namespace OMAF +} // namespace VCD -#endif //OMAFDOWNLOADER_H \ No newline at end of file +#endif // OMAFDOWNLOADER_H \ No newline at end of file diff --git a/src/OmafDashAccess/OmafDashDownload/Stream.cpp b/src/OmafDashAccess/OmafDashDownload/Stream.cpp deleted file mode 100644 index e160fba3..00000000 --- a/src/OmafDashAccess/OmafDashDownload/Stream.cpp +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - */ - -//! -//! 
\file: Stream.cpp -//! \brief: stream class for storing downloaded sub-streams -//! - -#include "Stream.h" - -VCD_OMAF_BEGIN - -Stream::Stream() -{ - m_eos = false; - m_totalLength = 0; -} - -Stream::~Stream() -{ - if(m_listDownloadedStreams.size()) - { - for(std::list::iterator it = m_listDownloadedStreams.begin() ; it != m_listDownloadedStreams.end(); it++) - { - StreamInfo * ds = *it; - SAFE_DELETE(ds); - } - m_listDownloadedStreams.clear(); - } -} - -ODStatus Stream::AddSubStream(char* streamData, uint64_t streamLen) -{ - StreamInfo *sInfo = new StreamInfo(); - sInfo->data = streamData; - sInfo->length = streamLen; - - m_lock.lock(); - m_totalLength += streamLen; - m_listDownloadedStreams.push_back(sInfo); - m_lock.unlock(); - - // notify other threads - m_cv.notify_all(); - return OD_STATUS_SUCCESS; -} - -ODStatus Stream::GetStream(char* streamData, uint64_t streamDataLen) -{ - CheckNullPtr_PrintLog_ReturnStatus(streamData, "the data pointer for getting output stream is null!", ERROR, OD_STATUS_INVALID); - - while(!GetDownloadedListSize() && !IsEOS()) - { - mutex mtx; - unique_lock lck(mtx); - - // wait for time out or is notified - m_cv.wait_for(lck, chrono::milliseconds(2)); - } - - m_lock.lock(); - - uint64_t gotSize = 0, gotCnt = 0; - for(auto stream: m_listDownloadedStreams) - { - if(gotSize >= streamDataLen) - break; - - uint64_t copysize = 0; - if((streamDataLen - gotSize) >= stream->length) - { - copysize = stream->length; - memcpy(streamData + gotSize, stream->data, copysize); - gotCnt++; - } - else - { - copysize = (streamDataLen - gotSize); - memcpy(streamData + gotSize, stream->data, copysize); - - // remove the copied part of list element - auto leftSize = stream->length - copysize; - char* newData = new char(leftSize); - memcpy(newData, stream->data, leftSize); - SAFE_DELETE(stream->data); - stream->data = newData; - stream->length = leftSize; - } - - gotSize += copysize; - } - - while(gotCnt) - { - auto downStream = m_listDownloadedStreams.front(); - m_listDownloadedStreams.pop_front(); - SAFE_DELETE(downStream); - gotCnt--; - } - - m_totalLength -= streamDataLen; - - m_lock.unlock(); - - return OD_STATUS_SUCCESS; -} - -ODStatus Stream::PeekStream(char* streamData, uint64_t streamDataLen) -{ - CheckNullPtr_PrintLog_ReturnStatus(streamData, "The data pointer for getting output stream is null!", ERROR, OD_STATUS_INVALID); - - while(!GetDownloadedListSize() && !IsEOS()) - { - mutex mtx; - unique_lock lck(mtx); - - // wait for time out or is notified - m_cv.wait_for(lck, chrono::milliseconds(2)); - } - - m_lock.lock(); - - uint64_t gotSize = 0; - for(auto stream: m_listDownloadedStreams) - { - if(gotSize >= streamDataLen) - break; - - uint64_t copysize = (streamDataLen - gotSize) >= stream->length ? 
stream->length : (streamDataLen - gotSize); - - memcpy(streamData + gotSize, stream->data, copysize); - gotSize += copysize; - } - - m_lock.unlock(); - - return OD_STATUS_SUCCESS; -} - -ODStatus Stream::PeekStream(char* streamData, uint64_t streamDataLen, size_t offset) -{ - CheckNullPtr_PrintLog_ReturnStatus(streamData, "The data pointer for getting output stream is null!", ERROR, OD_STATUS_INVALID); - - while(!GetDownloadedListSize() && !IsEOS()) - { - mutex mtx; - unique_lock lck(mtx); - - // wait for time out or is notified - m_cv.wait_for(lck, chrono::milliseconds(2)); - } - - m_lock.lock(); - - // find the block that offset refers - auto it = m_listDownloadedStreams.begin(); - while(it != m_listDownloadedStreams.end()) - { - if(offset - (*it)->length <= 0) break; - offset -= (*it)->length; - - it++; - } - - uint64_t gotSize = 0; - if (it == m_listDownloadedStreams.end()) - { - return OD_STATUS_OPERATION_FAILED; - } - // copy the left data in offset block - memcpy(streamData, (*it)->data + offset, (*it)->length - offset); - gotSize += (*it)->length - offset; - it++; - - for(; it!= m_listDownloadedStreams.end(); it++) - { - auto stream = (*it); - if(gotSize >= streamDataLen) - break; - - uint64_t copysize = (streamDataLen - gotSize) >= stream->length ? stream->length : (streamDataLen - gotSize); - - memcpy(streamData + gotSize, stream->data, copysize); - gotSize += copysize; - } - - m_lock.unlock(); - - return OD_STATUS_SUCCESS; -} - -ODStatus Stream::ReachedEOS() -{ - m_eos = true; - - m_cv.notify_all(); - - return OD_STATUS_SUCCESS; -} - -VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafDashDownload/Stream.h b/src/OmafDashAccess/OmafDashDownload/Stream.h index f44f0d74..0c3c1d81 100644 --- a/src/OmafDashAccess/OmafDashDownload/Stream.h +++ b/src/OmafDashAccess/OmafDashDownload/Stream.h @@ -33,172 +33,171 @@ #ifndef STREAM_H #define STREAM_H -#include "../OmafDashParser/Common.h" +#include +#include //std::mutex, std::unique_lock -VCD_USE_VRVIDEO; +#include "../OmafDashParser/Common.h" +#include "../common.h" +#include "../isolib/dash_parser/Mp4StreamIO.h" -VCD_OMAF_BEGIN +extern "C" { +#include "safestringlib/safe_mem_lib.h" +} +namespace VCD { +namespace OMAF { //! -//! \class StreamInfo +//! \class StreamBlock //! \brief Stream Information, including data and data size //! -class StreamInfo -{ -public: - - //! - //! \brief Constructor - //! - StreamInfo() - { - data = NULL; - length = 0; +class StreamBlock : public VCD::NonCopyable { + public: + //! + //! \brief Constructor + //! + StreamBlock() = default; + + StreamBlock(char *data, int64_t size) : data_(data), size_(size), capacity_(size), bOwner_(false) {} + //! + //! \brief Destructor + //! + ~StreamBlock() { + if (bOwner_ && data_ != nullptr) { + delete[] data_; + data_ = nullptr; } - - //! - //! \brief Constructor with parameter - //! - StreamInfo(char* d, uint64_t l):StreamInfo() - { - data = d; - length = l; + size_ = 0; + } + char *buf() noexcept { return data_; } + const char *cbuf() const noexcept { return data_; } + int64_t size() const noexcept { return size_; } + int64_t capacity() const noexcept { return capacity_; } + bool size(int64_t size) { + if (size <= capacity_ && size > 0) { + size_ = size; + return true; } - - //! - //! \brief Destructor - //! 
- ~StreamInfo() - { - SAFE_DELETE(data); - length = 0; + return false; + } + void *resize(int64_t size) { + if (bOwner_) { + if (size > capacity_) { + if (data_) { + delete[] data_; + } + data_ = new char[size]; + capacity_ = size; + } + return data_; + } else { + return nullptr; } + } - char* data; // lock(stream_mutex_); -private: + offset_t offset = offset_; - //! - //! \brief get number of sub-streams - //! - //! \return size_t - //! number of sub-streams - //! - size_t GetDownloadedListSize() - { - m_lock.lock(); - auto listSize = m_listDownloadedStreams.size(); - m_lock.unlock(); - - return listSize; + std::list>::const_iterator it = stream_blocks_.cbegin(); + while (it != stream_blocks_.cend()) { + if (offset < (*it)->size()) { + break; + } + offset -= (*it)->size(); + ++it; } - list m_listDownloadedStreams; //!< list stores all downloaded sub-streams - ThreadLock m_lock; //!< for downloaded streams synchronize - bool m_eos; //!< flag for end of stream - condition_variable m_cv; //!< condition variable for streams - uint64_t m_totalLength; //!< the total length of stream -}; + offset_t readSize = 0; + while (it != stream_blocks_.cend()) { + if (readSize >= size) break; + + offset_t copySize = 0; + offset_t dataSize = (*it)->size() - offset; + if ((size - readSize) >= dataSize) { + copySize = dataSize; + } else { + copySize = size - readSize; + } + + memcpy_s(buffer + readSize, copySize, (*it)->cbuf() + offset, copySize); + readSize += copySize; + offset = 0; // set offset to 0 for coming blocks + ++it; + } -VCD_OMAF_END; + offset_ += readSize; + + return readSize; + }; + + bool SeekAbsoluteOffset(offset_t offset) { + std::lock_guard lock(stream_mutex_); + offset_ = offset; // FIXME same logic with old file solution + return true; + }; + + offset_t TellOffset() { return offset_; }; + + offset_t GetStreamSize() { + std::lock_guard lock(stream_mutex_); + return stream_size_; + }; + + public: + void push_back(std::unique_ptr sb) noexcept { + std::lock_guard lock(stream_mutex_); + stream_size_ += sb->size(); + stream_blocks_.push_back(std::move(sb)); + } + + bool cacheToFile(std::string &filename) noexcept { + std::ofstream of; //cbuf(), sb->size()); + } + + of.close(); + return true; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when cache the file: %s, ex: %s\n", filename.c_str(), ex.what()); + if (of.is_open()) { + of.close(); + } + return false; + } + } + + private: + std::list> stream_blocks_; + + std::mutex stream_mutex_; + offset_t stream_size_ = 0; + offset_t offset_ = 0; +}; +} // namespace OMAF +} // namespace VCD -#endif //STREAM_H \ No newline at end of file +#endif // STREAM_H diff --git a/src/OmafDashAccess/OmafDashDownload/performance.h b/src/OmafDashAccess/OmafDashDownload/performance.h new file mode 100644 index 00000000..2ca9d3ea --- /dev/null +++ b/src/OmafDashAccess/OmafDashDownload/performance.h @@ -0,0 +1,172 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: performance.h +//! \brief: performance helper +//! + +#ifndef PERFORMANCE_H_ +#define PERFORMANCE_H_ + +#include "../common.h" + +#include +#include +#include +#include +#include +#include + +namespace VCD { +namespace OMAF { + +const long DEFAULT_TINE_WINDOW = 10000; // 10s + +template +class CountValue { + public: + std::chrono::steady_clock::time_point check_time_; + size_t points_size_window_ = 0; + size_t points_size_total_ = 0; + float points_rate_window_ = 0.0; + T sum_value_window_; + T sum_value_total_; + double avr_value_window_; +}; + +template +class WindowCounter : public VCD::NonCopyable { + private: + class Node { + public: + using UPtr = std::unique_ptr; + + public: + Node(std::chrono::steady_clock::time_point start, T value) + : start_(start), + value_(value){ + + }; + ~Node(){}; + std::chrono::steady_clock::time_point start_; + T value_; + }; + + public: + WindowCounter(std::string category = "CategoryUnknown", std::string object = "ObjectUnknown", + std::string indicator = "Rate") + : time_window_(DEFAULT_TINE_WINDOW) { + std::stringstream ss; + + ss << (category.empty() ? "CategoryUnknown" : category) << "::" << (object.empty() ? "ObjectUnknown" : object) + << "::" << (indicator.empty() ? 
"Rate" : indicator); + + description_ = ss.str(); + } + + ~WindowCounter(){}; + void setWindow(long bufferTime) { time_window_ = std::chrono::milliseconds(bufferTime); } + void reset() { + std::lock_guard lock(mutex_); + + time_points_.clear(); + } + + void add(T value) { + std::lock_guard lock(mutex_); + + auto now = std::chrono::steady_clock::now(); + + if (!time_points_.empty()) { + popExpired(now); + } + std::unique_ptr node = make_unique_vcd(now, value); + data_.points_size_total_++; + data_.sum_value_window_ += value; + + time_points_.emplace_back(std::move(node)); + } + + CountValue count() { + std::lock_guard lock(mutex_); + + auto now = std::chrono::steady_clock::now(); + + popExpired(now); + + data_.check_time_ = now; + data_.points_size_window_ = time_points_.size(); + if (time_points_.size() > 0) { + const auto &front = time_points_.front(); + const auto &front_time = front->start_; + auto duration = std::chrono::duration_cast(now - front_time); + if (duration > time_window_) { + duration = time_window_; + } + + data_.points_rate_window_ = (time_points_.size() * 1000.0f) / duration.count(); + } + + data_.sum_value_total_ = static_cast(0); + data_.avr_value_window_ = static_cast(0); + for (auto &node : time_points_) { + data_.sum_value_total_ += node->value_; + } + uint32_t sizeofTP = time_points_.size(); + if (sizeofTP > 0) { + data_.avr_value_window_ = static_cast(data_.sum_value_total_) / sizeofTP; + } + + return data_; + } + + protected: + void popExpired(std::chrono::steady_clock::time_point &now) { + while (!time_points_.empty()) { + auto &front = time_points_.front(); + auto front_time = front->start_; + auto duration = std::chrono::duration_cast(now - front_time); + if (duration < time_window_) { + break; + } + + time_points_.pop_front(); + } + } + + private: + std::mutex mutex_; + std::string description_; + std::chrono::milliseconds time_window_; + std::list> time_points_; + CountValue data_; +}; + +} // namespace OMAF +} // namespace VCD +#endif // !PERFORMANCE_H_ \ No newline at end of file diff --git a/src/OmafDashAccess/OmafDashParser/AdaptationSetElement.cpp b/src/OmafDashAccess/OmafDashParser/AdaptationSetElement.cpp index 7480eb31..3f6c7e62 100644 --- a/src/OmafDashAccess/OmafDashParser/AdaptationSetElement.cpp +++ b/src/OmafDashAccess/OmafDashParser/AdaptationSetElement.cpp @@ -46,6 +46,7 @@ AdaptationSetElement::~AdaptationSetElement() m_maxWidth.clear(); m_maxHeight.clear(); m_maxFrameRate.clear(); + m_audioSamplingRate.clear(); m_segmentAlignment.clear(); m_subsegmentAlignment.clear(); @@ -86,7 +87,7 @@ void AdaptationSetElement::AddViewport(ViewportElement* viewport) { if(!viewport) { - LOG(ERROR)<<"Fail to add viewport in Element."< AdaptationSetElement::GetTwoDQuality() +{ + map twoDQualityInfos; + // it can also be in essential property + if(!m_supplementalProperties.size()) + return twoDQualityInfos; + + for(auto s : m_supplementalProperties) + { + twoDQualityInfos = s->GetTwoDRegionQualityInfos(); + if(twoDQualityInfos.size()) return twoDQualityInfos; + } + + return twoDQualityInfos; +} + RwpkType AdaptationSetElement::GetRwpkType() { RwpkType rt = RWPK_UNKNOWN; diff --git a/src/OmafDashAccess/OmafDashParser/AdaptationSetElement.h b/src/OmafDashAccess/OmafDashParser/AdaptationSetElement.h index e5030e5a..0e6ef8dc 100644 --- a/src/OmafDashAccess/OmafDashParser/AdaptationSetElement.h +++ b/src/OmafDashAccess/OmafDashParser/AdaptationSetElement.h @@ -126,6 +126,7 @@ class AdaptationSetElement: public OmafElementBase //! 
MEMBER_SET_AND_GET_FUNC(string, m_maxFrameRate, MaxFrameRate); + MEMBER_SET_AND_GET_FUNC(string, m_audioSamplingRate, AudioSamplingRate); //! //! \brief Set function for m_segmentAlignment member //! @@ -220,6 +221,14 @@ class AdaptationSetElement: public OmafElementBase //! A pointer of SphereQuality class SphereQuality* GetSphereQuality(); + //! + //! \brief Get 2D quality information from member m_supplementalProperties + //! + //! \return map + //! map of for input planar + //! video sources + map GetTwoDQuality(); + //! //! \brief Get content converage from member m_supplementalProperties //! @@ -288,6 +297,8 @@ class AdaptationSetElement: public OmafElementBase string m_maxWidth; //!< the maxWidth attribute string m_maxHeight; //!< the maxHeight attribute string m_maxFrameRate; //!< the maxFramerate attribute + string m_audioSamplingRate; + string m_segmentAlignment; //!< the segmentAlignment attribute string m_subsegmentAlignment; //!< the subsegmentAlignment attribute vector m_viewport; //!< the Viewport elements diff --git a/src/player/Mesh.h b/src/OmafDashAccess/OmafDashParser/AudioChannelCfgElement.cpp similarity index 63% rename from src/player/Mesh.h rename to src/OmafDashAccess/OmafDashParser/AudioChannelCfgElement.cpp index dd3e6aff..4414b0e4 100644 --- a/src/player/Mesh.h +++ b/src/OmafDashAccess/OmafDashParser/AudioChannelCfgElement.cpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019, Intel Corporation + * Copyright (c) 2020, Intel Corporation * All rights reserved. * * Redistribution and use in source and binary forms, with or without @@ -23,42 +23,41 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. - * */ //! -//! \file Mesh.h -//! \brief Defines class for Mesh. +//! \file: AudioChannelCfgElement.cpp +//! \brief: AudioChannelConfiguration element class //! -#ifndef _MESH_H_ -#define _MESH_H_ -#include "Common.h" -#include "RenderBackend.h" +#include "AudioChannelCfgElement.h" + +VCD_OMAF_BEGIN -VCD_NS_BEGIN +AudioChannelConfigurationElement::~AudioChannelConfigurationElement() +{ + m_chlCfg = 0; +} -class Mesh +ODStatus AudioChannelConfigurationElement::ParseSchemeIdUriAndValue() { -public: - Mesh(); - virtual ~Mesh(); - virtual RenderStatus Create() = 0; - virtual RenderStatus Destroy() = 0; - virtual RenderStatus Bind(RenderBackend *renderBackend, uint32_t vertexAttrib, uint32_t texCoordAttrib) = 0; - uint32_t GetVertexNum(); - uint32_t GetIndexNum(); - float *GetVertices(); - float *GetTexCoords(); - uint32_t *GetIndices(); + ODStatus ret = OD_STATUS_SUCCESS; + + if(GetSchemeIdUri() == SCHEMEIDURI_AUDIO) + { + if(0 == GetValue().length()) + { + OMAF_LOG(LOG_ERROR, "SchemeIdUri Audio doesn't have value.\n"); + ret = OD_STATUS_INVALID; + } + else + { + m_chlCfg = StringToInt(GetValue()); + OMAF_LOG(LOG_INFO, "Parsed audio channel configuration is %d\n", m_chlCfg); + } + } -protected: - float *m_vertices; - float *m_texCoords; - uint32_t *m_indices; - uint32_t m_vertexNum; - uint32_t m_indexNum; -}; + return ret; +} -VCD_NS_END -#endif /* _MESH_H_ */ +VCD_OMAF_END; diff --git a/src/OmafDashAccess/OmafDashParser/AudioChannelCfgElement.h b/src/OmafDashAccess/OmafDashParser/AudioChannelCfgElement.h new file mode 100644 index 00000000..ea05eee0 --- /dev/null +++ b/src/OmafDashAccess/OmafDashParser/AudioChannelCfgElement.h @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2020, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: AudioChannelCfgElement.h +//! \brief: AudioChannelConfiguration element class +//! + +#ifndef AUDIOCHANNELCONFIGURATIONELEMENT_H +#define AUDIOCHANNELCONFIGURATIONELEMENT_H +#include "OmafElementBase.h" +#include "DescriptorElement.h" + +VCD_OMAF_BEGIN + +class AudioChannelConfigurationElement: public DescriptorElement, public OmafElementBase +{ +public: + + //! + //! \brief Constructor + //! + AudioChannelConfigurationElement() + { + m_chlCfg = 0; + }; + + //! + //! \brief Destructor + //! + virtual ~AudioChannelConfigurationElement(); + + //! + //! \brief Parse SchemeIdUri and it's value + //! + //! \return ODStatus + //! OD_STATUS_SUCCESS if success, else fail reason + //! + virtual ODStatus ParseSchemeIdUriAndValue(); + + int32_t GetChannelCfg() { return m_chlCfg; }; + +private: + + int32_t m_chlCfg; +}; + +VCD_OMAF_END; + +#endif //AUDIOCHANNELCONFIGURATIONELEMENT_H diff --git a/src/OmafDashAccess/OmafDashParser/Common.h b/src/OmafDashAccess/OmafDashParser/Common.h index dd4b7ca8..04730de1 100644 --- a/src/OmafDashAccess/OmafDashParser/Common.h +++ b/src/OmafDashAccess/OmafDashParser/Common.h @@ -34,63 +34,50 @@ #ifndef COMMON_H #define COMMON_H -#include "../../utils/ns_def.h" -#include "data_type.h" -#include "../../utils/GlogWrapper.h" +#include #include #include -#include -#include -#include #include -#include #include -#include -#include #include -#include +#include +#include +#include +#include #include +#include +#include +//#include "../../utils/GlogWrapper.h" +#include "../../utils/ns_def.h" +#include "data_type.h" #include -#include #include +#include +#include #include #include #include #include -#include -#include -#include -#include +#include +#include +#include -#include "../general.h" #include "../../utils/Threadable.h" +#include "../general.h" //! 
\brief Return status typedef int32_t ODStatus; -#define OD_STATUS_SUCCESS 0X00000000 -#define OD_STATUS_INVALID 0X00000001 +#define OD_STATUS_SUCCESS 0X00000000 +#define OD_STATUS_INVALID 0X00000001 #define OD_STATUS_OPERATION_FAILED 0X00000002 -#define OD_STATUS_THREAD 0X00000003 -#define OD_STATUS_AGAIN 0X00000004 +#define OD_STATUS_THREAD 0X00000003 +#define OD_STATUS_AGAIN 0X00000004 using namespace std; -//! -//! \enum DownloaderStatus -//! \brief Downloader Status type -//! -enum DownloaderStatus -{ - NOT_START = 0, - DOWNLOADING = 1, - STOPPING = 2, - STOPPED = 3, - DOWNLOADED = 4 -}; - //! //! \brief check status, return status if it doesn't equal to success //! @@ -100,13 +87,12 @@ enum DownloaderStatus //! \return ODStatus //! nothing if success, else status //! -#define CheckAndReturn(status) \ -{ \ - if(status != OD_STATUS_SUCCESS) \ - { \ - return status; \ - } \ -} \ +#define CheckAndReturn(status) \ + { \ + if (status != OD_STATUS_SUCCESS) { \ + return status; \ + } \ + } //! //! \brief check status, print log and return status @@ -121,14 +107,13 @@ enum DownloaderStatus //! \return ODStatus //! nothing if success, else status //! -#define CheckPrintLogAndReturn(status, log, level) \ -{ \ - if(status != OD_STATUS_SUCCESS) \ - { \ - LOG(level)<SetInfo((char*)GetValue().c_str()); } @@ -68,7 +68,7 @@ ODStatus EssentialPropertyElement::ParseSchemeIdUriAndValue() ProjectionFormat format = static_cast(pf); if(format < PF_UNKNOWN || format > PF_RESERVED) { - LOG(WARNING)<<"the projection format is invalid."<= RWPK_RESERVED) { - LOG(WARNING)<<"the RWPK type is invalid."<SetXmlnsOmaf(m_rootXMLElement->GetAttributeVal(OMAF_XMLNS)); @@ -87,13 +87,13 @@ ODStatus OmafMPDReader::BuildMPD() map attributes = m_rootXMLElement->GetAttributes(); m_mpd->AddOriginalAttributes(attributes); - CheckNullPtr_PrintLog_ReturnStatus(m_rootXMLElement, "Failed to create MPD node.", ERROR, OD_STATUS_OPERATION_FAILED); + CheckNullPtr_PrintLog_ReturnStatus(m_rootXMLElement, "Failed to create MPD node.\n", LOG_ERROR, OD_STATUS_OPERATION_FAILED); vector childElement = m_rootXMLElement->GetChildElements(); for(auto child : childElement) { if(!child) { - LOG(WARNING)<<"Faild to load sub element in MPD Element."<GetName() == "EssentialProperty") @@ -103,7 +103,7 @@ ODStatus OmafMPDReader::BuildMPD() if(essentialProperty) m_mpd->AddEssentialProperty(essentialProperty); else - LOG(WARNING)<<"Faild to set EssentialProperty."<GetName() == "BaseURL") { @@ -112,7 +112,7 @@ ODStatus OmafMPDReader::BuildMPD() if(baseURL) m_mpd->AddBaseUrl(baseURL); else - LOG(WARNING)<<"Faild to add baseURL."<GetName() == "Period") { @@ -121,11 +121,11 @@ ODStatus OmafMPDReader::BuildMPD() if(period) m_mpd->AddPeriod(period); else - LOG(WARNING)<<"Faild to add period."<AddChildElement(child); } @@ -137,9 +137,9 @@ ODStatus OmafMPDReader::BuildMPD() BaseUrlElement* OmafMPDReader::BuildBaseURL(OmafXMLElement* xmlBaseURL) { - CheckNullPtr_PrintLog_ReturnNullPtr(xmlBaseURL, "Failed to read baseURL element.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(xmlBaseURL, "Failed to read baseURL element.\n", LOG_ERROR); BaseUrlElement* baseURL = new BaseUrlElement(); - CheckNullPtr_PrintLog_ReturnNullPtr(baseURL, "Failed to create baseURL node.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(baseURL, "Failed to create baseURL node.\n", LOG_ERROR); auto path = xmlBaseURL->GetPath(); baseURL->SetPath(path); @@ -152,9 +152,9 @@ BaseUrlElement* OmafMPDReader::BuildBaseURL(OmafXMLElement* xmlBaseURL) PeriodElement* 
OmafMPDReader::BuildPeriod(OmafXMLElement* xmlPeriod) { - CheckNullPtr_PrintLog_ReturnNullPtr(xmlPeriod, "Failed to read period element.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(xmlPeriod, "Failed to read period element.\n", LOG_ERROR); PeriodElement* period = new PeriodElement(); - CheckNullPtr_PrintLog_ReturnNullPtr(period, "Failed to create period node.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(period, "Failed to create period node.\n", LOG_ERROR); period->SetStart(xmlPeriod->GetAttributeVal(START)); period->SetId(xmlPeriod->GetAttributeVal(INDEX)); @@ -166,7 +166,7 @@ PeriodElement* OmafMPDReader::BuildPeriod(OmafXMLElement* xmlPeriod) { if(!child) { - LOG(WARNING)<<"Faild to load sub element in Period Element."<AddAdaptationSet(adaptationSet); else - LOG(WARNING)<<"Fail to add adaptionSet."<AddChildElement(child); } @@ -191,15 +191,22 @@ PeriodElement* OmafMPDReader::BuildPeriod(OmafXMLElement* xmlPeriod) AdaptationSetElement* OmafMPDReader::BuildAdaptationSet(OmafXMLElement* xml) { - CheckNullPtr_PrintLog_ReturnNullPtr(xml, "Failed to read adaptionSet element.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(xml, "Failed to read adaptionSet element.\n", LOG_ERROR); AdaptationSetElement* adaptionSet = new AdaptationSetElement(); - CheckNullPtr_PrintLog_ReturnNullPtr(adaptionSet, "Failed to create adaptionSet node.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(adaptionSet, "Failed to create adaptionSet node.\n", LOG_ERROR); adaptionSet->SetId(xml->GetAttributeVal(INDEX)); adaptionSet->SetMimeType(xml->GetAttributeVal(MIMETYPE)); adaptionSet->SetCodecs(xml->GetAttributeVal(CODECS)); - adaptionSet->SetMaxWidth(xml->GetAttributeVal(MAXWIDTH)); - adaptionSet->SetMaxHeight(xml->GetAttributeVal(MAXHEIGHT)); - adaptionSet->SetMaxFrameRate(xml->GetAttributeVal(MAXFRAMERATE)); + if (GetSubstr(adaptionSet->GetMimeType(), '/', true) == "video") + { + adaptionSet->SetMaxWidth(xml->GetAttributeVal(MAXWIDTH)); + adaptionSet->SetMaxHeight(xml->GetAttributeVal(MAXHEIGHT)); + adaptionSet->SetMaxFrameRate(xml->GetAttributeVal(MAXFRAMERATE)); + } + else if (GetSubstr(adaptionSet->GetMimeType(), '/', true) == "audio") + { + adaptionSet->SetAudioSamplingRate(xml->GetAttributeVal(AUDIOSAMPLINGRATE)); + } adaptionSet->SetSegmentAlignment(xml->GetAttributeVal(SEGMENTALIGNMENT)); adaptionSet->SetSubsegmentAlignment(xml->GetAttributeVal(SUBSEGMENTALIGNMENT)); @@ -211,7 +218,7 @@ AdaptationSetElement* OmafMPDReader::BuildAdaptationSet(OmafXMLElement* xml) { if(!child) { - LOG(WARNING)<<"Faild to load sub element in Element."<AddRepresentation(representation); else - LOG(WARNING)<<"Fail to add representation."<GetName() == "Viewport") { @@ -231,7 +238,7 @@ AdaptationSetElement* OmafMPDReader::BuildAdaptationSet(OmafXMLElement* xml) if(viewport) adaptionSet->AddViewport(viewport); else - LOG(WARNING)<<"Fail to add Viewport."<GetName() == "EssentialProperty") { @@ -240,7 +247,7 @@ AdaptationSetElement* OmafMPDReader::BuildAdaptationSet(OmafXMLElement* xml) if(essentialProperty) adaptionSet->AddEssentialProperty(essentialProperty); else - LOG(WARNING)<<"Fail to add essentialProperty."<GetName() == "SupplementalProperty") { @@ -249,11 +256,11 @@ AdaptationSetElement* OmafMPDReader::BuildAdaptationSet(OmafXMLElement* xml) if(supplementalProperty) adaptionSet->AddSupplementalProperty(supplementalProperty); else - LOG(WARNING)<<"Fail to add supplementalProperty."<AddChildElement(child); } @@ -263,9 +270,9 @@ AdaptationSetElement* OmafMPDReader::BuildAdaptationSet(OmafXMLElement* xml) ViewportElement* 
OmafMPDReader::BuildViewport(OmafXMLElement* xmlViewport) { - CheckNullPtr_PrintLog_ReturnNullPtr(xmlViewport, "Failed to read viewport element.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(xmlViewport, "Failed to read viewport element.\n", LOG_ERROR); ViewportElement* viewport = new ViewportElement(); - CheckNullPtr_PrintLog_ReturnNullPtr(viewport, "Failed to create viewport node.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(viewport, "Failed to create viewport node.\n", LOG_ERROR); viewport->SetSchemeIdUri(xmlViewport->GetAttributeVal(SCHEMEIDURI)); viewport->SetValue(xmlViewport->GetAttributeVal(VALUE)); @@ -279,7 +286,7 @@ ViewportElement* OmafMPDReader::BuildViewport(OmafXMLElement* xmlViewport) { if(!child) { - LOG(WARNING)<<"Faild to load sub element in Viewport Element."<SetSchemeIdUri(xmlEssentialProperty->GetAttributeVal(SCHEMEIDURI)); essentialProperty->SetValue(xmlEssentialProperty->GetAttributeVal(VALUE)); @@ -312,7 +319,7 @@ EssentialPropertyElement* OmafMPDReader::BuildEssentialProperty(OmafXMLElement* { if(!child) { - LOG(WARNING)<<"Faild to load sub element in EssentialProperty Element."<SetId(xmlRepresentation->GetAttributeVal(INDEX)); representation->SetCodecs(xmlRepresentation->GetAttributeVal(CODECS)); representation->SetMimeType(xmlRepresentation->GetAttributeVal(MIMETYPE)); representation->SetWidth(StringToInt(xmlRepresentation->GetAttributeVal(WIDTH))); representation->SetHeight(StringToInt(xmlRepresentation->GetAttributeVal(HEIGHT))); representation->SetFrameRate(xmlRepresentation->GetAttributeVal(FRAMERATE)); + representation->SetAudioSamplingRate(StringToInt(xmlRepresentation->GetAttributeVal(AUDIOSAMPLINGRATE))); representation->SetSar(xmlRepresentation->GetAttributeVal(SAR)); representation->SetStartWithSAP(xmlRepresentation->GetAttributeVal(STARTWITHSAP)); representation->SetQualityRanking(xmlRepresentation->GetAttributeVal(QUALITYRANKING)); @@ -347,7 +355,7 @@ RepresentationElement* OmafMPDReader::BuildRepresentation(OmafXMLElement* xmlRep { if(!child) { - LOG(WARNING)<<"Faild to load sub element in Representation Element."<SetSegment(segment); else - LOG(WARNING)<<"Fail to add segment."<GetName() == "AudioChannelConfiguration") + { + AudioChannelConfigurationElement* audioElement = nullptr; + audioElement = BuildAudioChannelConfiguration(child); + if (audioElement) + representation->SetAudioChlCfg(audioElement); + else + OMAF_LOG(LOG_WARNING, "Fail to add audio channel configuration.\n"); } else { - LOG(INFO)<<"Can't parse element in BuildRepresentation."<AddChildElement(child); } @@ -370,12 +387,40 @@ RepresentationElement* OmafMPDReader::BuildRepresentation(OmafXMLElement* xmlRep return representation; } +AudioChannelConfigurationElement* OmafMPDReader::BuildAudioChannelConfiguration(OmafXMLElement* xmlAudioChlCfg) +{ + CheckNullPtr_PrintLog_ReturnNullPtr(xmlAudioChlCfg, "Failed to read audio channel configuration element.\n", LOG_ERROR); + AudioChannelConfigurationElement* audioCfg = new AudioChannelConfigurationElement(); + CheckNullPtr_PrintLog_ReturnNullPtr(audioCfg, "Failed to create audio channel configuration node.\n", LOG_ERROR); + audioCfg->SetSchemeIdUri(xmlAudioChlCfg->GetAttributeVal(SCHEMEIDURI)); + audioCfg->SetValue(xmlAudioChlCfg->GetAttributeVal(VALUE)); + + audioCfg->ParseSchemeIdUriAndValue(); + + map attributes = xmlAudioChlCfg->GetAttributes(); + audioCfg->AddOriginalAttributes(attributes); + + vector childElement = xmlAudioChlCfg->GetChildElements(); + for(auto child : childElement) + { + if(!child) + { + 
OMAF_LOG(LOG_WARNING,"Faild to load sub element in Viewport Element.\n"); + continue; + } + + audioCfg->AddChildElement(child); + } + + return audioCfg; +} + SegmentElement* OmafMPDReader::BuildSegment(OmafXMLElement* xmlSegment) { - CheckNullPtr_PrintLog_ReturnNullPtr(xmlSegment, "Failed to read segment element.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(xmlSegment, "Failed to read segment element.\n", LOG_ERROR); SegmentElement* segment = new SegmentElement(); - CheckNullPtr_PrintLog_ReturnNullPtr(segment, "Failed to create segment node.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(segment, "Failed to create segment node.\n", LOG_ERROR); segment->SetMedia(xmlSegment->GetAttributeVal(MEDIA)); segment->SetInitialization(xmlSegment->GetAttributeVal(INITIALIZATION)); @@ -391,7 +436,7 @@ SegmentElement* OmafMPDReader::BuildSegment(OmafXMLElement* xmlSegment) { if(!child) { - LOG(WARNING)<<"Faild to load sub element in Segment Element."<SetSchemeIdUri(xmlSupplementalProperty->GetAttributeVal(SCHEMEIDURI)); supplementalProperty->SetValue(xmlSupplementalProperty->GetAttributeVal(VALUE)); @@ -421,7 +466,7 @@ SupplementalPropertyElement* OmafMPDReader::BuildSupplementalProperty(OmafXMLEle { if(!child) { - LOG(WARNING)<<"Faild to load sub element in supplementalProperty Element."<SetSphereRegionQuality(BuildSphRegionQuality(child)); } + else if (child->GetName() == OMAF_TWOD_REGIONQUALITY) + { + supplementalProperty->SetTwoDRegionQuality(BuildTwoDRegionQuality(child)); + } + supplementalProperty->AddChildElement(child); } @@ -438,10 +488,10 @@ SupplementalPropertyElement* OmafMPDReader::BuildSupplementalProperty(OmafXMLEle SphRegionQualityElement* OmafMPDReader::BuildSphRegionQuality(OmafXMLElement* xmlSphRegionQuality) { - CheckNullPtr_PrintLog_ReturnNullPtr(xmlSphRegionQuality, "Failed to read sphere Region Quality element.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(xmlSphRegionQuality, "Failed to read sphere Region Quality element.\n", LOG_ERROR); SphRegionQualityElement* sphRegionQuality = new SphRegionQualityElement(); - CheckNullPtr_PrintLog_ReturnNullPtr(sphRegionQuality, "Failed to create sphere Region Quality node.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(sphRegionQuality, "Failed to create sphere Region Quality node.\n", LOG_ERROR); sphRegionQuality->SetShapeType(StringToInt(xmlSphRegionQuality->GetAttributeVal(SHAPE_TYPE))); sphRegionQuality->SetRemainingAreaFlag((xmlSphRegionQuality->GetAttributeVal(REMAINING_AREA_FLAG) == "true")); @@ -456,7 +506,7 @@ SphRegionQualityElement* OmafMPDReader::BuildSphRegionQuality(OmafXMLElement* xm { if(!child) { - LOG(WARNING)<<"Faild to load sub element in sphRegionQuality Element."< childElement = xmlTwoDRegionQuality->GetChildElements(); + for(auto child : childElement) + { + if(!child) + { + OMAF_LOG(LOG_WARNING,"Faild to load sub element in twoDRegionQuality Element.\n"); + continue; + } + + if(child->GetName() == OMAF_QUALITY_INFO) + { + twoDRegionQuality->AddQualityInfo(BuildQualityInfo(child)); + } + twoDRegionQuality->AddChildElement(child); + } + + return twoDRegionQuality; +} + QualityInfoElement* OmafMPDReader::BuildQualityInfo(OmafXMLElement* xmlQualityInfo) { - CheckNullPtr_PrintLog_ReturnNullPtr(xmlQualityInfo, "Failed to read Quality Info element.", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(xmlQualityInfo, "Failed to read Quality Info element.\n", LOG_ERROR); QualityInfoElement* qualityInfo = new QualityInfoElement(); - CheckNullPtr_PrintLog_ReturnNullPtr(qualityInfo, "Failed to create Quality Info node.", 
ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(qualityInfo, "Failed to create Quality Info node.\n", LOG_ERROR); qualityInfo->SetAzimuthRange(StringToInt(xmlQualityInfo->GetAttributeVal(AZIMUTH_RANGE))); qualityInfo->SetCentreAzimuth(StringToInt(xmlQualityInfo->GetAttributeVal(CENTRE_AZIMUTH))); @@ -485,6 +561,8 @@ QualityInfoElement* OmafMPDReader::BuildQualityInfo(OmafXMLElement* xmlQualityIn qualityInfo->SetOrigHeight(StringToInt(xmlQualityInfo->GetAttributeVal(ORIG_HEIGHT))); qualityInfo->SetOrigWidth(StringToInt(xmlQualityInfo->GetAttributeVal(ORIG_WIDTH))); qualityInfo->SetQualityRanking(StringToInt(xmlQualityInfo->GetAttributeVal(QUALITY_RANKING))); + qualityInfo->SetRegionWidth(StringToInt(xmlQualityInfo->GetAttributeVal(REGION_WIDTH))); + qualityInfo->SetRegionHeight(StringToInt(xmlQualityInfo->GetAttributeVal(REGION_HEIGHT))); map attributes = xmlQualityInfo->GetAttributes(); qualityInfo->AddOriginalAttributes(attributes); @@ -494,7 +572,7 @@ QualityInfoElement* OmafMPDReader::BuildQualityInfo(OmafXMLElement* xmlQualityIn { if(!child) { - LOG(WARNING)<<"Faild to load sub element in qualityInfo Element."< + +#include VCD_OMAF_BEGIN using namespace tinyxml2; -OmafXMLParser::OmafXMLParser() -{ - m_mpdReader = nullptr; - m_xmlDoc = nullptr; -} - -OmafXMLParser::~OmafXMLParser() -{ - if(m_mpdReader) - m_mpdReader->Close(); - SAFE_DELETE(m_mpdReader); - SAFE_DELETE(m_xmlDoc); +OmafXMLParser::OmafXMLParser() { + m_mpdReader = nullptr; + m_xmlDoc = nullptr; } -size_t OmafXMLParser::WriteData(void* ptr, size_t size, size_t nmemb, FILE* fp) -{ - return fwrite(ptr, size, nmemb, fp); +OmafXMLParser::~OmafXMLParser() { + if (m_mpdReader) m_mpdReader->Close(); + SAFE_DELETE(m_mpdReader); + SAFE_DELETE(m_xmlDoc); } -string OmafXMLParser::DownloadXMLFile(string url) -{ - string fileName = url.substr(url.find_last_of('/') + 1, url.length() - url.find_last_of('/') - 1); - CURL* curl = curl_easy_init(); - if(!curl) - { - LOG(ERROR)<<"Failed to init curl."< sb) { + OMAF_LOG(LOG_INFO, "Receive the stream block, size=%lld\n", sb->size()); + if (mpd_file.is_open()) { + mpd_file.write(sb->cbuf(), sb->size()); + } else { + OMAF_LOG(LOG_ERROR, "The file is not in open state, file: %s\n", fileName.c_str()); + } + }, + [url](OmafCurlEasyDownloader::State s) { + OMAF_LOG(LOG_INFO, "Download state: %d for url: %s\n", static_cast(s), url.c_str()); + }); + if (ret == ERROR_NONE) { + OMAF_LOG(LOG_INFO, "Success to start the mpd downloader!\n"); + } else { + OMAF_LOG(LOG_ERROR, "Failed to start the mpd downloader, err=%d\n", ret); + } + + if (mpd_file.is_open()) { + mpd_file.close(); + } + return fileName; } - - FILE* fp = fopen(fileName.c_str(), "wb"); - if(!fp) - return ""; - curl_easy_setopt(curl, CURLOPT_URL, url.c_str()); - curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 0L); - curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, 0L); - curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteData); - curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp); - curl_easy_perform(curl); - curl_easy_cleanup(curl); - fclose(fp); - - return fileName; + } + OMAF_LOG(LOG_ERROR, "Failed to download the mpd file, whose url:%s\n", url.c_str()); + return std::string(); } -ODStatus OmafXMLParser::Generate(string url) -{ - ODStatus ret = OD_STATUS_SUCCESS; +ODStatus OmafXMLParser::Generate(string url, string cacheDir) { + ODStatus ret = OD_STATUS_SUCCESS; - m_path = url.substr(0, url.find_last_of('/')); + m_path = url.substr(0, url.find_last_of('/')); - // define the url is local or through network with prefix - string url_prefix = "http"; - 
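The original `DownloadXMLFile` drove the libcurl easy interface directly with an `fwrite`-based write callback; the rewrite delegates the transfer to `OmafCurlEasyDownloader` and streams received blocks into an `std::ofstream` under `cacheDir`. For reference, a self-contained version of the old-style transfer looks roughly like the sketch below (error handling trimmed; the URL and output file name are placeholders, and the relaxed TLS options simply mirror the removed code).

```cpp
#include <cstdio>
#include <curl/curl.h>

// Write callback: append the received bytes to the open file.
static size_t WriteToFile(void* ptr, size_t size, size_t nmemb, void* userdata) {
  return fwrite(ptr, size, nmemb, static_cast<FILE*>(userdata));
}

int main() {
  curl_global_init(CURL_GLOBAL_DEFAULT);
  CURL* curl = curl_easy_init();
  FILE* fp = fopen("Test.mpd", "wb");  // placeholder output name
  if (curl && fp) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/Test.mpd");  // placeholder URL
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 0L);  // matches the old code's relaxed TLS checks
    curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, 0L);
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteToFile);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
    curl_easy_perform(curl);
  }
  if (fp) fclose(fp);
  if (curl) curl_easy_cleanup(curl);
  curl_global_cleanup();
  return 0;
}
```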
bool local = m_path.length() < url_prefix.length() || m_path.substr(0, 4) != url_prefix; + // define the url is local or through network with prefix + string url_prefix = "http"; + bool local = m_path.length() < url_prefix.length() || m_path.substr(0, 4) != url_prefix; - string fileName = local ? url : DownloadXMLFile(url); - if(!fileName.length()) - return OD_STATUS_INVALID; + string fileName = local ? url : DownloadXMLFile(url, cacheDir); + if (!fileName.length()) return OD_STATUS_INVALID; - m_xmlDoc = new XMLDocument(); - CheckNullPtr_PrintLog_ReturnStatus(m_xmlDoc, "Failed to create XMLDocument with tinyXML.", ERROR, OD_STATUS_OPERATION_FAILED); + m_xmlDoc = new XMLDocument(); + CheckNullPtr_PrintLog_ReturnStatus(m_xmlDoc, "Failed to create XMLDocument with tinyXML.\n", LOG_ERROR, + OD_STATUS_OPERATION_FAILED); + OMAF_LOG(LOG_INFO, "To parse the mpd file: %s\n", fileName.c_str()); + XMLError result = m_xmlDoc->LoadFile(fileName.c_str()); + if (result != XML_SUCCESS) return OD_STATUS_OPERATION_FAILED; - XMLError result = m_xmlDoc->LoadFile(fileName.c_str()); - if(result != XML_SUCCESS) - return OD_STATUS_OPERATION_FAILED; + XMLElement* elmt = m_xmlDoc->FirstChildElement(); + CheckNullPtr_PrintLog_ReturnStatus(elmt, "Failed to get element from XML Doc.\n", LOG_ERROR, OD_STATUS_OPERATION_FAILED); - XMLElement *elmt = m_xmlDoc->FirstChildElement(); - CheckNullPtr_PrintLog_ReturnStatus(elmt, "Failed to get element from XML Doc.", ERROR, OD_STATUS_OPERATION_FAILED); + OmafXMLElement* root = BuildXMLElementTree(elmt); + if (!root) { + OMAF_LOG(LOG_ERROR, "Build XML elements tree failed!\n"); + return OD_STATUS_OPERATION_FAILED; + } - OmafXMLElement *root = BuildXMLElementTree(elmt); - if(!root) - { - LOG(ERROR)<<"Build XML elements tree failed!"<Value(); - if(!name) - return nullptr; + const char* name = elmt->Value(); + if (!name) return nullptr; - OmafXMLElement * element = new OmafXMLElement(); - CheckNullPtr_PrintLog_ReturnNullPtr(element, "Failed to create element.", WARNING); + OmafXMLElement* element = new OmafXMLElement(); + CheckNullPtr_PrintLog_ReturnNullPtr(element, "Failed to create element.\n", LOG_WARNING); - element->SetName(name); - element->SetPath(m_path); + element->SetName(name); + element->SetPath(m_path); - const char* text = elmt->GetText(); - if(text) - element->SetText(text); + const char* text = elmt->GetText(); + if (text) element->SetText(text); - this->ReadAttributes(element, elmt); + this->ReadAttributes(element, elmt); - if(elmt->NoChildren()) - return element; + if (elmt->NoChildren()) return element; - // read all child element - XMLElement* child = elmt->FirstChildElement(); - while(child) - { - OmafXMLElement* childElement = this->BuildXMLElementTree(child); - // only add valid child element - if(childElement) - element->AddChildElement(childElement); + // read all child element + XMLElement* child = elmt->FirstChildElement(); + while (child) { + OmafXMLElement* childElement = this->BuildXMLElementTree(child); + // only add valid child element + if (childElement) element->AddChildElement(childElement); - child = child->NextSiblingElement(); - } + child = child->NextSiblingElement(); + } - return element; + return element; } -ODStatus OmafXMLParser::BuildMPDwithXMLElements(OmafXMLElement *root) -{ - ODStatus ret = OD_STATUS_SUCCESS; +ODStatus OmafXMLParser::BuildMPDwithXMLElements(OmafXMLElement* root) { + ODStatus ret = OD_STATUS_SUCCESS; - m_mpdReader = new OmafMPDReader(root); - if(!m_mpdReader) - return OD_STATUS_INVALID; + m_mpdReader = new 
OmafMPDReader(root); + if (!m_mpdReader) return OD_STATUS_INVALID; - m_mpdReader->BuildMPD(); + m_mpdReader->BuildMPD(); - return ret; + return ret; } -void OmafXMLParser::ReadAttributes(OmafXMLElement* element, XMLElement* orgElement) -{ - const XMLAttribute* attribute = orgElement->FirstAttribute(); - if(!attribute) - return; +void OmafXMLParser::ReadAttributes(OmafXMLElement* element, XMLElement* orgElement) { + const XMLAttribute* attribute = orgElement->FirstAttribute(); + if (!attribute) return; - // read all attributes - while(attribute) - { - const char* attrKey = attribute->Name(); - const char* attrValue = attribute->Value(); - element->AddAttribute(attrKey, attrValue); + // read all attributes + while (attribute) { + const char* attrKey = attribute->Name(); + const char* attrValue = attribute->Value(); + element->AddAttribute(attrKey, attrValue); - attribute = attribute->Next(); - } + attribute = attribute->Next(); + } } +MPDElement* OmafXMLParser::GetGeneratedMPD() { + if (!m_mpdReader) { + OMAF_LOG(LOG_ERROR, "please generate MPD tree firstly.\n"); + return nullptr; + } -MPDElement* OmafXMLParser::GetGeneratedMPD() -{ - if(!m_mpdReader) - { - LOG(ERROR)<<"please generate MPD tree firstly."<GetMPD(); + return m_mpdReader->GetMPD(); } VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafDashParser/OmafXMLParser.h b/src/OmafDashAccess/OmafDashParser/OmafXMLParser.h index 1d995df4..81179a10 100644 --- a/src/OmafDashAccess/OmafDashParser/OmafXMLParser.h +++ b/src/OmafDashAccess/OmafDashParser/OmafXMLParser.h @@ -33,10 +33,11 @@ #define OMAFXMLPARSER_H #include "../../utils/tinyxml2.h" +#include "../OmafDashDownload/OmafCurlEasyHandler.h" #include "Common.h" -#include "OmafXMLElement.h" #include "OmafMPDReader.h" +#include "OmafXMLElement.h" VCD_OMAF_BEGIN @@ -44,107 +45,119 @@ VCD_OMAF_BEGIN //! \class: OmafXMLParser //! \brief: OMAF XML parser //! -class OmafXMLParser -{ -public: - //! - //! \brief Constructor - //! - OmafXMLParser(); - - //! - //! \brief Destructor - //! - virtual ~OmafXMLParser(); - - //! - //! \brief Generate XML tree and MPD tree - //! - //! \param [in] url - //! MPD file url - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus Generate(string url); - - //! - //! \brief Download MPD file - //! - //! \param [in] url - //! MPD file url - //! - //! \return string - //! the name of downloaded file - //! - string DownloadXMLFile(string url); - - //! - //! \brief Generate XML tree - //! - //! \param [in] elmt - //! root tinyxml element - //! - //! \return OmafXMLElement - //! root OMAF XML element - //! - OmafXMLElement* BuildXMLElementTree(tinyxml2::XMLElement *elmt); - - //! - //! \brief Generate MPD tree with XML elements - //! - //! \param [in] root - //! root XML element - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus BuildMPDwithXMLElements(OmafXMLElement *root); - - //! - //! \brief Get generated MPD element - //! - //! \return MPDElement - //! OMAF MPD element - //! - MPDElement* GetGeneratedMPD(); +class OmafXMLParser { + public: + //! + //! \brief Constructor + //! + OmafXMLParser(); + + //! + //! \brief Destructor + //! + virtual ~OmafXMLParser(); + + //! + //! \brief Generate XML tree and MPD tree + //! + //! \param [in] url + //! MPD file url + //! [in] cacheDir + //! cache directory + //! + //! \return ODStatus + //! OD_STATUS_SUCCESS if success, else fail reason + //! + ODStatus Generate(string url, string cacheDir); + + //! + //! 
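`BuildXMLElementTree` is a straightforward recursive walk over tinyxml2's DOM: take the element name and text, copy every attribute, then recurse through `FirstChildElement()` / `NextSiblingElement()`. A minimal standalone version of that walk (printing instead of building `OmafXMLElement` nodes) could look like the following; the inline MPD string is a made-up fragment for demonstration only.

```cpp
#include <cstdio>
#include "tinyxml2.h"

using namespace tinyxml2;

// Recursively print an element, its attributes, and its children.
static void Walk(const XMLElement* elmt, int depth) {
  if (!elmt) return;
  std::printf("%*s<%s>\n", depth * 2, "", elmt->Value());
  for (const XMLAttribute* attr = elmt->FirstAttribute(); attr; attr = attr->Next()) {
    std::printf("%*s  @%s = %s\n", depth * 2, "", attr->Name(), attr->Value());
  }
  for (const XMLElement* child = elmt->FirstChildElement(); child; child = child->NextSiblingElement()) {
    Walk(child, depth + 1);
  }
}

int main() {
  const char* mpd =
      "<MPD type=\"static\">"
      "  <Period id=\"1\"><AdaptationSet mimeType=\"video/mp4\"/></Period>"
      "</MPD>";  // illustrative fragment, not a full OMAF MPD
  XMLDocument doc;
  if (doc.Parse(mpd) != XML_SUCCESS) return 1;
  Walk(doc.FirstChildElement(), 0);
  return 0;
}
```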
\brief Download MPD file + //! + //! \param [in] url + //! MPD file url + //! [in] cacheDir + //! cache directory + //! + //! \return string + //! the name of downloaded file + //! + std::string DownloadXMLFile(string url, string cacheDir); + + //! + //! \brief Generate XML tree + //! + //! \param [in] elmt + //! root tinyxml element + //! + //! \return OmafXMLElement + //! root OMAF XML element + //! + OmafXMLElement* BuildXMLElementTree(tinyxml2::XMLElement* elmt); + + //! + //! \brief Generate MPD tree with XML elements + //! + //! \param [in] root + //! root XML element + //! + //! \return ODStatus + //! OD_STATUS_SUCCESS if success, else fail reason + //! + ODStatus BuildMPDwithXMLElements(OmafXMLElement* root); + + //! + //! \brief Get generated MPD element + //! + //! \return MPDElement + //! OMAF MPD element + //! + MPDElement* GetGeneratedMPD(); + + void SetOmafHttpParams(const OmafDashHttpProxy& http_proxy, const OmafDashHttpParams& http_params) { + m_curl_params.http_proxy_ = http_proxy; + m_curl_params.http_params_ = http_params; + } private: - - //! - //! \brief Read attributes from tinyxml element - //! - //! \param [in] element - //! OMAF XML element - //! \param [in] orgElement - //! tinyxml element - //! - //! \return void - //! - void ReadAttributes(OmafXMLElement* element, tinyxml2::XMLElement* orgElement); - - //! - //! \brief Write data to file - //! - //! \param [in] ptr - //! data pointer - //! \param [in] size - //! data size - //! \param [in] nmemb - //! data type size - //! \param [in] fp - //! file handle - //! - //! \return size_t - //! size of wrote data - //! - static size_t WriteData(void* ptr, size_t size, size_t nmemb, FILE* fp); - - tinyxml2::XMLDocument *m_xmlDoc; //!< tinyxml document - string m_path; //!< url path - OmafReaderBase *m_mpdReader; //!< MPD reader + OmafXMLParser& operator=(const OmafXMLParser& other) { return *this; }; + OmafXMLParser(const OmafXMLParser& other) { /* do not create copies */ }; + + private: + //! + //! \brief Read attributes from tinyxml element + //! + //! \param [in] element + //! OMAF XML element + //! \param [in] orgElement + //! tinyxml element + //! + //! \return void + //! + void ReadAttributes(OmafXMLElement* element, tinyxml2::XMLElement* orgElement); + + //! + //! \brief Write data to file + //! + //! \param [in] ptr + //! data pointer + //! \param [in] size + //! data size + //! \param [in] nmemb + //! data type size + //! \param [in] fp + //! file handle + //! + //! \return size_t + //! size of wrote data + //! + static size_t WriteData(void* ptr, size_t size, size_t nmemb, FILE* fp); + + tinyxml2::XMLDocument* m_xmlDoc = nullptr; //!< tinyxml document + string m_path; //!< url path + OmafReaderBase* m_mpdReader; //!< MPD reader + CurlParams m_curl_params; }; VCD_OMAF_END -#endif //OMAFXMLPARSER_H +#endif // OMAFXMLPARSER_H diff --git a/src/OmafDashAccess/OmafDashParser/PeriodElement.cpp b/src/OmafDashAccess/OmafDashParser/PeriodElement.cpp index 9d403e70..dae580b9 100644 --- a/src/OmafDashAccess/OmafDashParser/PeriodElement.cpp +++ b/src/OmafDashAccess/OmafDashParser/PeriodElement.cpp @@ -55,7 +55,7 @@ void PeriodElement::AddAdaptationSet(AdaptationSetElement* adaptionSet) { if(!adaptionSet) { - LOG(ERROR)<<"Fail to add adaptionSet in PeriodElement."< - //! vector of string - //! - vector GetDependencyIDs() { return m_dependencyId; } +class RepresentationElement : public OmafElementBase { + public: + //! + //! \brief Constructor + //! + RepresentationElement(); + + //! + //! \brief Destructor + //! 
+ virtual ~RepresentationElement(); + + //! + //! \brief Set function for m_id member + //! + //! \param [in] string + //! value to set + //! \param [in] m_id + //! m_id member in class + //! \param [in] Id + //! m_id name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_id, Id); + + //! + //! \brief Set function for m_codecs member + //! + //! \param [in] string + //! value to set + //! \param [in] m_codecs + //! m_codecs member in class + //! \param [in] Codecs + //! m_codecs name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_codecs, Codecs); + + //! + //! \brief Set function for m_mimeType member + //! + //! \param [in] string + //! value to set + //! \param [in] m_mimeType + //! m_mimeType member in class + //! \param [in] MimeType + //! m_mimeType name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_mimeType, MimeType); + + //! + //! \brief Set function for m_width member + //! + //! \param [in] int32_t + //! value to set + //! \param [in] m_width + //! m_width member in class + //! \param [in] Width + //! m_width name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(int32_t, m_width, Width); + + //! + //! \brief Set function for m_height member + //! + //! \param [in] int32_t + //! value to set + //! \param [in] m_height + //! m_height member in class + //! \param [in] Height + //! m_height name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(int32_t, m_height, Height); + + MEMBER_SET_AND_GET_FUNC(int32_t, m_audioSamplingRate, AudioSamplingRate); + //! + //! \brief Set function for m_frameRate member + //! + //! \param [in] string + //! value to set + //! \param [in] m_frameRate + //! m_frameRate member in class + //! \param [in] FrameRate + //! m_frameRate name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_frameRate, FrameRate); + + //! + //! \brief Set function for m_sar member + //! + //! \param [in] string + //! value to set + //! \param [in] m_sar + //! m_sar member in class + //! \param [in] Sar + //! m_sar name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_sar, Sar); + + //! + //! \brief Set function for m_startWithSAP member + //! + //! \param [in] string + //! value to set + //! \param [in] m_startWithSAP + //! m_xmlns_omaf member in class + //! \param [in] StartWithSAP + //! m_startWithSAP name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_startWithSAP, StartWithSAP); + + //! + //! \brief Set function for m_qualityRanking member + //! + //! \param [in] string + //! value to set + //! \param [in] m_qualityRanking + //! m_qualityRanking member in class + //! \param [in] QualityRanking + //! m_qualityRanking name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_qualityRanking, QualityRanking); + + //! + //! \brief Set function for m_bandwidth member + //! + //! \param [in] int32_t + //! value to set + //! \param [in] m_bandwidth + //! m_bandwidth member in class + //! \param [in] Bandwidth + //! m_bandwidth name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(int32_t, m_bandwidth, Bandwidth); + + //! + //! \brief Set function for m_segment member + //! + //! \param [in] SegmentElement + //! value to set + //! \param [in] m_segment + //! m_segment member in class + //! \param [in] Segment + //! m_segment name in class + //! + //! \return void + //! 
+ MEMBER_SET_AND_GET_FUNC(SegmentElement*, m_segment, Segment); + + MEMBER_SET_AND_GET_FUNC(AudioChannelConfigurationElement*, m_audioChlCfg, AudioChlCfg); + + //! + //! \brief Set value to m_dependencyId member + //! + //! \param [in] ids + //! A string of value to set + //! + //! \return void + //! + void SetDependencyID(string ids) { SplitString(ids, m_dependencyId, ","); } + + //! + //! \brief Get all items in m_dependencyId + //! + //! \return vector + //! vector of string + //! + vector GetDependencyIDs() { return m_dependencyId; } private: - - string m_id; //!< the id attribute - string m_codecs; //!< the codecs attribute - string m_mimeType; //!< the mimeType attribute - int32_t m_width; //!< the width attribute - int32_t m_height; //!< the height attribute - string m_frameRate; //!< the frameRate attribute - string m_sar; //!< the sar attribute - string m_startWithSAP; //!< the startWithSAP attribute - string m_qualityRanking; //!< the qualityRanking attribute - int32_t m_bandwidth; //!< the bandwidth attribute - vector m_dependencyId; //!< the dependencyId attribute - - SegmentElement* m_segment; //!< the SegmentTemplate child elements + RepresentationElement& operator=(const RepresentationElement& other) { return *this; }; + RepresentationElement(const RepresentationElement& other) { /* do not create copies */ }; + + private: + string m_id; //!< the id attribute + string m_codecs; //!< the codecs attribute + string m_mimeType; //!< the mimeType attribute + int32_t m_width; //!< the width attribute + int32_t m_height; //!< the height attribute + string m_frameRate; //!< the frameRate attribute + int32_t m_audioSamplingRate; //!< the audio sampling rate attribute + string m_sar; //!< the sar attribute + string m_startWithSAP; //!< the startWithSAP attribute + string m_qualityRanking; //!< the qualityRanking attribute + int32_t m_bandwidth; //!< the bandwidth attribute + vector m_dependencyId; //!< the dependencyId attribute + + SegmentElement* m_segment; //!< the SegmentTemplate child elements + AudioChannelConfigurationElement* m_audioChlCfg; }; VCD_OMAF_END; -#endif //REPRESENTATIONELEMENT_H +#endif // REPRESENTATIONELEMENT_H diff --git a/src/OmafDashAccess/OmafDashParser/SegmentElement.cpp b/src/OmafDashAccess/OmafDashParser/SegmentElement.cpp index 42742d60..fa57b9c3 100644 --- a/src/OmafDashAccess/OmafDashParser/SegmentElement.cpp +++ b/src/OmafDashAccess/OmafDashParser/SegmentElement.cpp @@ -31,150 +31,57 @@ //! 
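The representation header relies on the project's `MEMBER_SET_AND_GET_FUNC(type, member, Name)` macro to declare matched `SetName` / `GetName` accessors. Its definition is not part of this patch, but a macro of that shape typically expands to something like the sketch below (shown only to make the header easier to read, not as the project's actual definition).

```cpp
#include <cstdint>
#include <iostream>
#include <string>

// Plausible expansion of MEMBER_SET_AND_GET_FUNC: a setter/getter pair for a
// private member, named after the "pretty" name passed as NAME.
#define MEMBER_SET_AND_GET_FUNC(TYPE, MEMBER, NAME) \
  void Set##NAME(TYPE value) { MEMBER = value; }    \
  TYPE Get##NAME() { return MEMBER; }

class DemoRepresentation {
 public:
  MEMBER_SET_AND_GET_FUNC(std::string, m_id, Id);
  MEMBER_SET_AND_GET_FUNC(int32_t, m_audioSamplingRate, AudioSamplingRate);

 private:
  std::string m_id;
  int32_t m_audioSamplingRate = 0;
};

int main() {
  DemoRepresentation rep;
  rep.SetId("tile_1");
  rep.SetAudioSamplingRate(48000);
  std::cout << rep.GetId() << " @ " << rep.GetAudioSamplingRate() << " Hz\n";
  return 0;
}
```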
#include "SegmentElement.h" -#include "../OmafDashDownload/OmafCurlDownloader.h" -VCD_OMAF_BEGIN - -SegmentElement::SegmentElement() -{ - m_downloader = nullptr; - - m_duration = 0; - m_startNumber = 0; - m_timescale = 0; -} - -SegmentElement::~SegmentElement() -{ - m_media.clear(); - m_initialization.clear(); - m_duration = 0; - m_startNumber = 0; - m_timescale = 0; - - m_url.clear(); // TBD - - ResetDownload(); -} - - -ODStatus SegmentElement::InitDownload(vector& baseURL, string& representationID, int32_t number, int32_t bandwidth, int32_t time) -{ - this->ResetDownload(); +//#include "../OmafDashDownload/OmafCurlDownloader.h" - string completeURL = GenerateCompleteURL(baseURL, representationID, number, bandwidth, time); - //LOG(INFO)<<"Download "<Stop(); - } - SAFE_DELETE(m_downloader); - - m_url.clear(); - return OD_STATUS_SUCCESS; -} - -ODStatus SegmentElement::StartDownloadSegment(OmafDownloaderObserver* observer) -{ - CheckNullPtr_PrintLog_ReturnStatus(m_downloader, "The downloader is not created yet!", ERROR, OD_STATUS_INVALID); - - //attach the observers to downloader - CheckAndReturn(m_downloader->ObserverAttach(observer)); - - m_downloader->Start(); - - return OD_STATUS_SUCCESS; -} - -ODStatus SegmentElement::StopDownloadSegment(OmafDownloaderObserver* observer) -{ - if(!m_downloader) - return OD_STATUS_INVALID; - - m_downloader->Stop(); - if(observer) m_downloader->ObserverDetach(observer); - - return OD_STATUS_SUCCESS; -} - -ODStatus SegmentElement::Read(uint8_t* data, size_t size) -{ - CheckNullPtr_PrintLog_ReturnStatus(m_downloader, "The downloader is not created yet!", ERROR, OD_STATUS_INVALID); +VCD_OMAF_BEGIN - return m_downloader->Read(data, size); +SegmentElement::SegmentElement() { + m_duration = 0; + m_startNumber = 0; + m_timescale = 0; } -ODStatus SegmentElement::Peek(uint8_t* data, size_t size) -{ - CheckNullPtr_PrintLog_ReturnStatus(m_downloader, "The downloader is not created yet!", ERROR, OD_STATUS_INVALID); - - return m_downloader->Peek(data, size); +SegmentElement::~SegmentElement() { + m_media.clear(); + m_initialization.clear(); + m_duration = 0; + m_startNumber = 0; + m_timescale = 0; } -ODStatus SegmentElement::Peek(uint8_t* data, size_t size, size_t offset) -{ - CheckNullPtr_PrintLog_ReturnStatus(m_downloader, "The downloader is not created yet!", ERROR, OD_STATUS_INVALID); - - return m_downloader->Peek(data, size, offset); -} - -string SegmentElement::GenerateCompleteURL(vector& baseURL, string& representationID, int32_t number, int32_t bandwidth, int32_t time) -{ - string combinedBaseURL; - for(uint32_t i = 0; i < baseURL.size() - 1 ; i++) - { - auto url = baseURL[i]; - combinedBaseURL = PathSplice(combinedBaseURL, url->GetPath()); - } - - // check if it is getting the initialization segment - string fileName = number ? 
m_media : m_initialization; - - // replace the reference value with real value in file name - vector subNames; - SplitString(fileName, subNames, "$"); - fileName.clear(); - for( uint32_t i = 0; i < subNames.size();i++) - { - string sn = subNames[i]; - if(sn == SEGMENT_NUMBER) - { - sn = to_string(number); - } - else if(sn == SEGMENT_REPRESENTATIONID) - { - sn = representationID; - } - else if(sn == SEGMENT_BANDWIDTH) - { - sn = to_string(bandwidth); - } - else if (sn == SEGMENT_TIME) - { - sn = to_string(time); - } - fileName += sn; +std::string SegmentElement::GenerateCompleteURL(const vector& baseURL, string& representationID, + int32_t number, int32_t bandwidth, int32_t time) { + string combinedBaseURL; + for (uint32_t i = 0; i < baseURL.size() - 1; i++) { + auto url = baseURL[i]; + combinedBaseURL = PathSplice(combinedBaseURL, url->GetPath()); + } + + // check if it is getting the initialization segment + string fileName = number ? m_media : m_initialization; + + // replace the reference value with real value in file name + vector subNames; + SplitString(fileName, subNames, "$"); + fileName.clear(); + for (uint32_t i = 0; i < subNames.size(); i++) { + string sn = subNames[i]; + if (sn == SEGMENT_NUMBER) { + sn = to_string(number); + } else if (sn == SEGMENT_REPRESENTATIONID) { + sn = representationID; + } else if (sn == SEGMENT_BANDWIDTH) { + sn = to_string(bandwidth); + } else if (sn == SEGMENT_TIME) { + sn = to_string(time); } + fileName += sn; + } - combinedBaseURL = PathSplice(combinedBaseURL, fileName); + combinedBaseURL = PathSplice(combinedBaseURL, fileName); - return combinedBaseURL; + return combinedBaseURL; } VCD_OMAF_END; diff --git a/src/OmafDashAccess/OmafDashParser/SegmentElement.h b/src/OmafDashAccess/OmafDashParser/SegmentElement.h index 81653900..1999105b 100644 --- a/src/OmafDashAccess/OmafDashParser/SegmentElement.h +++ b/src/OmafDashAccess/OmafDashParser/SegmentElement.h @@ -32,233 +32,124 @@ #ifndef SEGMENTELEMENT_H #define SEGMENTELEMENT_H -#include "OmafElementBase.h" -#include "BaseUrlElement.h" +#include + #include "../OmafDashDownload/OmafDownloader.h" -#include "../OmafDashDownload/OmafDownloaderObserver.h" +#include "BaseUrlElement.h" +#include "OmafElementBase.h" VCD_OMAF_BEGIN -class SegmentElement: public OmafElementBase -{ -public: - - //! - //! \brief Constructor - //! - SegmentElement(); - - //! - //! \brief Destructor - //! - virtual ~SegmentElement(); - - //! - //! \brief Set function for m_media member - //! - //! \param [in] string - //! value to set - //! \param [in] m_media - //! m_media member in class - //! \param [in] Media - //! m_media name in class - //! - //! \return void - //! - MEMBER_SET_AND_GET_FUNC(string, m_media, Media); - - //! - //! \brief Set function for m_initialization member - //! - //! \param [in] string - //! value to set - //! \param [in] m_initialization - //! m_initialization member in class - //! \param [in] Initialization - //! m_initialization name in class - //! - //! \return void - //! - MEMBER_SET_AND_GET_FUNC(string, m_initialization, Initialization); - - //! - //! \brief Set function for m_duration member - //! - //! \param [in] int32_t - //! value to set - //! \param [in] m_duration - //! m_duration member in class - //! \param [in] Duration - //! m_duration name in class - //! - //! \return void - //! - MEMBER_SET_AND_GET_FUNC(int32_t, m_duration, Duration); - - //! - //! \brief Set function for m_startNumber member - //! - //! \param [in] int32_t - //! value to set - //! \param [in] m_startNumber - //! 
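The slimmed-down `SegmentElement::GenerateCompleteURL` keeps only the URL-building logic: splice the `BaseURL` paths, pick the media or initialization template, then split the template on `$` and substitute the DASH identifiers. The standalone sketch below reproduces that substitution step with plain strings; it assumes the standard DASH identifier names (`Number`, `RepresentationID`, `Bandwidth`) for the `SEGMENT_*` constants, and the template and values are illustrative.

```cpp
#include <iostream>
#include <string>
#include <vector>

// Split on '$' and replace DASH template identifiers with concrete values,
// mirroring the loop in GenerateCompleteURL.
static std::string FillTemplate(const std::string& tmpl, const std::string& repId,
                                int number, int bandwidth) {
  std::vector<std::string> parts;
  std::string::size_type start = 0, pos;
  while ((pos = tmpl.find('$', start)) != std::string::npos) {
    parts.push_back(tmpl.substr(start, pos - start));
    start = pos + 1;
  }
  parts.push_back(tmpl.substr(start));

  std::string out;
  for (const auto& p : parts) {
    if (p == "Number") out += std::to_string(number);
    else if (p == "RepresentationID") out += repId;
    else if (p == "Bandwidth") out += std::to_string(bandwidth);
    else out += p;
  }
  return out;
}

int main() {
  // e.g. a tile-track template as it might appear in an OMAF MPD (illustrative values)
  std::cout << FillTemplate("Test_$RepresentationID$.$Number$.mp4", "track1", 42, 0) << "\n";
  // prints: Test_track1.42.mp4
  return 0;
}
```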
m_startNumber member in class - //! \param [in] StartNumber - //! m_startNumber name in class - //! - //! \return void - //! - MEMBER_SET_AND_GET_FUNC(int32_t, m_startNumber, StartNumber); - - //! - //! \brief Set function for m_timescale member - //! - //! \param [in] int32_t - //! value to set - //! \param [in] m_timescale - //! m_timescale member in class - //! \param [in] Timescale - //! m_timescale name in class - //! - //! \return void - //! - MEMBER_SET_AND_GET_FUNC(int32_t, m_timescale, Timescale); - - //! - //! \brief Set function for m_url member - //! - //! \param [in] string - //! value to set - //! \param [in] m_url - //! m_url member in class - //! \param [in] URL - //! m_url name in class - //! - //! \return void - //! - MEMBER_SET_AND_GET_FUNC(string, m_url, URL); // TBD - - //! - //! \brief Initialization process - //! - //! \param [in] baseURL - //! A vector of BaseUrlElement pointer - //! \param [in] representation - //! A string of representationID address - //! \param [in] number - //! The index of this element - //! \param [in] bandwidth - //! Which bandwidth does this element belong to - //! \param [in] time - //! An int of current time - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus InitDownload(vector& baseURL, string& representationID, int32_t number = 0, int32_t bandwidth = 0, int32_t time = 0); - - //! - //! \brief Reset initialization process - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus ResetDownload(); - - //! - //! \brief Reset download process - //! - //! \param [in] observer - //! A pointer of OmafDownloaderObserver class - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus StartDownloadSegment(OmafDownloaderObserver* observer); - - //! - //! \brief Read given size stream to data pointer - //! - //! \param [in] size - //! size of stream that should read - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus Read(uint8_t* data, size_t size); - - //! - //! \brief Peek given size stream to data pointer - //! - //! \param [in] size - //! size of stream that should read - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus Peek(uint8_t* data, size_t size); - - //! - //! \brief Peek given size stream to data pointer start from offset - //! - //! \param [in] size - //! size of stream that should read - //! \param [in] offset - //! stream offset that read should start - //! \param [out] data - //! pointer stores read stream data - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus Peek(uint8_t* data, size_t size, size_t offset); - - //! - //! \brief Initialization process - //! - //! \param [in] observer - //! A pointer of OmafDownloaderObserver class - //! - //! \return ODStatus - //! OD_STATUS_SUCCESS if success, else fail reason - //! - ODStatus StopDownloadSegment(OmafDownloaderObserver* observer = nullptr); - -private: - - //! - //! \brief Generate the complete URL - //! - //! \param [in] baseURL - //! A vector of BaseUrlElement pointer - //! \param [in] representation - //! A string of representationID address - //! \param [in] number - //! The index of this element - //! \param [in] bandwidth - //! Which bandwidth does this element belong to - //! 
\param [in] time - //! An int of current time - //! - //! \return string - //! The string of complete URL - //! - string GenerateCompleteURL(vector& baseURL, string& representationID, int32_t number, int32_t bandwidth, int32_t time); - - string m_media; //!< the media attribute - string m_initialization; //!< the initialization attribute - int32_t m_duration; //!< the duration attribute - int32_t m_startNumber; //!< the startNumber attribute - int32_t m_timescale; //!< the timescale attribute - - // download part - string m_url; //!< the string to save URL - OmafDownloader *m_downloader; //!< the downloader member +class SegmentElement : public OmafElementBase { + public: + //! + //! \brief Constructor + //! + SegmentElement(); + + //! + //! \brief Destructor + //! + virtual ~SegmentElement(); + + //! + //! \brief Set function for m_media member + //! + //! \param [in] string + //! value to set + //! \param [in] m_media + //! m_media member in class + //! \param [in] Media + //! m_media name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_media, Media); + + //! + //! \brief Set function for m_initialization member + //! + //! \param [in] string + //! value to set + //! \param [in] m_initialization + //! m_initialization member in class + //! \param [in] Initialization + //! m_initialization name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(string, m_initialization, Initialization); + + //! + //! \brief Set function for m_duration member + //! + //! \param [in] int32_t + //! value to set + //! \param [in] m_duration + //! m_duration member in class + //! \param [in] Duration + //! m_duration name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(int32_t, m_duration, Duration); + + //! + //! \brief Set function for m_startNumber member + //! + //! \param [in] int32_t + //! value to set + //! \param [in] m_startNumber + //! m_startNumber member in class + //! \param [in] StartNumber + //! m_startNumber name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(int32_t, m_startNumber, StartNumber); + + //! + //! \brief Set function for m_timescale member + //! + //! \param [in] int32_t + //! value to set + //! \param [in] m_timescale + //! m_timescale member in class + //! \param [in] Timescale + //! m_timescale name in class + //! + //! \return void + //! + MEMBER_SET_AND_GET_FUNC(int32_t, m_timescale, Timescale); + + //! + //! \brief Generate the complete URL + //! + //! \param [in] baseURL + //! A vector of BaseUrlElement pointer + //! \param [in] representation + //! A string of representationID address + //! \param [in] number + //! The index of this element + //! \param [in] bandwidth + //! Which bandwidth does this element belong to + //! \param [in] time + //! An int of current time + //! + //! \return string + //! The string of complete URL + //! 
+ std::string GenerateCompleteURL(const vector& baseURL, string& representationID, int32_t number, + int32_t bandwidth = 0, int32_t time = 0); + + private: + string m_media; //!< the media attribute + string m_initialization; //!< the initialization attribute + int32_t m_duration; //!< the duration attribute + int32_t m_startNumber; //!< the startNumber attribute + int32_t m_timescale; //!< the timescale attribute }; VCD_OMAF_END; -#endif //SEGMENTELEMENT_H +#endif // SEGMENTELEMENT_H diff --git a/src/OmafDashAccess/OmafDashParser/SphRegionQualityElement.cpp b/src/OmafDashAccess/OmafDashParser/SphRegionQualityElement.cpp index debe0843..3a3ecf39 100644 --- a/src/OmafDashAccess/OmafDashParser/SphRegionQualityElement.cpp +++ b/src/OmafDashAccess/OmafDashParser/SphRegionQualityElement.cpp @@ -67,7 +67,7 @@ SphRegionQualityElement::~SphRegionQualityElement() ODStatus SphRegionQualityElement::AddQualityInfo(QualityInfoElement* qualityInfo) { - CheckNullPtr_PrintLog_ReturnStatus(qualityInfo, "The input qualityInfo is null.", ERROR, OD_STATUS_INVALID); + CheckNullPtr_PrintLog_ReturnStatus(qualityInfo, "The input qualityInfo is null.\n", LOG_ERROR, OD_STATUS_INVALID); m_qualityInfo.push_back(qualityInfo); @@ -98,7 +98,7 @@ ContentCoverage* SphRegionQualityElement::GetContentCoverage() return m_contentCoverage; m_contentCoverage = new ContentCoverage(); - CheckNullPtr_PrintLog_ReturnNullPtr(m_contentCoverage, "Failed to create content coverage for SphRegionQualityElement", ERROR); + CheckNullPtr_PrintLog_ReturnNullPtr(m_contentCoverage, "Failed to create content coverage for SphRegionQualityElement\n", LOG_ERROR); m_contentCoverage->shape_type = m_shape_type; m_contentCoverage->view_idc_presence = m_viewIdcPresence; diff --git a/src/OmafDashAccess/OmafDashParser/SphRegionQualityElement.h b/src/OmafDashAccess/OmafDashParser/SphRegionQualityElement.h index 1c823b1a..fa2c6ab6 100644 --- a/src/OmafDashAccess/OmafDashParser/SphRegionQualityElement.h +++ b/src/OmafDashAccess/OmafDashParser/SphRegionQualityElement.h @@ -161,6 +161,10 @@ class SphRegionQualityElement: public OmafElementBase //! 
ContentCoverage* GetContentCoverage(); +private: + SphRegionQualityElement& operator=(const SphRegionQualityElement& other) { return *this; }; + SphRegionQualityElement(const SphRegionQualityElement& other) { /* do not create copies */ }; + private: int32_t m_shape_type; //!< the shape_type attribute diff --git a/src/OmafDashAccess/OmafDashParser/SupplementalPropertyElement.cpp b/src/OmafDashAccess/OmafDashParser/SupplementalPropertyElement.cpp index ed36f519..414b77c3 100644 --- a/src/OmafDashAccess/OmafDashParser/SupplementalPropertyElement.cpp +++ b/src/OmafDashAccess/OmafDashParser/SupplementalPropertyElement.cpp @@ -40,6 +40,7 @@ SupplementalPropertyElement::SupplementalPropertyElement() m_srqr = nullptr; m_preselection = nullptr; m_sphRegionQuality = nullptr; + m_twoDRegionQuality = nullptr; } SupplementalPropertyElement::~SupplementalPropertyElement() @@ -52,6 +53,8 @@ SupplementalPropertyElement::~SupplementalPropertyElement() SAFE_DELETE(m_preselection); } SAFE_DELETE(m_sphRegionQuality); + SAFE_DELETE(m_twoDRegionQuality); + m_twoDQualityInfos.clear(); } @@ -60,22 +63,22 @@ ODStatus SupplementalPropertyElement::ParseSchemeIdUriAndValue() if(GetSchemeIdUri() == SCHEMEIDURI_SRD) { if(0 == GetValue().length()) - LOG(WARNING)<<"SRD doesn't have value."<SetInfo((char*)GetValue().c_str()); } else if(GetSchemeIdUri() == SCHEMEIDURI_SRQR) { m_srqr = new SphereQuality(); - CheckNullPtr_PrintLog_ReturnStatus(m_srqr, "Failed to create SphereQuality.", ERROR, OD_STATUS_OPERATION_FAILED); + CheckNullPtr_PrintLog_ReturnStatus(m_srqr, "Failed to create SphereQuality.\n", LOG_ERROR, OD_STATUS_OPERATION_FAILED); } else if(GetSchemeIdUri() == SCHEMEIDURI_PRESELECTION) { m_preselection = new PreselValue(); - CheckNullPtr_PrintLog_ReturnStatus(m_preselection, "Failed to create PreselValue.", ERROR, OD_STATUS_OPERATION_FAILED); + CheckNullPtr_PrintLog_ReturnStatus(m_preselection, "Failed to create PreselValue.\n", LOG_ERROR, OD_STATUS_OPERATION_FAILED); string preselVal = GetValue(); std::vector splitTag; @@ -92,12 +95,11 @@ ODStatus SupplementalPropertyElement::ParseSchemeIdUriAndValue() if(s.length()) m_preselection->SelAsIDs.push_back(StringToInt(s)); else - LOG(WARNING)<<"this adaptation set ID is invalid."<shape_type = sphRegionQuality->GetShapeType(); @@ -124,4 +126,16 @@ ODStatus SupplementalPropertyElement::SetSphereRegionQuality(SphRegionQualityEle return OD_STATUS_SUCCESS; } +ODStatus SupplementalPropertyElement::SetTwoDRegionQuality(TwoDRegionQualityElement* twoDRegionQuality) +{ + if (!twoDRegionQuality) + return OD_STATUS_INVALID; + + m_twoDRegionQuality = twoDRegionQuality; + + m_twoDQualityInfos = m_twoDRegionQuality->GetTwoDQualityInfos(); + + return OD_STATUS_SUCCESS; +} + VCD_OMAF_END; diff --git a/src/OmafDashAccess/OmafDashParser/SupplementalPropertyElement.h b/src/OmafDashAccess/OmafDashParser/SupplementalPropertyElement.h index 0e0915ed..a776b211 100644 --- a/src/OmafDashAccess/OmafDashParser/SupplementalPropertyElement.h +++ b/src/OmafDashAccess/OmafDashParser/SupplementalPropertyElement.h @@ -35,6 +35,7 @@ #include "OmafElementBase.h" #include "DescriptorElement.h" #include "SphRegionQualityElement.h" +#include "TwoDRegionQualityElement.h" VCD_OMAF_BEGIN @@ -71,6 +72,7 @@ class SupplementalPropertyElement: public OmafElementBase, public DescriptorElem //! ODStatus SetSphereRegionQuality(SphRegionQualityElement* sphRegionQuality); + ODStatus SetTwoDRegionQuality(TwoDRegionQualityElement* twoDRegionQuality); //! //! \brief Get member m_srd //! 
@@ -103,12 +105,21 @@ class SupplementalPropertyElement: public OmafElementBase, public DescriptorElem //! ContentCoverage* GetContentCoverage() {return m_sphRegionQuality ? m_sphRegionQuality->GetContentCoverage() : nullptr;} + map GetTwoDRegionQualityInfos() { return m_twoDQualityInfos; } + +private: + SupplementalPropertyElement& operator=(const SupplementalPropertyElement& other) { return *this; }; + SupplementalPropertyElement(const SupplementalPropertyElement& other) { /* do not create copies */ }; + private: SphRegionQualityElement *m_sphRegionQuality; //!< pointer of the omaf:sphRegionQuality child elements OmafSrd *m_srd; //!< pointer of the OmafSrd child elements SphereQuality *m_srqr; //!< pointer of the SphereQuality child elements PreselValue *m_preselection; //!< pointer of the PreselValue child elements + TwoDRegionQualityElement *m_twoDRegionQuality; + + map m_twoDQualityInfos; }; VCD_OMAF_END; diff --git a/src/OmafDashAccess/OmafDashParser/TwoDRegionQualityElement.cpp b/src/OmafDashAccess/OmafDashParser/TwoDRegionQualityElement.cpp new file mode 100644 index 00000000..03e128c5 --- /dev/null +++ b/src/OmafDashAccess/OmafDashParser/TwoDRegionQualityElement.cpp @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2020, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: TwoDRegionQualityElement.cpp +//! \brief: urn:mpeg:mpegI:omaf:2017:2dqr element class +//! 
+ +#include "TwoDRegionQualityElement.h" + +VCD_OMAF_BEGIN + +TwoDRegionQualityElement::TwoDRegionQualityElement() +{ +} + +TwoDRegionQualityElement::~TwoDRegionQualityElement() +{ + m_twoDQualityInfos.clear(); + if(m_qualityInfo.size()) + { + for(auto qi : m_qualityInfo) + { + SAFE_DELETE(qi); + } + m_qualityInfo.clear(); + } +} + +ODStatus TwoDRegionQualityElement::AddQualityInfo(QualityInfoElement* qualityInfo) +{ + CheckNullPtr_PrintLog_ReturnStatus(qualityInfo, "The input qualityInfo is null.\n", LOG_ERROR, OD_STATUS_INVALID); + + m_qualityInfo.push_back(qualityInfo); + + TwoDQualityInfo planarQualityInfo; + memset(&planarQualityInfo, 0, sizeof(TwoDQualityInfo)); + planarQualityInfo.orig_height = qualityInfo->GetOrigHeight(); + planarQualityInfo.orig_width = qualityInfo->GetOrigWidth(); + planarQualityInfo.quality_ranking = qualityInfo->GetQualityRanking(); + planarQualityInfo.region_width = qualityInfo->GetRegionWidth(); + planarQualityInfo.region_height = qualityInfo->GetRegionHeight(); + + m_twoDQualityInfos.insert(make_pair(planarQualityInfo.quality_ranking, planarQualityInfo)); + + return OD_STATUS_SUCCESS; +} + +map TwoDRegionQualityElement::GetTwoDQualityInfos() +{ + return m_twoDQualityInfos; +} + +VCD_OMAF_END; diff --git a/src/OmafDashAccess/OmafDashParser/TwoDRegionQualityElement.h b/src/OmafDashAccess/OmafDashParser/TwoDRegionQualityElement.h new file mode 100644 index 00000000..841846f6 --- /dev/null +++ b/src/OmafDashAccess/OmafDashParser/TwoDRegionQualityElement.h @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2020, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! +//! \file: TwoDRegionQualityElement.h +//! \brief: omaf:twoDRegionQuality element class +//! + +#ifndef TWODREGIONQUALITYELEMENT_H +#define TWODREGIONQUALITYELEMENT_H +#include "QualityInfoElement.h" + +VCD_OMAF_BEGIN + +class TwoDRegionQualityElement: public OmafElementBase +{ +public: + + //! + //! \brief Constructor + //! + TwoDRegionQualityElement(); + + //! + //! \brief Destructor + //! + virtual ~TwoDRegionQualityElement(); + + //! + //! \brief Add an pointer of QualityInfo element + //! + //! \param [in] qualityInfo + //! An pointer of QualityInfo element class + //! + //! \return ODStatus + //! 
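`TwoDRegionQualityElement` (the `2dqr` descriptor) collects one `TwoDQualityInfo` per `qualityInfo` child and indexes the entries by `quality_ranking`, so a caller can look up the encoded region size and original picture size for a given quality level. A minimal consumer of such a map might look like the sketch below; `TwoDQualityInfo` is re-declared locally (with the fields parsed in `BuildQualityInfo`) only to keep the example self-contained, and the resolutions are invented.

```cpp
#include <cstdint>
#include <iostream>
#include <map>

// Local stand-in for the project's TwoDQualityInfo struct.
struct TwoDQualityInfo {
  int32_t quality_ranking;
  int32_t orig_width;
  int32_t orig_height;
  int32_t region_width;
  int32_t region_height;
};

int main() {
  std::map<int32_t, TwoDQualityInfo> qualities;  // keyed by quality_ranking
  qualities[1] = {1, 3840, 2160, 3840, 2160};    // high quality: full resolution
  qualities[2] = {2, 3840, 2160, 1280, 720};     // low quality: downscaled region

  for (const auto& entry : qualities) {
    const TwoDQualityInfo& q = entry.second;
    std::cout << "ranking " << q.quality_ranking << ": region " << q.region_width
              << "x" << q.region_height << " of source " << q.orig_width << "x"
              << q.orig_height << "\n";
  }
  return 0;
}
```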
OD_STATUS_SUCCESS if success, else fail reason + //! + ODStatus AddQualityInfo(QualityInfoElement* qualityInfo); + + //! + //! \brief Get Quality Information + //! + //! \return map + //! a map of all planar qality info + //! + //vector GetQualityInfos(); + map GetTwoDQualityInfos(); + +private: + TwoDRegionQualityElement& operator=(const TwoDRegionQualityElement& other) { return *this; }; + TwoDRegionQualityElement(const TwoDRegionQualityElement& other) { /* do not create copies */ }; + +private: + + vector m_qualityInfo; //!< the omaf:qualityInfo child elements + + map m_twoDQualityInfos; //!< the map of for all planar video sources +}; + +VCD_OMAF_END; + +#endif //TWODREGIONQUALITYELEMENT_H diff --git a/src/OmafDashAccess/OmafDashParser/ViewportElement.cpp b/src/OmafDashAccess/OmafDashParser/ViewportElement.cpp index db8a9f8a..23ebf600 100644 --- a/src/OmafDashAccess/OmafDashParser/ViewportElement.cpp +++ b/src/OmafDashAccess/OmafDashParser/ViewportElement.cpp @@ -43,7 +43,7 @@ ODStatus ViewportElement::ParseSchemeIdUriAndValue() if(GetSchemeIdUri() == SCHEMEIDURI_VIEWPORT) { if(0 == GetValue().length()) - LOG(WARNING)<<"SRD doesn't have value."< + +#include "OmafAdaptationSet.h" + +VCD_OMAF_BEGIN + +class OmafDashRangeSyncImpl : public OmafDashRangeSync { + public: + OmafDashRangeSyncImpl(const OmafAdaptationSet& oas, SegmentSyncNodeCB cb) : adaptation_set_(oas), sync_cb_(cb){}; + virtual ~OmafDashRangeSyncImpl() {} + + public: + virtual std::string getUrl(const SegmentSyncNode& value) const override; + virtual SegmentSyncNode getSegmentNode() override { + SegmentSyncNode node; + node.segment_value.number_ = adaptation_set_.GetSegmentNumber(); + return node; + }; + virtual int64_t getStartSegment() override; + virtual void notifyRangeChange(SyncRange range) override; + + private: + const OmafAdaptationSet& adaptation_set_; + SegmentSyncNodeCB sync_cb_; +}; + +OmafDashRangeSync::Ptr make_omaf_syncer(const OmafAdaptationSet& oas, SegmentSyncNodeCB cb) { + return std::make_shared(oas, cb); +} + +std::string OmafDashRangeSyncImpl::getUrl(const SegmentSyncNode& value) const { return adaptation_set_.GetUrl(value); }; + +int64_t OmafDashRangeSyncImpl::getStartSegment() { + return 0; // return adaptation_set_.GetSegmentNumber(); +}; + +void OmafDashRangeSyncImpl::notifyRangeChange(SyncRange range) { + int64_t number = adaptation_set_.GetSegmentNumber(); + if (number < range.left_) { + OMAF_LOG(LOG_INFO, "slower than server, reset segment number to left range [%ld, %ld], segment number=%ld\n", range.left_, range.right_, number); + if (sync_cb_) { + SegmentSyncNode node; + node.segment_value.number_ = range.left_; + sync_cb_(node); + } + } else if (number > range.right_) { + OMAF_LOG(LOG_INFO, "faster than server, reset segment number to right range [%ld, %ld], segment number=%ld\n", range.left_, range.right_, number); + if (sync_cb_) { + SegmentSyncNode node; + node.segment_value.number_ = range.right_; + sync_cb_(node); + } + } +}; + +int OmafDashSourceSyncHelper::start(CurlParams params) noexcept { + try { + checker_.reset(new OmafCurlChecker()); + int ret = ERROR_NONE; + + ret = checker_->init(params); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to init the curl checker with error: %d\n", ret); + return ret; + } + bsyncing_ = true; + sync_worker_ = std::thread(&OmafDashSourceSyncHelper::threadRunner, this); + + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when start the dash source sync, ex: %s\n", ex.what()); + return ERROR_INVALID; 
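`OmafDashRangeSyncImpl::notifyRangeChange` is essentially a clamp: if the adaptation set's current segment number has drifted outside the server's available window `[left_, right_]`, the callback resets it to the nearest edge. Reduced to its core, the logic is just the snippet below; the callback type is simplified for illustration and the segment numbers are made up.

```cpp
#include <cstdint>
#include <functional>
#include <iostream>

struct SyncRange {
  int64_t left_;
  int64_t right_;
};

// Clamp the local segment number into the server's available range and report
// the corrected value through the callback, as notifyRangeChange does.
static void ClampToRange(int64_t number, const SyncRange& range,
                         const std::function<void(int64_t)>& on_reset) {
  if (number < range.left_) {
    on_reset(range.left_);   // local playback fell behind the live window
  } else if (number > range.right_) {
    on_reset(range.right_);  // local playback ran ahead of the live window
  }                          // otherwise: in range, nothing to do
}

int main() {
  SyncRange range{100, 119};
  auto report = [](int64_t corrected) { std::cout << "reset to " << corrected << "\n"; };
  ClampToRange(90, range, report);   // prints: reset to 100
  ClampToRange(110, range, report);  // in range, no output
  ClampToRange(130, range, report);  // prints: reset to 119
  return 0;
}
```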
+ } +} + +int OmafDashSourceSyncHelper::stop() noexcept { + try { + bsyncing_ = false; + if (sync_worker_.joinable()) { + bsyncing_ = false; + sync_worker_.join(); + } + return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when stop the dash source sync, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +void OmafDashSourceSyncHelper::addSyncer(OmafDashRangeSync::Ptr syncer) noexcept { + syncers_range_.emplace_back(nullptr); + syncers_.push_back(std::move(syncer)); +} + +void OmafDashSourceSyncHelper::threadRunner() noexcept { + try { + while (bsyncing_) { + int syncer_index = 0; + auto start = std::chrono::high_resolution_clock::now(); + + for (auto syncer : syncers_) { + if (!bsyncing_) { + break; + } + + auto& range = syncers_range_[syncer_index++]; + + if (range.get() == nullptr) { + range.reset(new SyncRange()); + if (!initRange(syncer, range)) { + OMAF_LOG(LOG_ERROR, "Failed to initialize the sync range!\n" ); + range.reset(); + } + } else { + if (!updateRange(syncer, range)) { + OMAF_LOG(LOG_ERROR, "Failed to update the sync range!\n"); + range.reset(); + } + } + if (range.get() != nullptr) { + syncer->notifyRangeChange(*range.get()); + } + + } // loop all syncer + + // 2. sync frequncy logic + auto end = std::chrono::high_resolution_clock::now(); + std::chrono::duration elapsed = end - start; + int64_t sleep_time = sync_frequency_ - static_cast(elapsed.count() * 1000); + while (bsyncing_ && sleep_time > 0) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + sleep_time--; + } + } // end thread while(bsyncing_) + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception in the dash source sync runner, ex: %s\n", ex.what()); + } +} + +bool OmafDashSourceSyncHelper::initRange(OmafDashRangeSync::Ptr syncer, std::shared_ptr range) noexcept { + try { + SegmentSyncNode syncnode = syncer->getSegmentNode(); + OMAF_LOG(LOG_INFO, "Calling initRange from start point: %ld\n", syncnode.segment_value.number_); + int64_t left_check_start = syncnode.segment_value.number_; + int64_t right_check_start = syncnode.segment_value.number_; + int32_t check_times = 0; + Direction direction = Direction::RIGHT; + bool meetleft = false; + int64_t point = 0; + bool bfind = true; + while (bsyncing_ && check_times < check_range_times_) { + if (direction == Direction::RIGHT) { + bfind = findRange(syncer, right_check_start, direction, point); + if (bfind) { + bfind = findRangeEdge(syncer, point, range); + break; + } else { + if (!meetleft) { + direction = Direction::LEFT; + } + right_check_start += range_size_ * check_range_strides_; + } + } else { + bfind = findRange(syncer, left_check_start, direction, point); + if (bfind) { + bfind = findRangeEdge(syncer, point, range); + break; + } else if (point == MEET_LEFT) { + meetleft = true; + } + direction = Direction::RIGHT; + left_check_start -= range_size_ * check_range_strides_; + } + } + + return bfind; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception create the range, ex: %s\n", ex.what()); + return false; + } +} +bool OmafDashSourceSyncHelper::findRange(OmafDashRangeSync::Ptr syncer, int64_t check_start, Direction direction, + int64_t& point) noexcept { + try { + SegmentSyncNode syncnode = syncer->getSegmentNode(); + int64_t index = 0; + while (bsyncing_ && (index < check_range_strides_)) { + if (direction == Direction::RIGHT) { + syncnode.segment_value.number_ = check_start + index * range_size_; + } else { + syncnode.segment_value.number_ = check_start - index * 
range_size_; + if (syncnode.segment_value.number_ <= syncer->getStartSegment()) { + point = MEET_LEFT; + return false; + } + } + std::string url = syncer->getUrl(syncnode); + OMAF_LOG(LOG_INFO, "To check the url: %s\n", url.c_str()); + if (checker_->check(url)) { + point = syncnode.segment_value.number_; + return true; + } + index++; + } + return false; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception create the range, ex: %s\n", ex.what()); + return false; + } +} + +bool OmafDashSourceSyncHelper::findRangeEdge(OmafDashRangeSync::Ptr syncer, int64_t point, + std::shared_ptr range) noexcept { + try { + SegmentSyncNode syncnode = syncer->getSegmentNode(); + + // 1. find right + int64_t index = 0; + bool pre_valid = false; + while (bsyncing_ && (index < range_size_)) { + syncnode.segment_value.number_ = point + index; + std::string url = syncer->getUrl(syncnode); + bool bvalid = checker_->check(url); + if (!bvalid && pre_valid) { + break; + } + pre_valid = bvalid; + index++; + } + if (!pre_valid) { + OMAF_LOG(LOG_ERROR, "Failed to find the range right edge!\n"); + return false; + } + + range->right_ = point + index - 1; + + // 2. find the left, + // FIXME optimize the search direction in the future + index = 0; + point = range->right_ - range_size_; + while (bsyncing_ && (index < range_size_)) { + syncnode.segment_value.number_ = point + index; + std::string url = syncer->getUrl(syncnode); + if (checker_->check(url)) { + break; + } + index++; + } + range->left_ = point + index; + return true; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception create the range, ex: %s\n", ex.what()); + return false; + } +} +bool OmafDashSourceSyncHelper::updateRange(OmafDashRangeSync::Ptr syncer, std::shared_ptr range) noexcept { + try { + SegmentSyncNode syncnode = syncer->getSegmentNode(); + + // 1. update the left, + int64_t index = 0; + while (bsyncing_ && (index < range_size_)) { + syncnode.segment_value.number_ = range->left_ + index; + std::string url = syncer->getUrl(syncnode); + if (checker_->check(url)) { + break; + } + index++; + } + range->left_ = range->left_ + index; + + // 2. update right + index = 0; + range->right_ = range->left_ + range_size_; + while (bsyncing_ && (index < range_size_)) { + syncnode.segment_value.number_ = range->right_ + index; + std::string url = syncer->getUrl(syncnode); + if (!checker_->check(url)) { + break; + } + index++; + } + + range->right_ = range->right_ + index - 1; + return true; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception create the range, ex: %s\n", ex.what()); + return false; + } +} + +VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafDashRangeSync.h b/src/OmafDashAccess/OmafDashRangeSync.h new file mode 100644 index 00000000..2a8ce323 --- /dev/null +++ b/src/OmafDashAccess/OmafDashRangeSync.h @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + * + */ + +//! +//! \file: OmafDashRangeSync.h +//! \brief: +//! \detail: +//! Created on Jun 4, 2020, 3:18 PM +//! + +#ifndef OMAFDASHSOURCESYNC_H +#define OMAFDASHSOURCESYNC_H + +#include "OmafDashParser/Common.h" +#include "general.h" +#include "OmafDashDownload/OmafCurlEasyHandler.h" + +#include +#include +#include +#include + +namespace VCD { +namespace OMAF { + +enum class SegmentTemplateType { SEGTEMPLATE_NUMBER = 0, SEGTEMPLATE_TIME = 1 }; +struct _segmentSyncNode { + SegmentTemplateType type_ = SegmentTemplateType::SEGTEMPLATE_NUMBER; + int32_t bandwith_ = 0; + union _segment_value { + int64_t number_; + int64_t time_; + } segment_value; +}; + +struct _syncRange { + int64_t left_; + int64_t right_; +}; + +using SegmentSyncNode = struct _segmentSyncNode; +using SyncRange = struct _syncRange; +using SegmentSyncNodeCB = std::function; + +class OmafDashRangeSync : public std::enable_shared_from_this { + public: + using Ptr = std::shared_ptr; + + public: + OmafDashRangeSync(){}; + OmafDashRangeSync(OmafDashRangeSync &&) = default; + OmafDashRangeSync(const OmafDashRangeSync &) = default; + OmafDashRangeSync &operator=(OmafDashRangeSync &&) = default; + OmafDashRangeSync &operator=(const OmafDashRangeSync &) = default; + virtual ~OmafDashRangeSync(){}; + + public: + virtual std::string getUrl(const SegmentSyncNode &value) const = 0; + virtual SegmentSyncNode getSegmentNode() = 0; + virtual int64_t getStartSegment() = 0; + virtual void notifyRangeChange(SyncRange range) = 0; +}; + +class OmafAdaptationSet; + +OmafDashRangeSync::Ptr make_omaf_syncer(const OmafAdaptationSet &, SegmentSyncNodeCB); + +class OmafDashSourceSyncHelper : public VCD::NonCopyable { + public: + OmafDashSourceSyncHelper(){}; + + virtual ~OmafDashSourceSyncHelper() { stop(); }; + + private: + enum class Direction { + LEFT = 0, + RIGHT = 1, + }; + + public: + int start(CurlParams params) noexcept; + int stop() noexcept; + + public: + void addSyncer(OmafDashRangeSync::Ptr) noexcept; + void setWindowSize(int s) noexcept { + if (s > 0) range_size_ = s - 1; + } + void setSyncFrequency(int ms) noexcept { sync_frequency_ = ms; }; + + private: + void threadRunner() noexcept; + bool initRange(OmafDashRangeSync::Ptr, std::shared_ptr) noexcept; + bool findRange(OmafDashRangeSync::Ptr, int64_t check_start, Direction direction, int64_t &point) noexcept; + bool findRangeEdge(OmafDashRangeSync::Ptr, int64_t point, std::shared_ptr range) noexcept; + bool updateRange(OmafDashRangeSync::Ptr, std::shared_ptr range) noexcept; + + private: + std::shared_ptr checker_; + int64_t sync_frequency_ = 1000; + int32_t range_size_ = 19; + int32_t check_range_strides_ = 10; + int32_t check_range_times_ = 1000; + // std::mutex syncers_mutex_; + // not use 
weak_ptr to simple the logic + std::vector syncers_; + std::vector> syncers_range_; + + std::thread sync_worker_; + bool bsyncing_ = false; + + const int64_t NOT_FOUND = -1; + const int64_t MEET_LEFT = -2; + const int64_t EXCEPTION = -3; +}; +} // namespace OMAF +} // namespace VCD + +#endif // OMAFDASHSOURCESYNC_H \ No newline at end of file diff --git a/src/OmafDashAccess/OmafDashSource.cpp b/src/OmafDashAccess/OmafDashSource.cpp index 3496d39c..c865dcd0 100644 --- a/src/OmafDashAccess/OmafDashSource.cpp +++ b/src/OmafDashAccess/OmafDashSource.cpp @@ -27,628 +27,799 @@ */ #include "OmafDashSource.h" +#include +#include #include +#include "OmafExtractorTracksSelector.h" #include "OmafReaderManager.h" -#include -#include +#include "OmafTileTracksSelector.h" +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ +#include +#include "../trace/Bandwidth_tp.h" +#include "../trace/MtHQ_tp.h" +#include "../trace/E2E_latency_tp.h" +#endif +#endif VCD_OMAF_BEGIN -#define MAX_CACHE_SIZE 100*1024*1024 - -OmafDashSource::OmafDashSource() -{ - mMPDParser = NULL; - mStatus = STATUS_CREATED; - mViewPortChanged = false; - pthread_mutex_init(&mMutex, NULL); - memset(&mHeadSetInfo, 0, sizeof(mHeadSetInfo)); - memset(&mPose, 0, sizeof(mPose)); - mLoop = false; - mEOS = false; - mSelector = new OmafExtractorSelector(); - mMPDinfo = nullptr; - dcount = 1; - m_glogWrapper = new GlogWrapper((char*)"glogAccess"); +#define MAX_CACHE_SIZE 100 * 1024 * 1024 + +OmafDashSource::OmafDashSource() { + mMPDParser = nullptr; + mStatus = STATUS_CREATED; + mViewPortChanged = false; + memset(&mHeadSetInfo, 0, sizeof(mHeadSetInfo)); + memset(&mPose, 0, sizeof(mPose)); + mLoop = false; + mEOS = false; + // mSelector = new OmafExtractorSelector(); + m_selector = nullptr; + mMPDinfo = nullptr; + dcount = 1; + mPreExtractorID = 0; + m_stitch = nullptr; + mIsLocalMedia = false; } -OmafDashSource::~OmafDashSource() -{ - pthread_mutex_destroy( &mMutex ); - SAFE_DELETE(mMPDParser); - SAFE_DELETE(mSelector); - SAFE_DELETE(mMPDinfo); - mViewPorts.clear(); - ClearStreams(); - SAFE_DELETE(m_glogWrapper); +OmafDashSource::~OmafDashSource() { + SAFE_DELETE(mMPDParser); + // SAFE_DELETE(mSelector); + SAFE_DELETE(m_selector); + SAFE_DELETE(mMPDinfo); + mViewPorts.clear(); + ClearStreams(); + SAFE_DELETE(m_stitch); } -int OmafDashSource::SyncTime(std::string url) -{ - // base URL should be "http://IP:port/FilePrefix/" - std::size_t posf = url.find(":"); - std::size_t poss = url.find(":", posf + 1); - if(poss == string::npos) - { - LOG(ERROR)<<"Failed to find IP port in baseURL!"<SetMaxCacheSize(MAX_CACHE_SIZE); + + pDM->SetCacheFolder(cacheDir); + + OmafDashSegmentHttpClient::Ptr http_source = + OmafDashSegmentHttpClient::create(omaf_dash_params_.max_parallel_transfers_); + if (http_source) { + http_source->setProxy(omaf_dash_params_.http_proxy_); + http_source->setParams(omaf_dash_params_.http_params_); + if (omaf_dash_params_.stats_params_.enable_) { + http_source->setStatisticsWindows(omaf_dash_params_.stats_params_.window_size_ms_); + } } else { - LOG(INFO)<<"Failed to open the cache path: " << cacheDir <<" , create a folder with this path!"<start(); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to start the client for omaf dash segment http source, err=%d\n", ret); + return ret; } - ///init download manager - SAFE_DELETE(mMPDParser); + dash_client_ = std::move(http_source); + } - DownloadManager* pDM = DOWNLOADMANAGER::GetInstance(); + mMPDParser = new OmafMPDParser(); + if (nullptr == mMPDParser) return ERROR_NULL_PTR; + 
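// Parser configuration happens before ParseMPD(): the OMAF dash params, the
+  // extractor enablement flag, and the cache directory are handed to the parser;
+  // if parsing returns OMAF_INVALID_EXTRACTOR_ENABLEMENT, extractor mode is
+  // switched off and the later-binding (tile-based) selection path below is used.
+ 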
mMPDParser->SetOmafDashParams(omaf_dash_params_); + mMPDParser->SetExtractorEnabled(enableExtractor); - if (!isLocalMedia) - { - pDM->SetMaxCacheSize(MAX_CACHE_SIZE); + OMAFSTREAMS listStream; + mMPDParser->SetCacheDir(cacheDir); + ret = mMPDParser->ParseMPD(url, listStream); - pDM->SetCacheFolder(cacheDir); - } - - mMPDParser = new OmafMPDParser( ); - - if( NULL == mMPDParser ) return ERROR_NULL_PTR; - - OMAFSTREAMS listStream; - int ret = mMPDParser->ParseMPD(url, listStream); - if(ret != ERROR_NONE) return ret; - - mMPDinfo = this->GetMPDInfo(); + if (ret == OMAF_INVALID_EXTRACTOR_ENABLEMENT) { + enableExtractor = false; //! enableExtractor; + mMPDParser->SetExtractorEnabled(enableExtractor); + } else if (ret != ERROR_NONE) + return ret; - if (!isLocalMedia) + mMPDinfo = this->GetMPDInfo(); + + ProjectionFormat projFmt = mMPDParser->GetProjectionFmt(); + std::string projStr; + if (projFmt == ProjectionFormat::PF_ERP) { + projStr = "ERP"; + } else if (projFmt == ProjectionFormat::PF_CUBEMAP) { + projStr = "CubeMap"; + } else if (projFmt == ProjectionFormat::PF_PLANAR) { + projStr = "Planar"; + } else { + OMAF_LOG(LOG_ERROR, "Invalid projection format !\n"); + return OMAF_ERROR_INVALID_PROJECTIONTYPE; + } + OMAF_LOG(LOG_INFO, "The DASH Source is from %s projection !\n", projStr.c_str()); + if (!mIsLocalMedia) { + // base URL should be "http://IP:port/FilePrefix/" + std::size_t pos = mMPDinfo->baseURL[0].find(":"); + pos = mMPDinfo->baseURL[0].find(":", pos + 1); + if (pos == string::npos) { + OMAF_LOG(LOG_ERROR, "Failed to find IP port in baseURL!\n"); + return ERROR_INVALID; + } + pos = mMPDinfo->baseURL[0].find("/", pos + 1); + if (pos == string::npos) { + OMAF_LOG(LOG_ERROR, "Failed to find file prefix in baseURL!\n"); + return ERROR_INVALID; + } + + std::string prefix = mMPDinfo->baseURL[0].substr(pos + 1, mMPDinfo->baseURL[0].length() - (pos + 1)); + pDM->SetFilePrefix(prefix); + pDM->SetUseCache((cacheDir == "") ? 
false : true); + + // sync local time according to the remote mechine for live mode + if (mMPDinfo->type == TYPE_LIVE) { + SyncTime(mMPDinfo->baseURL[0]); + } + } + + // create the reader manager + { + OmafReaderManager::OmafReaderParams params; + if (mMPDinfo->type == TYPE_STATIC) { + params.stream_type_ = DASH_STREAM_STATIC; + params.duration_ = mMPDinfo->media_presentation_duration; + } else { + params.stream_type_ = DASH_STREAM_DYNMIC; + } + + if (enableExtractor) { + params.mode_ = OmafDashMode::EXTRACTOR; + } else { + params.mode_ = OmafDashMode::LATER_BINDING; + } + params.proj_fmt_ = projFmt; + params.segment_timeout_ms_ = mMPDinfo->max_segment_duration; + + OMAF_LOG(LOG_INFO, "media stream type=%s\n", mMPDinfo->type.c_str()); + OMAF_LOG(LOG_INFO, "media stream duration=%lld\n", mMPDinfo->media_presentation_duration); + OMAF_LOG(LOG_INFO, "media stream extractor=%d\n", enableExtractor); + + OmafReaderManager::Ptr omaf_reader_mgr = std::make_shared(dash_client_, params); + ret = omaf_reader_mgr->Initialize(this); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to start the omaf reader manager, err=%d\n", ret); + return ret; + } + omaf_reader_mgr_ = std::move(omaf_reader_mgr); + } + + int id = 0; + for (auto& stream : listStream) { + this->mMapStream[id] = stream; + stream->SetStreamID(id); + // stream->SetEnabledExtractor(enableExtractor); + stream->SetOmafReaderMgr(omaf_reader_mgr_); + if (!enableExtractor && (stream->GetStreamMediaType() == MediaType_Video)) { - // base URL should be "http://IP:port/FilePrefix/" - std::size_t pos = mMPDinfo->baseURL[0].find(":"); - pos = mMPDinfo->baseURL[0].find(":", pos + 1); - if(pos == string::npos) - { - LOG(ERROR)<<"Failed to find IP port in baseURL!"<baseURL[0].find("/", pos + 1); - if(pos == string::npos) - { - LOG(ERROR)<<"Failed to find file prefix in baseURL!"<baseURL[0].substr(pos+1, mMPDinfo->baseURL[0].length() - (pos+1)); - pDM->SetFilePrefix(prefix); - pDM->SetUseCache( (cacheDir == "")?false:true ); - - // sync local time according to the remote mechine for live mode - if(mMPDinfo->type == TYPE_LIVE) - SyncTime(mMPDinfo->baseURL[0]); - } - - int id = 0; - for(auto it=listStream.begin(); it!=listStream.end(); it++){ - this->mMapStream[id] = (OmafMediaStream*)(*it); - (*it)->SetStreamID(id); - id++; - } - - if(enablePredictor) mSelector->EnablePosePrediction(); - // Setup initial Viewport and select Adaption Set - auto it = mMapStream.begin(); - if (it == mMapStream.end()) + stream->SetMaxStitchResolution(omaf_dash_params_.max_decode_width_, omaf_dash_params_.max_decode_height_); + } + id++; + } + + if (enableExtractor) { + m_selector = new OmafExtractorTracksSelector(); + if (!m_selector) { + OMAF_LOG(LOG_ERROR, "Failed to create extractor tracks selector !\n"); + return ERROR_NULL_PTR; + } + } else { + m_selector = new OmafTileTracksSelector(); + if (!m_selector) { + OMAF_LOG(LOG_ERROR, "Failed to create tile tracks selector !\n"); + return ERROR_NULL_PTR; + } + } + + m_selector->SetProjectionFmt(projFmt); + if (projFmt == ProjectionFormat::PF_PLANAR) + { + m_selector->SetTwoDQualityInfos(mMPDParser->GetTwoDQualityInfos()); + } + if (enablePredictor) m_selector->EnablePosePrediction(predictPluginName, libPath, enableExtractor); + m_selector->SetSegmentDuration(mMPDinfo->max_segment_duration); + m_selector->SetI360SCVPPlugin(i360scvp_plugin); + + for (auto it = mMapStream.begin(); it != mMapStream.end(); it++) + { + if ((it->second)->GetStreamMediaType() == MediaType_Video) { - return ERROR_INVALID; + ret = 
m_selector->SetInitialViewport(mViewPorts, &mHeadSetInfo, (it->second)); + if (ret != ERROR_NONE) return ret; } - ret = mSelector->SetInitialViewport(mViewPorts, &mHeadSetInfo, (it->second)); - if(ret != ERROR_NONE) return ret; - - // set status - this->SetStatus( STATUS_READY ); + } - READERMANAGER::GetInstance()->Initialize(this); + // set status + this->SetStatus(STATUS_READY); - ///if MPD is static one, don't create thread to download - if (!isLocalMedia) - { - StartThread(); - } - - return ERROR_NONE; + return ERROR_NONE; } -int OmafDashSource::GetTrackCount() +int OmafDashSource::StartStreaming() { - int cnt = 0; - std::map::iterator it; - for(it=this->mMapStream.begin(); it!=this->mMapStream.end(); it++){ - OmafMediaStream* pStream = (OmafMediaStream*)it->second; - cnt += pStream->GetTrackCount(); - } + if (!mIsLocalMedia) { + StartThread(); + } + return ERROR_NONE; +} - return cnt; +int OmafDashSource::GetTrackCount() { + int cnt = 0; + std::map::iterator it; + for (it = this->mMapStream.begin(); it != this->mMapStream.end(); it++) { + OmafMediaStream* pStream = (OmafMediaStream*)it->second; + cnt += pStream->GetTrackCount(); + } + OMAF_LOG(LOG_INFO, "All tracks cnt %d\n", cnt); + return cnt; } -void OmafDashSource::StopThread() -{ - this->SetStatus(STATUS_EXITING); - this->Join(); +void OmafDashSource::StopThread() { + this->SetStatus(STATUS_EXITING); + this->Join(); } -int OmafDashSource::CloseMedia() -{ - if( STATUS_STOPPED != this->GetStatus() ) - this->StopThread(); +int OmafDashSource::CloseMedia() { + if (STATUS_STOPPED != this->GetStatus()) this->StopThread(); - READERMANAGER::GetInstance()->Close(); + // READERMANAGER::GetInstance()->Close(); + if (omaf_reader_mgr_ != nullptr) { + omaf_reader_mgr_->Close(); + } - return ERROR_NONE; + for (auto it : mMapStream) { + OmafMediaStream* stream = it.second; + stream->Close(); + } + + return ERROR_NONE; } -int OmafDashSource::GetPacket( - int streamID, - std::list* pkts, - bool needParams, - bool clearBuf ) -{ - OmafMediaStream* pStream = this->GetStream(streamID); - - MediaPacket* pkt = NULL; - - if(pStream->HasExtractor()){ - std::list extractors = pStream->GetEnabledExtractor(); - for(auto it=extractors.begin(); it!=extractors.end(); it++){ - OmafExtractor* pExt = (OmafExtractor*)(*it); - int ret = READERMANAGER::GetInstance()->GetNextFrame(pExt->GetTrackNumber(), pkt, needParams); - if(ret == ERROR_NONE) - { - pkts->push_back(pkt); - } +int OmafDashSource::GetPacket(int streamID, std::list* pkts, bool needParams, bool clearBuf) { + OmafMediaStream* pStream = this->GetStream(streamID); + + MediaPacket* pkt = nullptr; + + int currentExtractorID = 0; + + if (pStream->HasExtractor()) { + std::list extractors = pStream->GetEnabledExtractor(); + int enabledSize = pStream->GetExtractorSize(); + int totalSize = pStream->GetTotalExtractorSize(); + for (auto it = extractors.begin(); it != extractors.end(); it++) { + OmafExtractor* pExt = (OmafExtractor*)(*it); + int trackNumber = pExt->GetTrackNumber(); + if (enabledSize < totalSize) // normal track + { + size_t remainSize = 0; + currentExtractorID = pExt->GetTrackNumber(); + omaf_reader_mgr_->GetPacketQueueSize(mPreExtractorID, remainSize); + if (mPreExtractorID != currentExtractorID) { + if (remainSize > 0) // if there exit remaining data in previous Extractor, then need to pop up them all. + { + trackNumber = mPreExtractorID; + OMAF_LOG(LOG_INFO, "Remaining data in previous track id have to be got! 
remainSize is %lld\n", remainSize); + } else // if there is no data in previous track, then fetch data in current track. + { + mPreExtractorID = currentExtractorID; + } } - }else{ - std::map mapAS = pStream->GetMediaAdaptationSet(); - for(auto as_it=mapAS.begin(); as_it!=mapAS.end(); as_it++){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it->second); - int ret = READERMANAGER::GetInstance()->GetNextFrame(pAS->GetTrackNumber(), pkt, needParams); - if(ret == ERROR_NONE) - pkts->push_back(pkt); - } - } - - return ERROR_NONE; + } + int ret = omaf_reader_mgr_->GetNextPacket(trackNumber, pkt, needParams); + if (ret == ERROR_NONE) { + pkts->push_back(pkt); + } + } + } else { + std::map mapAS = pStream->GetMediaAdaptationSet(); + // std::map mapSelectedAS = pStream->GetSelectedTileTracks(); + if (mapAS.size() == 1) { + //OMAF_LOG(LOG_INFO, "There is only one tile for the video stream !\n"); + if (pStream->GetStreamMediaType() == MediaType_Audio) + { + OMAF_LOG(LOG_INFO, "Get one packet for audio !\n"); + for (auto as_it = mapAS.begin(); as_it != mapAS.end(); as_it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it->second); + int ret = omaf_reader_mgr_->GetNextPacket(pAS->GetTrackNumber(), pkt, true); + if (ret == ERROR_NONE) pkts->push_back(pkt); + } + } + } else { + std::list mergedPackets; + pStream->SetNeedVideoParams(needParams); + mergedPackets = pStream->GetOutTilesMergedPackets(); + // OMAF_LOG(LOG_INFO, " merged packets has the size of %lld\n", mergedPackets.size()); + std::list::iterator itPacket; + for (itPacket = mergedPackets.begin(); itPacket != mergedPackets.end(); itPacket++) { + MediaPacket* onePacket = *itPacket; + pkts->push_back(onePacket); + } + mergedPackets.clear(); + } + } + + return ERROR_NONE; } -int OmafDashSource::GetStatistic(DashStatisticInfo* dsInfo) -{ - DownloadManager* pDM = DOWNLOADMANAGER::GetInstance(); - dsInfo->avg_bandwidth = pDM->GetAverageBitrate(); - dsInfo->immediate_bandwidth = pDM->GetImmediateBitrate(); - return ERROR_NONE; +int OmafDashSource::GetStatistic(DashStatisticInfo* dsInfo) { +#if 0 + DownloadManager *pDM = DOWNLOADMANAGER::GetInstance(); + dsInfo->avg_bandwidth = pDM->GetAverageBitrate(); + dsInfo->immediate_bandwidth = pDM->GetImmediateBitrate(); +#else + if (dsInfo && dash_client_) { + std::unique_ptr perf_stats = dash_client_->statistics(); + if (perf_stats) { + dsInfo->avg_bandwidth = static_cast(perf_stats->download_speed_bps_); + } + } + +#endif + return ERROR_NONE; } -int OmafDashSource::SetupHeadSetInfo(HeadSetInfo* clientInfo) -{ - memcpy(&mHeadSetInfo, clientInfo, sizeof(HeadSetInfo)); - return ERROR_NONE; +int OmafDashSource::SetupHeadSetInfo(HeadSetInfo* clientInfo) { + memcpy_s(&mHeadSetInfo, sizeof(HeadSetInfo), clientInfo, sizeof(HeadSetInfo)); + return ERROR_NONE; } -int OmafDashSource::ChangeViewport(HeadPose* pose) -{ - int ret = mSelector->UpdateViewport( pose ); +int OmafDashSource::ChangeViewport(HeadPose* pose) { + int ret = m_selector->UpdateViewport(pose); - return ret; + return ret; } -int OmafDashSource::GetMediaInfo( DashMediaInfo* media_info ) -{ - MPDInfo *mInfo = this->GetMPDInfo(); - if(!mInfo) return ERROR_NULL_PTR; - - uint32_t pointerLen = sizeof(char*); - - media_info->duration = mInfo->media_presentation_duration; - media_info->stream_count = this->GetStreamCount(); - if(mInfo->type == TYPE_STATIC){ - media_info->streaming_type = 1; - }else{ - media_info->streaming_type = 2; - } - - for(int i=0; istream_count; i++){ - DashStreamInfo* pStreamInfo = this->mMapStream[i]->GetStreamInfo(); - 
media_info->stream_info[i].bit_rate = pStreamInfo->bit_rate; - media_info->stream_info[i].height = pStreamInfo->height; - media_info->stream_info[i].width = pStreamInfo->width; - media_info->stream_info[i].stream_type = pStreamInfo->stream_type; - media_info->stream_info[i].framerate_den = pStreamInfo->framerate_den; - media_info->stream_info[i].framerate_num = pStreamInfo->framerate_num; - media_info->stream_info[i].channel_bytes = pStreamInfo->channel_bytes; - media_info->stream_info[i].channels = pStreamInfo->channels; - media_info->stream_info[i].sample_rate = pStreamInfo->sample_rate; - media_info->stream_info[i].mProjFormat = pStreamInfo->mProjFormat; - media_info->stream_info[i].codec = new char; - media_info->stream_info[i].mime_type = new char; - media_info->stream_info[i].source_number = pStreamInfo->source_number; - media_info->stream_info[i].source_resolution = pStreamInfo->source_resolution; - memcpy( const_cast(media_info->stream_info[i].codec), - pStreamInfo->codec, - //sizeof(pStreamInfo->codec)); - pointerLen); - memcpy( const_cast(media_info->stream_info[i].mime_type), - pStreamInfo->mime_type, - //sizeof(pStreamInfo->mime_type)); - pointerLen); - } - - return ERROR_NONE; +int OmafDashSource::GetMediaInfo(DashMediaInfo* media_info) { + MPDInfo* mInfo = this->GetMPDInfo(); + if (!mInfo) return ERROR_NULL_PTR; + + media_info->duration = mInfo->media_presentation_duration; + media_info->stream_count = this->GetStreamCount(); + if (mInfo->type == TYPE_STATIC) { + media_info->streaming_type = DASH_STREAM_STATIC; + } else { + media_info->streaming_type = DASH_STREAM_DYNMIC; + } + + for (int i = 0; i < media_info->stream_count; i++) { + DashStreamInfo* pStreamInfo = this->mMapStream[i]->GetStreamInfo(); + media_info->stream_info[i].bit_rate = pStreamInfo->bit_rate; + media_info->stream_info[i].height = pStreamInfo->height; + media_info->stream_info[i].width = pStreamInfo->width; + media_info->stream_info[i].stream_type = pStreamInfo->stream_type; + media_info->stream_info[i].framerate_den = pStreamInfo->framerate_den; + media_info->stream_info[i].framerate_num = pStreamInfo->framerate_num; + media_info->stream_info[i].channel_bytes = pStreamInfo->channel_bytes; + media_info->stream_info[i].channels = pStreamInfo->channels; + media_info->stream_info[i].sample_rate = pStreamInfo->sample_rate; + media_info->stream_info[i].mProjFormat = pStreamInfo->mProjFormat; + media_info->stream_info[i].codec = new char[1024]; + media_info->stream_info[i].mime_type = new char[1024]; + media_info->stream_info[i].source_number = pStreamInfo->source_number; + media_info->stream_info[i].source_resolution = pStreamInfo->source_resolution; + media_info->stream_info[i].segmentDuration = pStreamInfo->segmentDuration; + media_info->stream_info[i].tileRowNum = pStreamInfo->tileRowNum; + media_info->stream_info[i].tileColNum = pStreamInfo->tileColNum; + memcpy_s(const_cast(media_info->stream_info[i].codec), 1024, pStreamInfo->codec, 1024); + memcpy_s(const_cast(media_info->stream_info[i].mime_type), 1024, pStreamInfo->mime_type, 1024); + DELETE_ARRAY(pStreamInfo->codec); + DELETE_ARRAY(pStreamInfo->mime_type); + } + + return ERROR_NONE; } -void OmafDashSource::Run() -{ - if(mMPDinfo->type == TYPE_LIVE){ - thread_dynamic(); - } - thread_static(); +void OmafDashSource::Run() { + if (mMPDinfo->type == TYPE_LIVE) { + thread_dynamic(); + return; + } + thread_static(); } -int OmafDashSource::TimedDownloadSegment( bool bFirst ) -{ - std::map::iterator it; - for(it=this->mMapStream.begin(); 
it!=this->mMapStream.end(); it++){ - OmafMediaStream* pStream = it->second; - if(bFirst){ - if(mMPDinfo->type == TYPE_LIVE) - pStream->UpdateStartNumber(mMPDinfo->availabilityStartTime); +int OmafDashSource::TimedDownloadSegment(bool bFirst) { +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + struct timeval currTime; + gettimeofday(&currTime, nullptr); + uint64_t timeUs = currTime.tv_sec * 1000000 + currTime.tv_usec; + tracepoint(bandwidth_tp_provider, download_info, timeUs, dcount); + tracepoint(mthq_tp_provider, T3_start_download_time, dcount); +#endif +#endif + + std::map::iterator it; + for (it = this->mMapStream.begin(); it != this->mMapStream.end(); it++) { + OmafMediaStream* pStream = it->second; + if (bFirst) { + if (mMPDinfo->type == TYPE_LIVE) { + pStream->UpdateStartNumber(mMPDinfo->availabilityStartTime); + if (omaf_dash_params_.syncer_params_.enable_) { + pStream->SetupSegmentSyncer(omaf_dash_params_); } - pStream->DownloadSegments(); - } - - LOG(INFO)<<"now download number"<DownloadSegments(); + } +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + string tag = "sgmtIdx:" + to_string(dcount); + tracepoint(E2E_latency_tp_provider, + pre_da_info, + -1, + tag.c_str()); +#endif +#endif + OMAF_LOG(LOG_INFO, "Start to download segments and id is %d\n", dcount++); + +#if 0 + std::unique_ptr perf = dash_client_->statistics(); + if (perf) { + OMAF_LOG(LOG_INFO, perf->to_string()); + } +#endif + return ERROR_NONE; } -int OmafDashSource::StartReadThread() -{ - int ret = TimedSelectSegements( ); - if(ERROR_NONE != ret) return ret; - - uint32_t cnt = 0; - while(cnt < mMapStream.size()) - { - cnt = 0; - for(auto it : mMapStream) - { - int enableSize = it.second->GetExtractorSize(); - int totalSize = it.second->GetTotalExtractorSize(); - if( enableSize < totalSize) - cnt++; - } - } - READERMANAGER::GetInstance()->StartThread(); - - return ERROR_NONE; +int OmafDashSource::StartReadThread() { + int ret = TimedSelectSegements(); + if (ERROR_NONE != ret) return ret; + + uint32_t cnt = 0; + while (cnt < mMapStream.size()) { + cnt = 0; + for (auto it : mMapStream) { + if ((it.second)->IsExtractorEnabled()) { + int enableSize = it.second->GetExtractorSize(); + int totalSize = it.second->GetTotalExtractorSize(); + if (enableSize < totalSize) cnt++; + } else { + cnt++; + } + } + } + // READERMANAGER::GetInstance()->StartThread(); + // omaf_reader_mgr_->StartThread(); + + return ERROR_NONE; } -int OmafDashSource::SelectSpecialSegments(int extractorTrackIdx) -{ - int ret = ERROR_NONE; +int OmafDashSource::SelectSpecialSegments(int extractorTrackIdx) { + int ret = ERROR_NONE; - std::map::iterator it; - for(it=this->mMapStream.begin(); it!=this->mMapStream.end(); it++){ - OmafMediaStream* pStream = it->second; + std::map::iterator it; + for (it = this->mMapStream.begin(); it != this->mMapStream.end(); it++) { + OmafMediaStream* pStream = it->second; - pStream->ClearEnabledExtractors(); - OmafExtractor *specialExtractor = pStream->AddEnabledExtractor(extractorTrackIdx); - if (!specialExtractor) - return OMAF_ERROR_INVALID_DATA; - } - return ret; + pStream->ClearEnabledExtractors(); + OmafExtractor* specialExtractor = pStream->AddEnabledExtractor(extractorTrackIdx); + if (!specialExtractor) return OMAF_ERROR_INVALID_DATA; + } + return ret; } -int OmafDashSource::TimedSelectSegements( ) -{ - int ret = ERROR_NONE; - if(NULL == mSelector) return ERROR_NULL_PTR; - - std::map::iterator it; - for(it=this->mMapStream.begin(); it!=this->mMapStream.end(); it++){ - OmafMediaStream* pStream = 
it->second; - ret = mSelector->SelectExtractors(pStream); - if(ERROR_NONE != ret) break; - } - return ret; +int OmafDashSource::TimedSelectSegements() { + int ret = ERROR_NONE; + if (nullptr == m_selector) return ERROR_NULL_PTR; + + std::map::iterator it; + for (it = this->mMapStream.begin(); it != this->mMapStream.end(); it++) { + OmafMediaStream* pStream = it->second; + pStream->SetSegmentNumber(dcount); + ret = m_selector->SelectTracks(pStream); + if (ERROR_NONE != ret) break; + } + return ret; } -void OmafDashSource::ClearStreams() -{ - std::map::iterator it; - for(it=this->mMapStream.begin(); it!=this->mMapStream.end(); it++){ - OmafMediaStream* pStream = (OmafMediaStream*)it->second; - delete pStream; - } - mMapStream.clear(); +void OmafDashSource::ClearStreams() { + std::map::iterator it; + for (it = this->mMapStream.begin(); it != this->mMapStream.end(); it++) { + OmafMediaStream* pStream = (OmafMediaStream*)it->second; + delete pStream; + } + mMapStream.clear(); } -void OmafDashSource::SeekToSeg(int seg_num) -{ - if(mMPDinfo->type != TYPE_STATIC) return; - int nStream = GetStreamCount(); - for (int i=0; itype != TYPE_STATIC) return; + int nStream = GetStreamCount(); + for (int i = 0; i < nStream; i++) { + OmafMediaStream* pStream = GetStream(i); - pStream->SeekTo(seg_num); - } - return ; + pStream->SeekTo(seg_num); + } + return; } -int OmafDashSource::SetEOS(bool eos) -{ - std::map::iterator it; - for(it = mMapStream.begin(); it != mMapStream.end(); it++) - { - OmafMediaStream *pStream = (OmafMediaStream *)it->second; - pStream->SetEOS(eos); - } +int OmafDashSource::SetEOS(bool eos) { + std::map::iterator it; + for (it = mMapStream.begin(); it != mMapStream.end(); it++) { + OmafMediaStream* pStream = (OmafMediaStream*)it->second; + pStream->SetEOS(eos); + } - return ERROR_NONE; + return ERROR_NONE; } -int OmafDashSource::DownloadInitSeg() -{ - int nStream = GetStreamCount(); - - if( 0 == nStream ){ - return ERROR_NO_STREAM; - } - - /// download initial mp4 for each stream - for (int i=0; i listExtarctors; - std::map mapExtractors = pStream->GetExtractors(); - for(auto &it:mapExtractors) - { - listExtarctors.push_back(it.second); - } - - int ret = pStream->UpdateEnabledExtractors(listExtarctors); - if( ERROR_NONE != ret ) - { - return ERROR_INVALID; - } - - ret = pStream->DownloadInitSegment(); +int OmafDashSource::DownloadInitSeg() { + int nStream = GetStreamCount(); + + if (0 == nStream) { + return ERROR_NO_STREAM; + } + + int ret = ERROR_NONE; + /// download initial mp4 for each stream + for (int i = 0; i < nStream; i++) { + OmafMediaStream* pStream = GetStream(i); + bool isExtractorEnabled = pStream->IsExtractorEnabled(); + if (isExtractorEnabled) { + std::list listExtarctors; + std::map mapExtractors = pStream->GetExtractors(); + for (auto& it : mapExtractors) { + listExtarctors.push_back(it.second); + } + + ret = pStream->UpdateEnabledExtractors(listExtarctors); + if (ERROR_NONE != ret) { + return ERROR_INVALID; + } } + ret = pStream->DownloadInitSegment(); + } - return ERROR_NONE; -} - -uint64_t OmafDashSource::GetSegmentDuration(int stream_id) -{ - return mMapStream[stream_id]->GetSegmentDuration(); + return ERROR_NONE; } -void OmafDashSource::thread_dynamic() -{ - int ret = ERROR_NONE; - bool go_on = true; +uint64_t OmafDashSource::GetSegmentDuration(int stream_id) { return mMapStream[stream_id]->GetSegmentDuration(); } - if ( STATUS_READY != GetStatus() ) { - return ; - } +void OmafDashSource::thread_dynamic() { + int ret = ERROR_NONE; + bool go_on = true; - SetStatus( 
STATUS_RUNNING ); + if (STATUS_READY != GetStatus()) { + return; + } - /// download initial mp4 for each stream - if( ERROR_NONE != DownloadInitSeg() ){ - SetStatus( STATUS_STOPPED ); - return ; - } + SetStatus(STATUS_RUNNING); - bool isInitSegParsed = READERMANAGER::GetInstance()->isAllInitSegParsed(); - while (!isInitSegParsed) - { - ::usleep(1000); - isInitSegParsed = READERMANAGER::GetInstance()->isAllInitSegParsed(); - } - - while((ERROR_NONE != StartReadThread())) + /// download initial mp4 for each stream + if (ERROR_NONE != DownloadInitSeg()) { + SetStatus(STATUS_STOPPED); + return; + } + uint32_t wait_time = 3000; + uint32_t current_wait_time = 0; + bool isInitSegParsed = omaf_reader_mgr_->IsInitSegmentsParsed(); + while (!isInitSegParsed) { + ::usleep(1000); + current_wait_time++; + if (current_wait_time > wait_time) { - ::usleep(1000); + SetStatus(STATUS_STOPPED); + OMAF_LOG(LOG_ERROR, " Time out for waiting init segment parse!\n"); + return; } + isInitSegParsed = omaf_reader_mgr_->IsInitSegmentsParsed(); + } - uint32_t uLastUpdateTime = sys_clock(); - uint32_t uLastSegTime = 0; - bool bFirst = false; - /// main loop: update mpd; download segment according to timeline - while (go_on) { - - if(STATUS_EXITING == GetStatus()){ - break; - } + while ((ERROR_NONE != StartReadThread())) { + ::usleep(1000); + } - // Update viewport and select Adaption Set according to pose change - ret = TimedSelectSegements( ); + uint32_t uLastUpdateTime = sys_clock(); + uint32_t uLastSegTime = 0; + bool bFirst = false; + /// main loop: update mpd; download segment according to timeline + while (go_on) { + if (STATUS_EXITING == GetStatus()) { + break; + } - if(ERROR_NONE != ret) continue; + // Update viewport and select Adaption Set according to pose change + ret = TimedSelectSegements(); - uint32_t timer = sys_clock() - uLastUpdateTime; + if (ERROR_NONE != ret) continue; - if(mMPDinfo->minimum_update_period && (timer > mMPDinfo->minimum_update_period)){ - TimedUpdateMPD(); - uLastUpdateTime = sys_clock(); - } + uint32_t timer = sys_clock() - uLastUpdateTime; - if( 0 == uLastSegTime ){ - uLastSegTime = sys_clock(); - bFirst = true; - }else{ - bFirst = false; - } + if (mMPDinfo->minimum_update_period && (timer > mMPDinfo->minimum_update_period)) { + TimedUpdateMPD(); + uLastUpdateTime = sys_clock(); + } - TimedDownloadSegment(bFirst); + if (0 == uLastSegTime) { + uLastSegTime = sys_clock(); + bFirst = true; + } else { + bFirst = false; + } - uint32_t interval = sys_clock() - uLastSegTime; + TimedDownloadSegment(bFirst); - /// 1/2 segment duration ahead of time to fetch segment. - //uint32_t wait_time = (info.max_segment_duration * 3) / 4 - interval; - uint32_t wait_time = mMPDinfo->max_segment_duration > interval ? mMPDinfo->max_segment_duration - interval : 0; + uint32_t interval = sys_clock() - uLastSegTime; - ::usleep(wait_time*1000); + /// 1/2 segment duration ahead of time to fetch segment. + // uint32_t wait_time = (info.max_segment_duration * 3) / 4 - interval; + uint32_t wait_time = mMPDinfo->max_segment_duration > interval ? 
mMPDinfo->max_segment_duration - interval : 0; - uLastSegTime = sys_clock(); + ::usleep(wait_time * 1000); - } + uLastSegTime = sys_clock(); + } - SetStatus(STATUS_STOPPED); + SetStatus(STATUS_STOPPED); - return ; + return; } -void OmafDashSource::thread_static() -{ - int ret = ERROR_NONE; - bool go_on = true; +void OmafDashSource::thread_static() { + int ret = ERROR_NONE; + bool go_on = true; - if ( STATUS_READY != GetStatus() ) { - return ; - } + if (STATUS_READY != GetStatus()) { + return; + } - SetStatus( STATUS_RUNNING ); + SetStatus(STATUS_RUNNING); - /// download initial mp4 for each stream - if( ERROR_NONE != DownloadInitSeg() ){ - SetStatus( STATUS_STOPPED ); - return ; - } - - bool isInitSegParsed = READERMANAGER::GetInstance()->isAllInitSegParsed(); - while (!isInitSegParsed) - { - ::usleep(1000); - isInitSegParsed = READERMANAGER::GetInstance()->isAllInitSegParsed(); - } - - while((ERROR_NONE != StartReadThread())) + /// download initial mp4 for each stream + if (ERROR_NONE != DownloadInitSeg()) { + SetStatus(STATUS_STOPPED); + return; + } + uint32_t wait_time = 3000; + uint32_t current_wait_time = 0; + bool isInitSegParsed = omaf_reader_mgr_->IsInitSegmentsParsed(); + while (!isInitSegParsed) { + ::usleep(1000); + current_wait_time++; + if (current_wait_time > wait_time) { - ::usleep(1000); + SetStatus(STATUS_STOPPED); + OMAF_LOG(LOG_ERROR, " Time out for waiting init segment parse!\n"); + return; } + isInitSegParsed = omaf_reader_mgr_->IsInitSegmentsParsed(); + } - // -0.1 for framerate.den is 1001 - if (GetSegmentDuration(0) == 0) - { - return; - } + while ((ERROR_NONE != StartReadThread())) { + ::usleep(1000); + } - double segmentDuration = (double)GetSegmentDuration(0); - int total_seg = segmentDuration > 0 ? (ceil((double)mMPDinfo->media_presentation_duration / segmentDuration / 1000)) : 0; + // -0.1 for framerate.den is 1001 + if (GetSegmentDuration(0) == 0) { + return; + } - int seg_count = 0; + double segmentDuration = (double)GetSegmentDuration(0); + int total_seg = + segmentDuration > 0 ? (ceil((double)mMPDinfo->media_presentation_duration / segmentDuration / 1000)) : 0; - uint32_t uLastSegTime = 0; - bool bFirst = false; - /// main loop: update mpd; download segment according to timeline - while (go_on) { + int seg_count = 0; - if(STATUS_EXITING == GetStatus()){ - break; - } - - // Update viewport and select Adaption Set according to pose change - ret = TimedSelectSegements( ); + uint32_t uLastSegTime = 0; + bool bFirst = false; + /// main loop: update mpd; download segment according to timeline + while (go_on) { + if (STATUS_EXITING == GetStatus()) { + break; + } - if(ERROR_NONE != ret) continue; + // Update viewport and select Adaption Set according to pose change + ret = TimedSelectSegements(); - if( 0 == uLastSegTime ){ - uLastSegTime = sys_clock(); - bFirst = true; - }else{ - bFirst = false; - } + if (ERROR_NONE != ret) continue; - TimedDownloadSegment(bFirst); + if (0 == uLastSegTime) { + uLastSegTime = sys_clock(); + bFirst = true; + } else { + bFirst = false; + } - uint32_t interval = sys_clock() - uLastSegTime; + TimedDownloadSegment(bFirst); - /// one segment duration ahead of time to fetch segment. - uint32_t wait_time = (mMPDinfo->max_segment_duration / 2 > interval) ? (mMPDinfo->max_segment_duration - interval) : 0; + uint32_t interval = sys_clock() - uLastSegTime; - ::usleep(wait_time*1000); + /// one segment duration ahead of time to fetch segment. + // uint32_t wait_time = (mMPDinfo->max_segment_duration / 2 > interval) ? 
(mMPDinfo->max_segment_duration - + // interval) : 0; + uint32_t wait_time = mMPDinfo->max_segment_duration > interval ? mMPDinfo->max_segment_duration - interval : 0; - uLastSegTime = sys_clock(); + ::usleep(wait_time * 1000); - seg_count++; + uLastSegTime = sys_clock(); - if( seg_count >= total_seg ){ - seg_count = 0; - if(mLoop){ - SeekToSeg(1); - } - else{ - mEOS = true; - SetEOS(true); - break; - } - } + seg_count++; + if (seg_count >= total_seg) { + seg_count = 0; + if (mLoop) { + SeekToSeg(1); + } else { + mEOS = true; + SetEOS(true); + break; + } } + } - SetStatus(STATUS_STOPPED); + SetStatus(STATUS_STOPPED); - return ; + return; } -int OmafDashSource::TimedUpdateMPD() -{ - return ERROR_NONE; -} +int OmafDashSource::TimedUpdateMPD() { return ERROR_NONE; } VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafDashSource.h b/src/OmafDashAccess/OmafDashSource.h index de097bb3..9c865899 100644 --- a/src/OmafDashAccess/OmafDashSource.h +++ b/src/OmafDashAccess/OmafDashSource.h @@ -26,7 +26,6 @@ * */ - //! //! \file: OmafDashSource.h //! \brief: @@ -43,146 +42,144 @@ #include "MediaPacket.h" #include "OmafMPDParser.h" #include "DownloadManager.h" -#include "OmafExtractorSelector.h" - +#include "OmafTracksSelector.h" +#include "OmafTilesStitch.h" +#include using namespace VCD::OMAF; VCD_OMAF_BEGIN -typedef enum{ - STATUS_CREATED=0, - STATUS_READY, - STATUS_RUNNING, - STATUS_EXITING, - STATUS_STOPPED, - STATUS_UNKNOWN, -}DASH_STATUS; - typedef std::list ListStream; class OmafDashSource : public OmafMediaSource, Threadable { -public: - //! - //! \brief construct - //! - OmafDashSource(); - - //! - //! \brief de-construct - //! - virtual ~OmafDashSource(); - -public: - //! - //! \brief Interface implementation from base class: OmafMediaSource - //! - virtual int OpenMedia(std::string url, std::string cacheDir = "", bool enablePredictor=false); - virtual int CloseMedia(); - virtual int GetPacket( int streamID, std::list* pkts, bool needParams, bool clearBuf ); - virtual int GetStatistic(DashStatisticInfo* dsInfo); - virtual int SetupHeadSetInfo(HeadSetInfo* clientInfo); - virtual int ChangeViewport(HeadPose* pose); - virtual int GetMediaInfo( DashMediaInfo* media_info ); - virtual int GetTrackCount(); - virtual int SelectSpecialSegments(int extractorTrackIdx); - //! - //! \brief Interface implementation from base class: Threadable - //! - virtual void Run(); + public: + //! + //! \brief construct + //! + OmafDashSource(); + + //! + //! \brief de-construct + //! + virtual ~OmafDashSource(); + + public: + //! + //! \brief Interface implementation from base class: OmafMediaSource + //! + virtual int OpenMedia(std::string url, std::string cacheDir, void* externalLog, PluginDef i360scvp_plugin, bool enableExtractor = true, + bool enablePredictor = false, std::string predictPluginName = "", std::string libPath = ""); + virtual int StartStreaming(); + virtual int CloseMedia(); + virtual int GetPacket(int streamID, std::list* pkts, bool needParams, bool clearBuf); + virtual int GetStatistic(DashStatisticInfo* dsInfo); + virtual int SetupHeadSetInfo(HeadSetInfo* clientInfo); + virtual int ChangeViewport(HeadPose* pose); + virtual int GetMediaInfo(DashMediaInfo* media_info); + virtual int GetTrackCount(); + virtual int SelectSpecialSegments(int extractorTrackIdx); + //! + //! \brief Interface implementation from base class: Threadable + //! + virtual void Run(); + + private: + //! + //! \brief TimedSelect extractors or adaptation set for streams + //! + int TimedSelectSegements(); + + //! + //! \brief + //! 
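+  //! Stops the MPD download/processing worker: sets the status to STATUS_EXITING
+  //! and joins the thread running thread_dynamic()/thread_static() (see the
+  //! implementation earlier in OmafDashSource.cpp).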
+ void StopThread(); + + //! + //! \brief update mpd in dynamic mode + //! + int TimedUpdateMPD(); + + //! + //! \brief Download Segment in dynamic mode + //! + int TimedDownloadSegment(bool bFirst); + + //! + //! \brief run thread for dynamic mpd processing + //! + void thread_dynamic(); + + //! + //! \brief run thread for static mpd processing + //! + void thread_static(); + + //! + //! \brief ClearStreams + //! + void ClearStreams(); + + //! + //! \brief SeekToSeg + //! + void SeekToSeg(int seg_num); + + //! + //! \brief SetEOS + //! + int SetEOS(bool eos); + + //! + //! \brief Download init Segment + //! + int DownloadInitSeg(); + + //! + //! \brief GetSegmentDuration + //! + uint64_t GetSegmentDuration(int stream_id); + + //! + //! \brief Get and Set current status + //! + DASH_STATUS GetStatus() { return mStatus; }; + void SetStatus(DASH_STATUS status) { + std::lock_guard lock(mMutex); + mStatus = status; + }; + + //! + //! \brief Get MPD information + //! + MPDInfo* GetMPDInfo() { + if (!mMPDParser) return nullptr; + + return mMPDParser->GetMPDInfo(); + }; + + int SyncTime(std::string url); + + int StartReadThread(); private: - //! - //! \brief TimedSelect extractors or adaptation set for streams - //! - int TimedSelectSegements( ); - - //! - //! \brief - //! - void StopThread(); - - //! - //! \brief update mpd in dynamic mode - //! - int TimedUpdateMPD(); - - //! - //! \brief Download Segment in dynamic mode - //! - int TimedDownloadSegment( bool bFirst ); - - //! - //! \brief run thread for dynamic mpd processing - //! - void thread_dynamic(); - - //! - //! \brief run thread for static mpd processing - //! - void thread_static(); - - //! - //! \brief ClearStreams - //! - void ClearStreams(); - - //! - //! \brief SeekToSeg - //! - void SeekToSeg(int seg_num); - - //! - //! \brief SetEOS - //! - int SetEOS(bool eos); - - //! - //! \brief Download init Segment - //! - int DownloadInitSeg(); - - //! - //! \brief GetSegmentDuration - //! - uint64_t GetSegmentDuration(int stream_id); - - //! - //! \brief Get and Set current status - //! - DASH_STATUS GetStatus(){ return mStatus; }; - void SetStatus(DASH_STATUS status){ - pthread_mutex_lock(&mMutex); - mStatus = status; - pthread_mutex_unlock(&mMutex); - }; - - //! - //! \brief Get MPD information - //! 
- MPDInfo* GetMPDInfo() - { - if(!mMPDParser) - return nullptr; - - return mMPDParser->GetMPDInfo(); - }; - - int SyncTime(std::string url); - - int StartReadThread(); - -private: - OmafMPDParser* mMPDParser; // dash_client_; + std::shared_ptr omaf_reader_mgr_; + bool mIsLocalMedia; }; VCD_OMAF_END; #endif /* OMAFSOURCE_H */ - diff --git a/src/OmafDashAccess/OmafExtractor.cpp b/src/OmafDashAccess/OmafExtractor.cpp index c8c0c0a8..0910b232 100644 --- a/src/OmafDashAccess/OmafExtractor.cpp +++ b/src/OmafDashAccess/OmafExtractor.cpp @@ -37,53 +37,42 @@ VCD_OMAF_BEGIN -OmafExtractor::OmafExtractor() -{ -} +OmafExtractor::OmafExtractor() {} -OmafExtractor::~OmafExtractor() -{ -} +OmafExtractor::~OmafExtractor() {} -OmafExtractor::OmafExtractor( AdaptationSetElement* pAdaptationSet ) -:OmafAdaptationSet(pAdaptationSet) -{ -} +OmafExtractor::OmafExtractor(AdaptationSetElement* pAdaptationSet, ProjectionFormat pf, bool isExtractorTrack) + : OmafAdaptationSet(pAdaptationSet, pf, isExtractorTrack) {} -void OmafExtractor::AddDependAS(OmafAdaptationSet* as) -{ - std::vector vecID; +void OmafExtractor::AddDependAS(OmafAdaptationSet* as) { + std::vector vecID; - if(NULL != this->mPreselID){ - vecID = mPreselID->SelAsIDs; - } - if(1 < this->mDependIDs.size()){ - vecID = mDependIDs; - } + if (nullptr != this->mPreselID) { + vecID = mPreselID->SelAsIDs; + } + if (1 < this->mDependIDs.size()) { + vecID = mDependIDs; + } - for(auto it = vecID.begin(); it != vecID.end(); it++){ - int id = int (*it); - if( id == as->GetID() ){ - this->mAdaptationSets[id] = as; - return; - } + for (auto it = vecID.begin(); it != vecID.end(); it++) { + int id = int(*it); + if (id == as->GetID()) { + this->mAdaptationSets[id] = as; + return; } + } } -std::list OmafExtractor::GetDependTrackID() -{ - std::list trackList; - for(auto it=mAdaptationSets.begin(); it!=mAdaptationSets.end(); it++){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); - trackList.push_back(pAS->GetTrackNumber()); - } +std::list OmafExtractor::GetDependTrackID() { + std::list trackList; + for (auto it = mAdaptationSets.begin(); it != mAdaptationSets.end(); it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); + trackList.push_back(pAS->GetTrackNumber()); + } - return trackList; + return trackList; } -SampleData* OmafExtractor::ReadSample( ) -{ - return NULL; -} +SampleData* OmafExtractor::ReadSample() { return nullptr; } VCD_OMAF_END; diff --git a/src/OmafDashAccess/OmafExtractor.h b/src/OmafDashAccess/OmafExtractor.h index cbf26da2..5c4a7f2c 100644 --- a/src/OmafDashAccess/OmafExtractor.h +++ b/src/OmafDashAccess/OmafExtractor.h @@ -45,56 +45,54 @@ VCD_OMAF_BEGIN //! \brief: Extractor derived from OmafAdaptationSet since Extractor is also //! one adpationSet in MPD //! -class OmafExtractor : public OmafAdaptationSet{ -public: - //! - //! \brief construct - //! - OmafExtractor(); - - //! - //! \brief construct from AdaptationSetElement - //! - OmafExtractor( AdaptationSetElement* pAdaptationSet ); - - //! - //! \brief de-construct - //! - virtual ~OmafExtractor(); - -public: - //! - //! \brief Reading a Sample from sample list. - //! \return the SampleData read from Sample list - //! - virtual SampleData* ReadSample( ); - - virtual OmafExtractor* GetClassType(){ - return this; - }; - - //! - //! \brief add Omaf Adaptation Set which is used by the extractor, it will - //! be called by OmafMediaStream when it is initialization - //! - void AddDependAS(OmafAdaptationSet* as); - - //! - //! \brief Get all depended adaptation sets - //! 
- std::map GetDependAdaptationSets() { return mAdaptationSets; }; - - //! - //! \brief get the list of depended track IDs - //! - std::list GetDependTrackID(); -private: - std::map mAdaptationSets; // GetDependAdaptationSets() { return mAdaptationSets; }; + + //! + //! \brief get the list of depended track IDs + //! + std::list GetDependTrackID(); + + //bool IsExtractor() override { return true; } + + private: + std::map mAdaptationSets; // -#include -#include -#include - -VCD_OMAF_BEGIN - -OmafExtractorSelector::OmafExtractorSelector( int size ) -{ - pthread_mutex_init(&mMutex, NULL); - mSize = size; - m360ViewPortHandle = nullptr; - mParamViewport = nullptr; - mCurrentExtractor = nullptr; - mPose = nullptr; - mUsePrediction = false; -} - -OmafExtractorSelector::~OmafExtractorSelector() -{ - pthread_mutex_destroy( &mMutex ); - - if(m360ViewPortHandle) - { - genViewport_unInit(m360ViewPortHandle); - m360ViewPortHandle = nullptr; - } - - if(mParamViewport) - { - SAFE_DELETE(mParamViewport->m_pUpLeft); - SAFE_DELETE(mParamViewport->m_pDownRight); - } - SAFE_DELETE(mParamViewport); - - if(mPoseHistory.size()) - { - for(auto &p:mPoseHistory) - { - SAFE_DELETE(p.pose); - } - } - - mUsePrediction = false; -} - -int OmafExtractorSelector::SelectExtractors(OmafMediaStream* pStream) -{ - OmafExtractor* pSelectedExtrator = GetExtractorByPose( pStream ); - - if(NULL == pSelectedExtrator && !mCurrentExtractor) - return ERROR_NULL_PTR; - - mCurrentExtractor = pSelectedExtrator ? pSelectedExtrator : mCurrentExtractor; - - ListExtractor extractors; - - if(mUsePrediction) - { - extractors = GetExtractorByPosePrediction( pStream ); - } - - extractors.push_front(mCurrentExtractor); - - if(pSelectedExtrator || extractors.size() > 1) - { - list trackIDs; - for(auto &it: extractors) - { - trackIDs.push_back(it->GetTrackNumber()); - } - READERMANAGER::GetInstance()->RemoveTrackFromPacketQueue(trackIDs); - } - - int ret = pStream->UpdateEnabledExtractors(extractors); - - return ret; -} - -int OmafExtractorSelector::UpdateViewport(HeadPose* pose) -{ - if (!pose) - return ERROR_NULL_PTR; - - pthread_mutex_lock(&mMutex); - - PoseInfo pi; - pi.pose = new HeadPose; - memcpy(pi.pose, pose, sizeof(HeadPose)); - std::chrono::high_resolution_clock clock; - pi.time = std::chrono::duration_cast(clock.now().time_since_epoch()).count(); - mPoseHistory.push_front(pi); - if( mPoseHistory.size() > (uint32_t)(this->mSize) ) - { - auto pit = mPoseHistory.back(); - SAFE_DELETE(pit.pose); - mPoseHistory.pop_back(); - } - - pthread_mutex_unlock(&mMutex); - return ERROR_NONE; -} - -int OmafExtractorSelector::SetInitialViewport( std::vector& pView, HeadSetInfo* headSetInfo, OmafMediaStream* pStream) -{ - if(!headSetInfo || !headSetInfo->viewPort_hFOV || !headSetInfo->viewPort_vFOV - || !headSetInfo->viewPort_Width || !headSetInfo->viewPort_Height) - { - return ERROR_INVALID; - } - - mParamViewport = new generateViewPortParam; - mParamViewport->m_iViewportWidth = headSetInfo->viewPort_Width; - mParamViewport->m_iViewportHeight = headSetInfo->viewPort_Height; - mParamViewport->m_viewPort_fPitch = headSetInfo->pose->pitch; - mParamViewport->m_viewPort_fYaw = headSetInfo->pose->yaw; - mParamViewport->m_viewPort_hFOV = headSetInfo->viewPort_hFOV; - mParamViewport->m_viewPort_vFOV = headSetInfo->viewPort_vFOV; - mParamViewport->m_output_geoType = headSetInfo->output_geoType; - mParamViewport->m_input_geoType = headSetInfo->input_geoType; - - mParamViewport->m_iInputWidth = pStream->GetStreamWidth(); - mParamViewport->m_iInputHeight = 
pStream->GetStreamHeight(); - - mParamViewport->m_tileNumRow = pStream->GetRowSize(); - mParamViewport->m_tileNumCol = pStream->GetColSize(); - mParamViewport->m_pUpLeft = new point[6]; - mParamViewport->m_pDownRight = new point[6]; - - m360ViewPortHandle = genViewport_Init(mParamViewport); - if(!m360ViewPortHandle) - return ERROR_NULL_PTR; - - //set current Pose; - mPose = new HeadPose; - memcpy(mPose, headSetInfo->pose, sizeof(HeadPose)); - - return UpdateViewport(mPose); -} - -bool OmafExtractorSelector::IsDifferentPose(HeadPose* pose1, HeadPose* pose2) -{ - // return false if two pose is same - if(pose1->yaw == pose2->yaw && pose1->pitch == pose2->pitch) - return false; - - return true; -} - -OmafExtractor* OmafExtractorSelector::GetExtractorByPose( OmafMediaStream* pStream ) -{ - pthread_mutex_lock(&mMutex); - if(mPoseHistory.size() == 0) - { - pthread_mutex_unlock(&mMutex); - return NULL; - } - - HeadPose* previousPose = mPose; - int64_t historySize = 0; - - mPose = mPoseHistory.front().pose; - mPoseHistory.pop_front(); - - if(!mPose) - { - pthread_mutex_unlock(&mMutex); - return nullptr; - } - - historySize = mPoseHistory.size(); - - pthread_mutex_unlock(&mMutex); - - // won't get viewport if pose hasn't changed - if( previousPose && mPose && !IsDifferentPose( previousPose, mPose ) && historySize > 1) - { - LOG(INFO)<<"pose hasn't changed!"<yaw<<","<pitch<<") to ("<yaw<<","<pitch<<") ! extractor id is: "<GetID()<yaw, pose->pitch); - if(ret != 0) - return NULL; - ret = genViewport_process(mParamViewport, m360ViewPortHandle); - if(ret != 0) - return NULL; - - // get Content Coverage from 360SCVP library - CCDef* outCC = new CCDef; - ret = genViewport_getContentCoverage(m360ViewPortHandle, outCC); - if(ret != 0) - return NULL; - - // get the extractor with largest intersection - OmafExtractor *selectedExtractor = GetNearestExtractor(pStream, outCC); - - if(outCC) - { - delete outCC; - outCC = nullptr; - } - - return selectedExtractor; -} - -OmafExtractor* OmafExtractorSelector::GetNearestExtractor(OmafMediaStream* pStream, CCDef* outCC) -{ - // calculate which extractor should be chosen - OmafExtractor *selectedExtractor = nullptr; - float leastDistance = FLT_MAX; - std::map extractors = pStream->GetExtractors(); - for(auto &ie: extractors) - { - ContentCoverage* cc = ie.second->GetContentCoverage(); - if(!cc) - continue; - - int32_t ca = cc->coverage_infos[0].centre_azimuth; - int32_t ce = cc->coverage_infos[0].centre_elevation; - - // for now, every extractor has the same azimuth_range and elevation_range - // , so we just calculate least Euclidean distance between centres to find the - // extractor with largest intersection - float distance = sqrt( pow((ca - outCC->centreAzimuth), 2) + pow((ce - outCC->centreElevation), 2) ); - if(distance < leastDistance) - { - leastDistance = distance; - selectedExtractor = ie.second; - } - } - - return selectedExtractor; -} - -ListExtractor OmafExtractorSelector::GetExtractorByPosePrediction( OmafMediaStream* pStream ) -{ - ListExtractor extractors; - pthread_mutex_lock(&mMutex); - if(mPoseHistory.size() <= 1) - { - pthread_mutex_unlock(&mMutex); - return extractors; - } - - PoseInfo pf, pb; - pf = mPoseHistory.front(); - pb = mPoseHistory.back(); - pthread_mutex_unlock(&mMutex); - - std::chrono::high_resolution_clock clock; - uint64_t time = std::chrono::duration_cast(clock.now().time_since_epoch()).count(); - - // simple prediction, assuming the move is uniform - float yaw = (pb.pose->yaw - pf.pose->yaw)/(pb.time - pf.time) * (time - pb.time + 
1000) + pb.pose->yaw; - float pitch = (pb.pose->pitch - pf.pose->pitch)/(pb.time - pf.time) * (time - pb.time + 1000) + pb.pose->pitch; - HeadPose* pose = new HeadPose; - pose->yaw = yaw; - pose->pitch = pitch; - // to select extractor; - OmafExtractor *selectedExtractor = SelectExtractor(pStream, pose); - if(selectedExtractor) - extractors.push_back(selectedExtractor); - SAFE_DELETE(pose); - return extractors; -} - -VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafExtractorTracksSelector.cpp b/src/OmafDashAccess/OmafExtractorTracksSelector.cpp new file mode 100644 index 00000000..9b327b55 --- /dev/null +++ b/src/OmafDashAccess/OmafExtractorTracksSelector.cpp @@ -0,0 +1,433 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + * + */ + +/* + * File: OmafExtractorTracksSelector.cpp + * Author: media + * + * Created on May 28, 2019, 1:19 PM + */ + +#include "OmafExtractorTracksSelector.h" +#include +#include +#include +#include +#include "OmafMediaStream.h" +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ +#include "../trace/MtHQ_tp.h" +#endif +#endif + +VCD_OMAF_BEGIN + +OmafExtractorTracksSelector::~OmafExtractorTracksSelector() { mCurrentExtractor = nullptr; } + +int OmafExtractorTracksSelector::SelectTracks(OmafMediaStream* pStream) { + + DashStreamInfo *streamInfo = pStream->GetStreamInfo(); + int ret = ERROR_NONE; + if (streamInfo->stream_type == MediaType_Video) + { + OmafExtractor* pSelectedExtrator = NULL; + if (mUsePrediction) { + ListExtractor predict_extractors = GetExtractorByPosePrediction(pStream); + if (predict_extractors.empty()) { + pSelectedExtrator = GetExtractorByPose(pStream); + } else + pSelectedExtrator = predict_extractors.front(); + } else { + pSelectedExtrator = GetExtractorByPose(pStream); + } + + if (NULL == pSelectedExtrator && !mCurrentExtractor) return ERROR_NULL_PTR; + + bool isExtractorChanged = false; + // not first time and changed and change to different extractor + if (mCurrentExtractor && pSelectedExtrator && mCurrentExtractor != pSelectedExtrator) { + isExtractorChanged = true; + } + + mCurrentExtractor = pSelectedExtrator ? 
pSelectedExtrator : mCurrentExtractor; + + ListExtractor extractors; + + extractors.push_front(mCurrentExtractor); + + if (isExtractorChanged || extractors.size() > 1) //? + { + list trackIDs; + for (auto& it : extractors) { + trackIDs.push_back(it->GetTrackNumber()); + } + // READERMANAGER::GetInstance()->RemoveTrackFromPacketQueue(trackIDs); + } + + ret = pStream->UpdateEnabledExtractors(extractors); + } + else if (streamInfo->stream_type == MediaType_Audio) + { + ret = pStream->EnableAllAudioTracks(); + } + + return ret; +} + +bool OmafExtractorTracksSelector::IsDifferentPose(HeadPose* pose1, HeadPose* pose2) { + // return false if two pose is same + if (abs(pose1->yaw - pose2->yaw) < 1e-3 && abs(pose1->pitch - pose2->pitch) < 1e-3) { + OMAF_LOG(LOG_INFO, "pose has not changed!\n"); + return false; + } + return true; +} + +OmafExtractor* OmafExtractorTracksSelector::GetExtractorByPose(OmafMediaStream* pStream) { + HeadPose* previousPose = NULL; + int64_t historySize = 0; + { + std::lock_guard lock(mMutex); + if (mPoseHistory.size() == 0) { + return NULL; + } + + previousPose = mPose; + + mPose = mPoseHistory.front(); + mPoseHistory.pop_front(); + + if (!mPose) { + return nullptr; + } + + historySize = mPoseHistory.size(); + } + + // won't get viewport if pose hasn't changed + if (previousPose && mPose && !IsDifferentPose(previousPose, mPose) && historySize > 1 && !mUsePrediction) { + OMAF_LOG(LOG_INFO, "pose hasn't changed!\n"); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T2_detect_pose_change, 0); +#endif +#endif + return NULL; + } + + // to select extractor; + OMAF_LOG(LOG_INFO, "Start to select extractor tracks!\n"); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T1_select_tracks, "extractortrack"); +#endif +#endif + OmafExtractor* selectedExtractor = SelectExtractor(pStream, mPose); + if (selectedExtractor && previousPose) { + OMAF_LOG(LOG_INFO, "For extractor track %d\n", selectedExtractor->GetID()); + OMAF_LOG(LOG_INFO, "pose has changed from yaw %f, pitch %f\n", previousPose->yaw, previousPose->pitch); + OMAF_LOG(LOG_INFO, "to yaw %f, pitch %f\n", mPose->yaw, mPose->pitch); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T2_detect_pose_change, 1); +#endif +#endif + } + + if (previousPose != mPose) SAFE_DELETE(previousPose); + + return selectedExtractor; +} + +static bool IsIncluded(std::list first, std::list second) +{ + bool included = false; + if (first.size() > second.size()) + { + included = false; + } + else + { + uint64_t includedNum = 0; + std::list::iterator it1; + for (it1 = first.begin(); it1 != first.end(); it1++) + { + int id1 = *it1; + std::list::iterator it2; + for (it2 = second.begin(); it2 != second.end(); it2++) + { + int id2 = *it2; + if (id2 == id1) + { + includedNum++; + break; + } + } + } + if (includedNum == first.size()) + { + included = true; + } + else + { + included = false; + } + } + return included; +} + +OmafExtractor* OmafExtractorTracksSelector::SelectExtractor(OmafMediaStream* pStream, HeadPose* pose) { + // to select extractor; + int ret = I360SCVP_setViewPort(m360ViewPortHandle, (float)(round(pose->yaw)), (float)(round(pose->pitch))); + if (ret != 0) return NULL; + ret = I360SCVP_process(mParamViewport, m360ViewPortHandle); + if (ret != 0) return NULL; + + TileDef *tilesInViewport = new TileDef[1024]; + Param_ViewportOutput paramViewportOutput; + int32_t selectedTilesNum = I360SCVP_getTilesInViewport( + 
tilesInViewport, ¶mViewportOutput, m360ViewPortHandle); + std::list selectedTracks; + + std::map asMap = pStream->GetMediaAdaptationSet(); + std::map::iterator itAS; + + if (mProjFmt == ProjectionFormat::PF_ERP) + { + for (int32_t index = 0; index < selectedTilesNum; index++) + { + int32_t left = tilesInViewport[index].x; + int32_t top = tilesInViewport[index].y; + + for (itAS = asMap.begin(); itAS != asMap.end(); itAS++) + { + OmafAdaptationSet *adaptationSet = itAS->second; + OmafSrd *srd = adaptationSet->GetSRD(); + int32_t tileLeft = srd->get_X(); + int32_t tileTop = srd->get_Y(); + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + + if ((qualityRanking == HIGHEST_QUALITY_RANKING) && (tileLeft == left) && (tileTop == top)) + { + int trackID = adaptationSet->GetID(); + selectedTracks.push_back(trackID); + break; + } + } + } + } + else if (mProjFmt == ProjectionFormat::PF_CUBEMAP) + { + for (int32_t index = 0; index < selectedTilesNum; index++) + { + int32_t left = tilesInViewport[index].x; + int32_t top = tilesInViewport[index].y; + int32_t faceId = tilesInViewport[index].faceId; + for (itAS = asMap.begin(); itAS != asMap.end(); itAS++) + { + OmafAdaptationSet *adaptationSet = itAS->second; + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + + if (qualityRanking == HIGHEST_QUALITY_RANKING) + { + TileDef *tileInfo = adaptationSet->GetTileInfo(); + if (!tileInfo) + { + OMAF_LOG(LOG_ERROR, "NULL tile information for Cubemap !\n"); + DELETE_ARRAY(tilesInViewport); + return NULL; + } + int32_t tileLeft = tileInfo->x; + int32_t tileTop = tileInfo->y; + int32_t tileFaceId = tileInfo->faceId; + if ((tileLeft == left) && (tileTop == top) && (tileFaceId == faceId)) + { + int trackID = adaptationSet->GetID(); + selectedTracks.push_back(trackID); + break; + } + } + } + } + } + // get Content Coverage from 360SCVP library + CCDef* outCC = new CCDef; + ret = I360SCVP_getContentCoverage(m360ViewPortHandle, outCC); + if (ret != 0) + { + SAFE_DELETE(outCC); + return NULL; + } + + OmafExtractor* selectedExtractor = NULL; + bool included = false; + std::map extractors = pStream->GetExtractors(); + std::map::iterator ie; + for (ie = extractors.begin(); ie != extractors.end(); ie++) + { + std::list refTracks = ie->second->GetDependTrackID(); + included = IsIncluded(selectedTracks, refTracks); + if (included) + break; + } + if (ie == extractors.end()) + { + OMAF_LOG(LOG_WARNING, "Couldn't find matched extractor track. 
There is error in packing !\n"); + selectedExtractor = GetNearestExtractor(pStream, outCC); + } + else + { + selectedExtractor = ie->second; + } + + SAFE_DELETE(outCC); + delete [] tilesInViewport; + tilesInViewport = NULL; + + return selectedExtractor; +} + +OmafExtractor* OmafExtractorTracksSelector::GetNearestExtractor(OmafMediaStream* pStream, CCDef* outCC) { + // calculate which extractor should be chosen + OmafExtractor* selectedExtractor = nullptr; + float leastDistance = FLT_MAX; + std::map extractors = pStream->GetExtractors(); + for (auto& ie : extractors) { + ContentCoverage* cc = ie.second->GetContentCoverage(); + if (!cc) continue; + + int32_t ca = cc->coverage_infos[0].centre_azimuth; + int32_t ce = cc->coverage_infos[0].centre_elevation; + + // for now, every extractor has the same azimuth_range and elevation_range + // , so we just calculate least Euclidean distance between centres to find the + // extractor with largest intersection + float distance = sqrt(pow((ca - outCC->centreAzimuth), 2) + pow((ce - outCC->centreElevation), 2)); + if (distance < leastDistance) { + leastDistance = distance; + selectedExtractor = ie.second; + } + } + + return selectedExtractor; +} + +ListExtractor OmafExtractorTracksSelector::GetExtractorByPosePrediction(OmafMediaStream* pStream) { + ListExtractor extractors; + int64_t historySize = 0; + { + std::lock_guard lock(mMutex); + if (mPoseHistory.size() <= 1) { + return extractors; + } + } + + HeadPose* previousPose = mPose; + { + std::lock_guard lock(mMutex); + mPose = mPoseHistory.front(); + mPoseHistory.pop_front(); + if (!mPose) { + return extractors; + } + historySize = mPoseHistory.size(); + } + // won't get viewport if pose hasn't changed + if (previousPose && mPose && !IsDifferentPose(previousPose, mPose) && historySize > 1) { + OMAF_LOG(LOG_INFO, "pose hasn't changed!\n"); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T2_detect_pose_change, 0); +#endif +#endif + SAFE_DELETE(previousPose); + return extractors; + } + // if viewport changed, then predict viewport using pose history. + if (mPredictPluginMap.size() == 0) + { + OMAF_LOG(LOG_ERROR,"predict plugin map is empty!\n"); + return extractors; + } + // 1. figure out the pts of predicted angle + uint32_t current_segment_num = pStream->GetSegmentNumber(); + + DashStreamInfo *stream_info = pStream->GetStreamInfo(); + if (stream_info == nullptr) return extractors; + + int32_t stream_frame_rate = stream_info->framerate_num / stream_info->framerate_den; + uint64_t first_predict_pts = current_segment_num > 0 ? (current_segment_num - 1) * stream_frame_rate : 0; + // 2. predict process + ViewportPredictPlugin *plugin = mPredictPluginMap.at(mPredictPluginName); + std::map predict_angles; + plugin->Predict(first_predict_pts, predict_angles); + if (predict_angles.empty()) + { + OMAF_LOG(LOG_INFO, "predictPose_func return an invalid value!\n"); + return extractors; + } + // to select extractor, only support SingleViewport mode in prediction. 
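+  // Note (added for clarity): the prediction plugin returns viewport angles keyed by pts;
+  // only the entry at first_predict_pts is wrapped into a temporary HeadPose below and fed
+  // through the same SelectExtractor() path that is used for the measured pose.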
+ HeadPose* predictPose = new HeadPose; + ViewportAngle *angle = predict_angles[first_predict_pts]; + predictPose->yaw = angle->yaw; + predictPose->pitch = angle->pitch; + OMAF_LOG(LOG_INFO, "Start to select extractor tracks by prediction!\n"); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T1_select_tracks, "extractortrack"); +#endif +#endif + OmafExtractor* selectedExtractor = SelectExtractor(pStream, predictPose); + if (selectedExtractor && previousPose) { + extractors.push_back(selectedExtractor); + OMAF_LOG(LOG_INFO, "For extractor track %d\n", selectedExtractor->GetID()); + OMAF_LOG(LOG_INFO, "pose has changed from yaw %f, pitch %f\n", previousPose->yaw, previousPose->pitch); + OMAF_LOG(LOG_INFO, "to yaw %f, pitch %f\n", mPose->yaw, mPose->pitch); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T2_detect_pose_change, 1); +#endif +#endif + } + for (auto pre_angle : predict_angles) + { + SAFE_DELETE(pre_angle.second); + } + SAFE_DELETE(previousPose); + SAFE_DELETE(predictPose); + predict_angles.clear(); + return extractors; +} + +VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafExtractorSelector.h b/src/OmafDashAccess/OmafExtractorTracksSelector.h similarity index 63% rename from src/OmafDashAccess/OmafExtractorSelector.h rename to src/OmafDashAccess/OmafExtractorTracksSelector.h index b932d76f..c2f8024a 100644 --- a/src/OmafDashAccess/OmafExtractorSelector.h +++ b/src/OmafDashAccess/OmafExtractorTracksSelector.h @@ -26,76 +26,84 @@ * */ //! -//! \file: OmafExtractorSelector.h -//! \brief: -//! \detail: +//! \file: OmafExtractorTracksSelector.h +//! \brief: Extractor tracks selector class definition +//! \detail: Define the data and the operation of extractor tracks selector +//! based on viewport +//! //! Created on May 28, 2019, 1:19 PM //! -#ifndef OMAFEXTRACTORSELECTOR_H -#define OMAFEXTRACTORSELECTOR_H +#ifndef OMAFEXTRACTORTRACKSSELECTOR_H +#define OMAFEXTRACTORTRACKSSELECTOR_H -#include "general.h" #include "OmafExtractor.h" -#include "OmafMediaStream.h" -#include "360SCVPViewportAPI.h" +#include "OmafTracksSelector.h" using namespace VCD::OMAF; VCD_OMAF_BEGIN -#define POSE_SIZE 20 - typedef std::list ListExtractor; -typedef struct POSEINFO{ - HeadPose *pose; - uint64_t time; -}PoseInfo; - -class OmafExtractorSelector { +class OmafExtractorTracksSelector : public OmafTracksSelector +{ public: //! //! \brief construct //! - OmafExtractorSelector(int size = POSE_SIZE); + OmafExtractorTracksSelector(int size = POSE_SIZE) : OmafTracksSelector(size) + { + mCurrentExtractor = nullptr; + }; //! //! \brief de-construct //! - virtual ~OmafExtractorSelector(); + virtual ~OmafExtractorTracksSelector(); public: //! - //! \brief SelectExtractor for the stream which has extractors. each time - //! the selector will select extractor based on the latest pose. the + //! \brief SelectTracks for the stream which has extractor tracks. each time + //! the selector will select extractor track based on the latest pose. the //! information stored in mPoseHistory can be used for prediction for //! further movement //! - int SelectExtractors(OmafMediaStream* pStream); + virtual int SelectTracks(OmafMediaStream* pStream); //! //! \brief update Viewport; each time pose update will be recorded, but only - //! the latest will be used when SelectExtractors is called. + //! the latest will be used when SelectTracks is called. //! int UpdateViewport(HeadPose* pose); //! //! \brief Set Init viewport //! 
- int SetInitialViewport( std::vector& pView, HeadSetInfo* headSetInfo, OmafMediaStream* pStream); + int SetInitialViewport( + std::vector& pView, + HeadSetInfo* headSetInfo, + OmafMediaStream* pStream); - void EnablePosePrediction(){mUsePrediction = true;}; + //! + //! \brief Load viewport prediction plugin + //! + int EnablePosePrediction(std::string predictPluginName, std::string libPath); + + //! + //! \brief Get the priority of the segment + //! + //virtual int GetSegmentPriority(OmafSegment *segment); private: //! - //! \brief Get Extractor based on latest Pose + //! \brief Get Extractor Track based on latest Pose //! OmafExtractor* GetExtractorByPose( OmafMediaStream* pStream ); //! - //! \brief predict Extractor based history Poses + //! \brief predict Extractor Tracks based history Poses //! ListExtractor GetExtractorByPosePrediction( OmafMediaStream* pStream ); @@ -106,17 +114,9 @@ class OmafExtractorSelector { OmafExtractor* SelectExtractor(OmafMediaStream* pStream, HeadPose* pose); private: - std::list mPoseHistory; // +#include #include -#include // std::ifstream #include -#include - -void ConvertFourCC(VCD::OMAF::FourCC& cc, MP4VR::FourCC mpcc) -{ - VCD::OMAF::FourCC* tmp = new VCD::OMAF::FourCC(mpcc.value); - cc = *tmp; - delete tmp; -} - -void AssignSampleType(VCD::OMAF::SampleType vSamT, MP4VR::SampleType mSamT) -{ - std::map valueMap; - - valueMap[MP4VR::OUTPUT_NON_REFERENCE_FRAME] = VCD::OMAF::OUTPUT_NON_REF_FRAME; - valueMap[MP4VR::OUTPUT_REFERENCE_FRAME] = VCD::OMAF::OUTPUT_REF_FRAME; - valueMap[MP4VR::NON_OUTPUT_REFERENCE_FRAME] = VCD::OMAF::NON_OUTPUT_REF_FRAME; - - vSamT = valueMap[mSamT]; -} - -void AssignTrackSampleType(VCD::OMAF::TrackSampleType vTSamT, MP4VR::TrackSampleType mTSamT) -{ - std::map valueMap; - valueMap[MP4VR::out_ref] = VCD::OMAF::out_ref; - valueMap[MP4VR::out_non_ref] = VCD::OMAF::out_non_ref; - valueMap[MP4VR::non_out_ref] = VCD::OMAF::non_out_ref; - valueMap[MP4VR::display] = VCD::OMAF::display; - valueMap[MP4VR::samples] = VCD::OMAF::samples; - - vTSamT = valueMap[mTSamT]; -} +#include "../isolib/dash_parser/Mp4ReaderImpl.h" +#include "../isolib/dash_parser/Mp4StreamIO.h" VCD_OMAF_BEGIN -class SegmentStream : public MP4VR::StreamInterface { -public: - SegmentStream(){ - mSegment = NULL; - }; - SegmentStream(OmafSegment* seg){ - mSegment = seg; - mFileStream.open( seg->GetSegmentCacheFile().c_str(), ios_base::binary | ios_base::in ); - }; - ~SegmentStream(){ - mSegment = NULL; - mFileStream.close(); - }; -public: - /** Returns the number of bytes read. The value of 0 indicates end - of file. - @param [buffer] The buffer to write the data into - @param [size] The number of bytes to read from the stream - @returns The number of bytes read, or 0 on EOF. - */ - virtual offset_t read(char* buffer, offset_t size){ - if(NULL == mSegment) return -1; - - mFileStream.read(buffer, size); - std::streamsize readCnt = mFileStream.gcount(); - return (offset_t)readCnt; - }; - - /** Seeks to the given offset. Should the offset be erronous we'll - find it out also by the next read that will signal EOF. - - Seeking to the point after the last input byte is permissable; - so seeking to offset 0 of an empty file should be OK as well - as seeking to offset 1 of a 1-byte file. The next read would - indicate EOF, though. 
- - @param [offset] Offset to seek into - @returns true if the seek was successful - */ - virtual bool absoluteSeek(offset_t offset){ - if(NULL == mSegment) return false; - if (mFileStream.tellg() == -1) - { - mFileStream.clear(); - mFileStream.seekg(0, ios_base::beg); - } - - mFileStream.seekg( offset ); - - return true; - }; - - /** Retrieve the current offset of the file. - @returns The current offset of the file. - */ - virtual offset_t tell(){ - - if(NULL == mSegment) return -1; - offset_t offset1 = mFileStream.tellg(); - return offset1; - }; - - /** Retrieve the size of the current file. - - @returns The current size of the file. Return - StreamInterface::IndeterminateSize if the file size cannot be determined. - */ - virtual offset_t size(){ - //return MP4VR::StreamInterface::IndeterminateSize; - mFileStream.seekg(0, ios_base::end); - int64_t size = mFileStream.tellg(); - mFileStream.seekg(0, ios_base::beg); - return size; - }; - -private: - OmafSegment* mSegment; - std::ifstream mFileStream; +class SegmentStream : public VCD::MP4::StreamIO { + public: + SegmentStream(OmafSegment* seg) { mSegment = seg; }; + ~SegmentStream() { mSegment = nullptr; }; + + public: + //! + //! \brief Read the stream according to the offset + //! + //! \param [in] buffer + //! The buffer to be written the data into + //! \param [in] size + //! The number of bytes to be read from the stream + //! + //! \return offset_t + //! the actual number of bytes read from the stream + virtual offset_t ReadStream(char* buffer, offset_t size) { + if (nullptr == mSegment) return -1; + + return mSegment->ReadStream(buffer, size); + }; + + //! + //! \brief Seek to the given offset. Seeking to offset 0 of an empty + //! file is OK and seeking to offset 1 of a 1-byte file is also + //! OK. + //! + //! \param [in] offset + //! offset to seek into + //! + //! \return bool + //! true if the seeking is successful + virtual bool SeekAbsoluteOffset(offset_t offset) { + if (nullptr == mSegment) return false; + + return mSegment->SeekAbsoluteOffset(offset); + }; + + //! + //! \brief Get the current offset position of the read file + //! + //! \return offset_t + //! the actual current offset of the read file + virtual offset_t TellOffset() { + if (nullptr == mSegment) return -1; + + return mSegment->TellOffset(); + }; + + //! + //! \brief Get the size of current read file + //! + //! \return offset_t + //! the size of the file, or StreamIO::IndeterminateSize if the + //! 
size can't be determined + virtual offset_t GetStreamSize() { + if (nullptr == mSegment) return -1; + + return mSegment->GetStreamSize(); + }; + + private: + OmafSegment* mSegment = nullptr; }; -OmafMP4VRReader::OmafMP4VRReader() -{ - mMP4ReaderImpl = (void*) MP4VR::MP4VRFileReaderInterface::Create(); -} +OmafMP4VRReader::OmafMP4VRReader() { mMP4ReaderImpl = (void*)VCD::MP4::Mp4Reader::Create(); } + +OmafMP4VRReader::OmafMP4VRReader(OmafMP4VRReader&& other) { mMP4ReaderImpl = std::move(other.mMP4ReaderImpl); } -OmafMP4VRReader::~OmafMP4VRReader() -{ - if(mMP4ReaderImpl) - MP4VR::MP4VRFileReaderInterface::Destroy((MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl); +OmafMP4VRReader::~OmafMP4VRReader() { + if (mMP4ReaderImpl) { + VCD::MP4::Mp4Reader::Destroy((VCD::MP4::Mp4Reader*)mMP4ReaderImpl); + mMP4ReaderImpl = nullptr; + } } -int32_t OmafMP4VRReader::initialize( OmafSegment* pSeg) -{ - if(NULL == mMP4ReaderImpl){ - mMP4ReaderImpl = (void*) MP4VR::MP4VRFileReaderInterface::Create(); - } +int32_t OmafMP4VRReader::initialize(OmafSegment* pSeg) { + if (nullptr == mMP4ReaderImpl) { + mMP4ReaderImpl = (void*)VCD::MP4::Mp4Reader::Create(); + } - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->initialize( new SegmentStream(pSeg) ); + return pReader->Initialize(new SegmentStream(pSeg)); } -void OmafMP4VRReader::close() -{ - if(NULL == mMP4ReaderImpl){ - LOG(ERROR) << "OmafMP4VRReader::close NULL != mMP4ReaderImpl" << endl; - return ; - } - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +void OmafMP4VRReader::close() { + if (nullptr == mMP4ReaderImpl) { + OMAF_LOG(LOG_ERROR, "OmafMP4VRReader::close nullptr != mMP4ReaderImpl\n"); + return; + } + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - pReader->close(); + pReader->Close(); } -int32_t OmafMP4VRReader::getMajorBrand(FourCC& majorBrand, - uint32_t initializationSegmentId, - uint32_t segmentId ) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - MP4VR::FourCC brand; - int ret = pReader->getMajorBrand(brand, initializationSegmentId, segmentId); +int32_t OmafMP4VRReader::getMajorBrand(FourCC& majorBrand, uint32_t initializationSegmentId, uint32_t segmentId) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; + VCD::MP4::FourCC brand; + int ret = pReader->GetMajorBrand(brand, initializationSegmentId, segmentId); - if( 0!=ret ) return ERROR_INVALID; + if (0 != ret) return ERROR_INVALID; - ConvertFourCC(majorBrand, brand); + majorBrand = brand; - return ERROR_NONE; + return ERROR_NONE; } -int32_t OmafMP4VRReader::getMinorVersion(uint32_t& minorVersion, - uint32_t initializationSegmentId, - uint32_t segmentId ) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getMinorVersion(uint32_t& minorVersion, uint32_t initializationSegmentId, + uint32_t segmentId) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getMinorVersion(minorVersion, initializationSegmentId, segmentId); + return pReader->GetMinorVersion(minorVersion, 
initializationSegmentId, segmentId); } int32_t OmafMP4VRReader::getCompatibleBrands(std::vector& compatibleBrands, - uint32_t initializationSegmentId, - uint32_t segmentId ) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - - MP4VR::DynArray brands; - - int32_t ret = pReader->getCompatibleBrands(brands, initializationSegmentId, segmentId); - - for(uint32_t i=0; i brands; + + int32_t ret = pReader->GetCompatibleBrands(brands, initializationSegmentId, segmentId); + + for (uint32_t i = 0; i < brands.size; i++) { + VCD::OMAF::FourCC* cc = new VCD::OMAF::FourCC(brands[i].item); + compatibleBrands.push_back(cc); + } + + return ret; +} + +void OmafMP4VRReader::SelectedTrackInfos(std::vector& trackInfos, + std::vector middleTrackInfos) const { + std::map mapInitTrack = getMapInitTrk(); + if (mapInitTrack.size() != 0) { + vector> needIDs; + for (auto mapId = mapInitTrack.begin(); mapId != mapInitTrack.end(); mapId++) { + uint32_t currentInitSegId = mapId->first; + uint32_t currentTrackId = mapId->second; + for (auto itTrack = middleTrackInfos.begin(); itTrack != middleTrackInfos.end(); itTrack++) { + TrackInformation* track = *itTrack; + if (currentInitSegId == track->initSegmentId && currentTrackId == (track->trackId & 0xffff)) { + trackInfos.push_back(track); + needIDs.push_back(make_pair(currentInitSegId, currentTrackId)); + break; + } + } } - - return ret; -} - -void OmafMP4VRReader::SelectedTrackInfos(std::vector& trackInfos, std::vector middleTrackInfos) const -{ - std::map mapInitTrack = getMapInitTrk(); - if (mapInitTrack.size() != 0) - { - vector> needIDs; - for (auto mapId = mapInitTrack.begin(); mapId != mapInitTrack.end(); mapId++) - { - uint32_t currentInitSegId = mapId->first; - uint32_t currentTrackId = mapId->second; - for (auto itTrack = middleTrackInfos.begin(); itTrack != middleTrackInfos.end(); itTrack++) - { - TrackInformation *track = *itTrack; - if (currentInitSegId == track->initSegId && currentTrackId == (track->trackId & 0xffff)) - { - trackInfos.push_back(track); - needIDs.push_back(make_pair(currentInitSegId, currentTrackId)); - break; - } - } + for (auto itTrack = middleTrackInfos.begin(); itTrack != middleTrackInfos.end(); itTrack++) { + TrackInformation* track = *itTrack; + pair tmpID = make_pair(track->initSegmentId, track->trackId & 0xffff); + if (find(needIDs.begin(), needIDs.end(), tmpID) == needIDs.end()) { + SAFE_DELETE(track); + } + } + } else { + std::vector clearTrackInfoArr; + for (auto itTrack = middleTrackInfos.begin(); itTrack != middleTrackInfos.end(); itTrack++) { + TrackInformation* track = *itTrack; + if (!track) continue; + auto itRefTrack = track->referenceTrackIds[0]; + TypeToTrackIDs* refTrackIds = &(itRefTrack); + + if (refTrackIds->trackIds.size != 0) { + trackInfos.push_back(track); + continue; + } else { + uint32_t initSegIndex = track->initSegmentId; + uint32_t combinedTrackId = track->trackId; + auto itTrack2 = middleTrackInfos.begin(); + for (; itTrack2 != middleTrackInfos.end(); itTrack2++) { + if (!(*itTrack2)) continue; + if (((*itTrack2)->initSegmentId == initSegIndex) && ((*itTrack2)->trackId != combinedTrackId)) { + break; + } } - for (auto itTrack = middleTrackInfos.begin(); itTrack != middleTrackInfos.end(); itTrack++) - { - TrackInformation *track = *itTrack; - pair tmpID = make_pair(track->initSegId, track->trackId & 0xffff); - if (find(needIDs.begin(), needIDs.end(), tmpID) == needIDs.end()) - { - for(auto &it : 
track->samplePropertyArrays) - { - SAFE_DELETE(it); - } - track->samplePropertyArrays.clear(); - SAFE_DELETE(track); - } + + if (itTrack2 != middleTrackInfos.end()) { + clearTrackInfoArr.push_back(track); + continue; + } else { + trackInfos.push_back(track); } + } } - else + for (uint32_t i = 0; i < clearTrackInfoArr.size(); i++) { - for (auto itTrack = middleTrackInfos.begin(); itTrack != middleTrackInfos.end(); itTrack++) - { - TrackInformation *track = *itTrack; - if(!track) continue; - auto itRefTrack = track->referenceTrackIdArrays.begin(); - TypeToTrackIDs *refTrackIds = &(*itRefTrack); - - if (refTrackIds->trackIds.size() != 0) - { - trackInfos.push_back(track); - //LOG(INFO)<<"track "<<(track->trackId& 0xffff)<<" property size = "<sampleProperties.size()<initSegId; - uint32_t combinedTrackId = track->trackId; - auto itTrack2 = middleTrackInfos.begin(); - for ( ; itTrack2 != middleTrackInfos.end(); itTrack2++) - { - if(!(*itTrack2)) continue; - if (((*itTrack2)->initSegId == initSegIndex) && - ((*itTrack2)->trackId != combinedTrackId)) - { - break; - } - } - - if (itTrack2 != middleTrackInfos.end()) - { - for(auto &it : track->samplePropertyArrays) - { - SAFE_DELETE(it); - } - track->samplePropertyArrays.clear(); - SAFE_DELETE(track); - continue; - } - else - { - trackInfos.push_back(track); - //LOG(INFO)<<"track "<<(track->trackId& 0xffff)<<" property size = "<sampleProperties.size()<& trackInfos) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getTrackInformations(std::vector& trackInfos) const { + double dResult; + clock_t lBefore = clock(); + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; +#if 1 + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - if(trackInfos.size() != 0) trackInfos.clear(); + if (trackInfos.size() != 0) trackInfos.clear(); - std::vector middleTrackInfos; + VCD::MP4::VarLenArray* Infos = new VCD::MP4::VarLenArray; + pReader->GetTrackInformation(*Infos); - MP4VR::DynArray *Infos = new MP4VR::DynArray; + for (uint32_t i = 0; i < (*Infos).size; i++) { + TrackInformation* trackInfo = new TrackInformation; + *trackInfo = (*Infos)[i]; - pReader->getTrackInformations(*Infos); + trackInfos.push_back(trackInfo); + } - uint32_t idx = 0; + if (Infos) + { + delete Infos; + Infos = NULL; + } - for( uint32_t i=0; i<(*Infos).size; i++){ - TrackInformation *trackInfo = new TrackInformation; - trackInfo->trackId = (*Infos)[i].trackId; - trackInfo->initSegId = (*Infos)[i].initSegmentId; + dResult = (double)(clock() - lBefore) * 1000 / CLOCKS_PER_SEC; + OMAF_LOG(LOG_INFO, "Total Time for OmafMP4VRReader GetTrackInformation is %f ms\n", dResult); +#else + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - trackInfo->alternateGroupId = (*Infos)[i].alternateGroupId; - trackInfo->featureBM = (*Infos)[i].features; - trackInfo->vrFeatureBM = (*Infos)[i].vrFeatures; - trackInfo->maxSampleSize = (*Infos)[i].maxSampleSize; - trackInfo->timeScale = (*Infos)[i].timeScale; - trackInfo->hasTypeInformation = (*Infos)[i].hasTypeInformation; - trackInfo->frameRate.den = (*Infos)[i].frameRate.den; - trackInfo->frameRate.num = (*Infos)[i].frameRate.num; + if (trackInfos.size() != 0) trackInfos.clear(); - for(idx=0; idx<(*Infos)[i].trackURI.size; idx++){ - trackInfo->trackURI.push_back((*Infos)[i].trackURI[idx]); - } + std::vector middleTrackInfos; - for(idx=0; idx<(*Infos)[i].alternateTrackIds.size; idx++){ - 
trackInfo->alternateTrackIdArrays.push_back((*Infos)[i].alternateTrackIds[idx]); - } + VCD::MP4::VarLenArray* Infos = new VCD::MP4::VarLenArray; - for(idx=0; idx<(*Infos)[i].referenceTrackIds.size; idx++){ - TypeToTrackIDs referenceTrackId; - for(uint32_t j=0; j<(*Infos)[i].referenceTrackIds[idx].trackIds.size; j++){ - referenceTrackId.trackIds.push_back((*Infos)[i].referenceTrackIds[idx].trackIds[j]); - } - ConvertFourCC(referenceTrackId.type, (*Infos)[i].referenceTrackIds[idx].type); - trackInfo->referenceTrackIdArrays.push_back(referenceTrackId); - } + pReader->GetTrackInformations(*Infos); - for(idx=0; idx<(*Infos)[i].trackGroupIds.size; idx++){ - TypeToTrackIDs trackGroupIds; - for(uint32_t j=0; j<(*Infos)[i].trackGroupIds[idx].trackIds.size; j++){ - trackGroupIds.trackIds.push_back((*Infos)[i].trackGroupIds[idx].trackIds[j]); - } - ConvertFourCC(trackGroupIds.type, (*Infos)[i].trackGroupIds[idx].type); - trackInfo->trackGroupIdArrays.push_back(trackGroupIds); - } + for (uint32_t i = 0; i < (*Infos).size; i++) { + TrackInformation* trackInfo = new TrackInformation; + *trackInfo = (*Infos)[i]; - for(idx=0; idx<(*Infos)[i].sampleProperties.size; idx++){ - SampleInformation *info = new SampleInformation; - info->earliestTimestamp = (*Infos)[i].sampleProperties[idx].earliestTimestamp; - info->earliestTimestampTS = (*Infos)[i].sampleProperties[idx].earliestTimestampTS; - info->descriptionIndex = (*Infos)[i].sampleProperties[idx].sampleDescriptionIndex; - info->initSegmentId = (*Infos)[i].sampleProperties[idx].initSegmentId; - info->durationTS = (*Infos)[i].sampleProperties[idx].sampleDurationTS; - ConvertFourCC(info->entryType, (*Infos)[i].sampleProperties[idx].sampleEntryType); - info->id = (*Infos)[i].sampleProperties[idx].sampleId; - info->segmentId = (*Infos)[i].sampleProperties[idx].segmentId; - AssignSampleType(info->type, (*Infos)[i].sampleProperties[idx].sampleType); - info->flags.flagsAsUInt = (*Infos)[i].sampleProperties[idx].sampleFlags.flagsAsUInt; - info->flags.flags.isLeading = (*Infos)[i].sampleProperties[idx].sampleFlags.flags.is_leading; - info->flags.flags.reserved = (*Infos)[i].sampleProperties[idx].sampleFlags.flags.reserved; - info->flags.flags.sampleDegradationPriority = (*Infos)[i].sampleProperties[idx].sampleFlags.flags.sample_degradation_priority; - info->flags.flags.sampleHasRedundancy = (*Infos)[i].sampleProperties[idx].sampleFlags.flags.sample_has_redundancy; - info->flags.flags.sampleDependsOn = (*Infos)[i].sampleProperties[idx].sampleFlags.flags.sample_depends_on; - info->flags.flags.sampleIsNonSyncSample = (*Infos)[i].sampleProperties[idx].sampleFlags.flags.sample_is_non_sync_sample; - info->flags.flags.samplePaddingValue = (*Infos)[i].sampleProperties[idx].sampleFlags.flags.sample_padding_value; - trackInfo->samplePropertyArrays.push_back(info); - } + middleTrackInfos.push_back(trackInfo); + } + SelectedTrackInfos(trackInfos, middleTrackInfos); + middleTrackInfos.clear(); - middleTrackInfos.push_back(trackInfo); - } - SelectedTrackInfos(trackInfos, middleTrackInfos); - middleTrackInfos.clear(); + delete Infos; +#endif - delete Infos; - return ERROR_NONE; + return ERROR_NONE; } -int32_t OmafMP4VRReader::getDisplayWidth(uint32_t trackId, uint32_t& displayWidth) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getDisplayWidth(uint32_t trackId, uint32_t& displayWidth) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; 
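+  // Note (added for clarity): this and the following accessors share one delegation pattern:
+  // guard against a null handle, cast the opaque mMP4ReaderImpl pointer back to
+  // VCD::MP4::Mp4Reader, then forward to the corresponding renamed reader API
+  // (e.g. getDisplayWidth -> GetDisplayWidth, getDisplayHeight -> GetDisplayHeight).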
+ VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getDisplayWidth(trackId, displayWidth); + return pReader->GetDisplayWidth(trackId, displayWidth); } -int32_t OmafMP4VRReader::getDisplayHeight(uint32_t trackId, uint32_t& displayHeight) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getDisplayHeight(uint32_t trackId, uint32_t& displayHeight) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getDisplayHeight(trackId, displayHeight); + return pReader->GetDisplayHeight(trackId, displayHeight); } -int32_t OmafMP4VRReader::getDisplayWidthFP(uint32_t trackId, uint32_t& displayWidth) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getDisplayWidthFP(uint32_t trackId, uint32_t& displayWidth) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getDisplayWidthFP(trackId, displayWidth); + return pReader->GetDisplayWidthFP(trackId, displayWidth); } -int32_t OmafMP4VRReader::getDisplayHeightFP(uint32_t trackId, uint32_t& displayHeight) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getDisplayHeightFP(uint32_t trackId, uint32_t& displayHeight) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getDisplayHeightFP(trackId, displayHeight); + return pReader->GetDisplayHeightFP(trackId, displayHeight); } -int32_t OmafMP4VRReader::getWidth(uint32_t trackId, uint32_t sampleId, uint32_t& width) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getWidth(uint32_t trackId, uint32_t sampleId, uint32_t& width) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getWidth(trackId, sampleId, width); + return pReader->GetWidth(trackId, sampleId, width); } -int32_t OmafMP4VRReader::getHeight(uint32_t trackId, uint32_t sampleId, uint32_t& height) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getHeight(uint32_t trackId, uint32_t sampleId, uint32_t& height) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getHeight(trackId, sampleId, height); + return pReader->GetHeight(trackId, sampleId, height); } -int32_t OmafMP4VRReader::getPlaybackDurationInSecs(uint32_t trackId, double& durationInSecs) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getDims(uint32_t trackId, uint32_t sampleId, uint32_t& width, uint32_t& height) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + 
VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getPlaybackDurationInSecs(trackId, durationInSecs); + return pReader->GetDims(trackId, sampleId, width, height); } -int32_t OmafMP4VRReader::getTrackSampleListByType(uint32_t trackId, - VCD::OMAF::TrackSampleType sampleType, - std::vector& sampleIds) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getPlaybackDurationInSecs(uint32_t trackId, double& durationInSecs) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - MP4VR::TrackSampleType type; + return pReader->GetPlaybackDurationInSecs(trackId, durationInSecs); +} - std::map valueMap; - valueMap[VCD::OMAF::out_ref] = MP4VR::out_ref; - valueMap[VCD::OMAF::out_non_ref] = MP4VR::out_non_ref; - valueMap[VCD::OMAF::non_out_ref] = MP4VR::non_out_ref; - valueMap[VCD::OMAF::display] = MP4VR::display; - valueMap[VCD::OMAF::samples] = MP4VR::samples; +int32_t OmafMP4VRReader::getTrackSampleListByType(uint32_t trackId, VCD::OMAF::TrackSampleType sampleType, + std::vector& sampleIds) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - type = valueMap[sampleType]; + VCD::MP4::SampleFrameType type; - MP4VR::DynArray ids; + type = sampleType; - int32_t ret = pReader->getTrackSampleListByType(trackId, type, ids); + VCD::MP4::VarLenArray ids; - for(uint32_t idx=0; idxGetSampListByType(trackId, type, ids); - return ret; + for (uint32_t idx = 0; idx < ids.size; idx++) { + sampleIds.push_back(ids[idx]); + } + return ret; } -int32_t OmafMP4VRReader::getTrackSampleType(uint32_t trackId, uint32_t sampleId, VCD::OMAF::FourCC& trackSampleBoxType) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getTrackSampleType(uint32_t trackId, uint32_t sampleId, + VCD::OMAF::FourCC& trackSampleBoxType) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - MP4VR::FourCC cc; + VCD::MP4::FourCC cc; - if (NULL == pReader) - { - return ERROR_NULL_PTR; - } + if (nullptr == pReader) { + return ERROR_NULL_PTR; + } - int32_t ret = pReader->getTrackSampleType(trackId, sampleId, cc); + int32_t ret = pReader->GetSampType(trackId, sampleId, cc); - ConvertFourCC(trackSampleBoxType, cc); + trackSampleBoxType = cc; - return ret; + return ret; } -int32_t OmafMP4VRReader::getExtractorTrackSampleData(uint32_t trackId, - uint32_t sampleId, - char* memoryBuffer, - uint32_t& memoryBufferSize, - bool videoByteStreamHeaders) -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - int ret = pReader->getExtractorTrackSampleData( trackId, sampleId, memoryBuffer, memoryBufferSize, videoByteStreamHeaders); +int32_t OmafMP4VRReader::getExtractorTrackSampleData(uint32_t trackId, uint32_t sampleId, char* memoryBuffer, + uint32_t& memoryBufferSize, bool videoByteStreamHeaders) { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; + int ret = + pReader->GetExtractorTrackSampData(trackId, sampleId, memoryBuffer, memoryBufferSize, 
videoByteStreamHeaders); - if(ret == MP4VR::MP4VRFileReaderInterface::MEMORY_TOO_SMALL_BUFFER) - ret = OMAF_MEMORY_TOO_SMALL_BUFFER; - return ret; + return ret; } -int32_t OmafMP4VRReader::getTrackSampleData(uint32_t trackId, - uint32_t sampleId, - char* memoryBuffer, - uint32_t& memoryBufferSize, - bool videoByteStreamHeaders) -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - int ret = pReader->getTrackSampleData( trackId, sampleId, memoryBuffer, memoryBufferSize, videoByteStreamHeaders); +int32_t OmafMP4VRReader::getTrackSampleData(uint32_t trackId, uint32_t sampleId, char* memoryBuffer, + uint32_t& memoryBufferSize, bool videoByteStreamHeaders) { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; + int ret = pReader->GetSampData(trackId, sampleId, memoryBuffer, memoryBufferSize, videoByteStreamHeaders); - if(ret == MP4VR::MP4VRFileReaderInterface::MEMORY_TOO_SMALL_BUFFER) - ret = OMAF_MEMORY_TOO_SMALL_BUFFER; - return ret; + return ret; } -int32_t OmafMP4VRReader::getTrackSampleOffset(uint32_t trackId, uint32_t sampleId, uint64_t& sampleOffset, uint32_t& sampleLength) -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getTrackSampleOffset(uint32_t trackId, uint32_t sampleId, uint64_t& sampleOffset, + uint32_t& sampleLength) { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getTrackSampleOffset(trackId, sampleId, sampleOffset, sampleLength); + return pReader->GetSampOffset(trackId, sampleId, sampleOffset, sampleLength); } -int32_t OmafMP4VRReader::getDecoderConfiguration(uint32_t trackId, uint32_t sampleId, std::vector& decoderInfos) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getDecoderConfiguration(uint32_t trackId, uint32_t sampleId, + std::vector& decoderInfos) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - std::map valueMap; - valueMap[MP4VR::AVC_SPS] = VCD::OMAF::AVC_SPS; - valueMap[MP4VR::AVC_PPS] = VCD::OMAF::AVC_PPS; - valueMap[MP4VR::HEVC_VPS] = VCD::OMAF::HEVC_VPS; - valueMap[MP4VR::HEVC_SPS] = VCD::OMAF::HEVC_SPS; - valueMap[MP4VR::HEVC_PPS] = VCD::OMAF::HEVC_PPS; - valueMap[MP4VR::AudioSpecificConfig] = VCD::OMAF::AudioSpecificConfig; + VCD::MP4::VarLenArray* Infos = new VCD::MP4::VarLenArray; - MP4VR::DynArray *Infos = new MP4VR::DynArray; + int32_t ret = pReader->GetCodecSpecInfo(trackId, sampleId, *Infos); - int32_t ret = pReader->getDecoderConfiguration( trackId, sampleId, *Infos ); + for (uint32_t i = 0; i < (*Infos).size; i++) { + DecoderSpecificInfo info; + info = (*Infos)[i]; - for(uint32_t i=0; i<(*Infos).size; i++){ - DecoderSpecificInfo info; - info.decodeSpecInfoType = valueMap[(*Infos)[i].decSpecInfoType]; - for(uint32_t j=0; j<(*Infos)[i].decSpecInfoData.size; j++) - { - //info.decSpecInfoData[j] = Infos[i].decSpecInfoData[j]; - info.decodeSpecInfoData.push_back((*Infos)[i].decSpecInfoData[j]); - } + decoderInfos.push_back(info); + } - decoderInfos.push_back(info); - } - - delete Infos; - return ret; + delete Infos; + return ret; } -int32_t 
OmafMP4VRReader::getTrackTimestamps(uint32_t trackId, std::vector& timestamps) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getTrackTimestamps(uint32_t trackId, + std::vector& timestamps) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - MP4VR::DynArray id_pairs; + VCD::MP4::VarLenArray id_pairs; - int32_t ret = pReader->getTrackTimestamps( trackId, id_pairs ); + int32_t ret = pReader->GetTrackTStamps(trackId, id_pairs); - for(uint32_t i=0; i& timestamps) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getTimestampsOfSample(uint32_t trackId, uint32_t sampleId, + std::vector& timestamps) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - MP4VR::DynArray tms; + VCD::MP4::VarLenArray tms; - int32_t ret = pReader->getTimestampsOfSample( trackId, sampleId, tms ); + int32_t ret = pReader->GetSampTStamps(trackId, sampleId, tms); - for(uint32_t i=0; i& sampleDecodingOrder) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getSamplesInDecodingOrder(uint32_t trackId, + std::vector& sampleDecodingOrder) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - MP4VR::DynArray id_pairs; + VCD::MP4::VarLenArray id_pairs; - int32_t ret = pReader->getSamplesInDecodingOrder( trackId, id_pairs ); + int32_t ret = pReader->GetSampInDecSeq(trackId, id_pairs); - for(uint32_t i=0; igetDecoderCodeType(trackId, sampleId, cc); + int32_t ret = pReader->GetDecoderCodeType(trackId, sampleId, cc); - ConvertFourCC(decoderCodeType, cc); + decoderCodeType = cc; - return ret; + return ret; } -int32_t OmafMP4VRReader::getSampleDuration(uint32_t trackId, uint32_t sampleId, uint32_t& sampleDuration) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getSampleDuration(uint32_t trackId, uint32_t sampleId, uint32_t& sampleDuration) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->getSampleDuration(trackId, sampleId, sampleDuration); + return pReader->GetDurOfSamp(trackId, sampleId, sampleDuration); } -int32_t OmafMP4VRReader::getPropertyChnl(uint32_t trackId, uint32_t sampleId, VCD::OMAF::chnlProperty& chProperty) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - - MP4VR::chnlProperty chProp; - - int32_t ret = pReader->getPropertyChnl(trackId, sampleId, chProp); - - chProperty.channelNumber = chProp.channelCount; - chProperty.definedLayout = chProp.definedLayout; - chProperty.objectNumber = chProp.objectCount; - chProperty.omittedChannelsMap = chProp.omittedChannelsMap; - chProperty.streamStruct = chProp.streamStructure; +int32_t OmafMP4VRReader::getPropertyChnl(uint32_t trackId, uint32_t sampleId, + VCD::OMAF::chnlProperty& chProperty) const { + if 
(nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - for(uint32_t i=0; iGetAudioChnlProp(trackId, sampleId, chProp); + chProperty = chProp; - return ret; + return ret; } -int32_t OmafMP4VRReader::getPropertySpatialAudio(uint32_t trackId, uint32_t sampleId, VCD::OMAF::SpatialAudioProperty& spatialaudioproperty) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - - MP4VR::SpatialAudioProperty spProp; +int32_t OmafMP4VRReader::getPropertySpatialAudio(uint32_t trackId, uint32_t sampleId, + VCD::OMAF::SpatialAudioProperty& spatialaudioproperty) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - int32_t ret = pReader->getPropertySpatialAudio(trackId, sampleId, spProp); + VCD::MP4::SpatialAudioProperty spProp; - spatialaudioproperty.ambisonicChannelOrder = spProp.ambisonicChannelOrdering; - spatialaudioproperty.ambisonicNorm = spProp.ambisonicNormalization; - spatialaudioproperty.ambisonicOrder = spProp.ambisonicOrder; - spatialaudioproperty.ambisonicType = spProp.ambisonicType; - spatialaudioproperty.version = spProp.version; + int32_t ret = pReader->GetSpatAudioProp(trackId, sampleId, spProp); + spatialaudioproperty = spProp; - for(uint32_t i=0; i valueMap; - valueMap[MP4VR::StereoScopic3DProperty::MONOSCOPIC] = VCD::OMAF::StereoScopic3DProperty::MONO; - valueMap[MP4VR::StereoScopic3DProperty::STEREOSCOPIC_TOP_BOTTOM] = VCD::OMAF::StereoScopic3DProperty::STEREO_TOP_BOTTOM; - valueMap[MP4VR::StereoScopic3DProperty::STEREOSCOPIC_LEFT_RIGHT] = VCD::OMAF::StereoScopic3DProperty::STEREO_LEFT_RIGHT; - valueMap[MP4VR::StereoScopic3DProperty::STEREOSCOPIC_STEREO_CUSTOM] = VCD::OMAF::StereoScopic3DProperty::STEREO_STEREO; + int32_t ret = pReader->GetSteScop3DProp(trackId, sampleId, ssProp); + stereoscopicproperty = ssProp; - int32_t ret = pReader->getPropertyStereoScopic3D(trackId, sampleId, ssProp); - - stereoscopicproperty = valueMap[ssProp]; - - return ret; + return ret; } -int32_t OmafMP4VRReader::getPropertySphericalVideoV1(uint32_t trackId, uint32_t sampleId, VCD::OMAF::SphericalVideoV1Property& sphericalproperty) const -{ - return 0; +int32_t OmafMP4VRReader::getPropertySphericalVideoV1(uint32_t trackId, uint32_t sampleId, + VCD::OMAF::SphericalVideoV1Property& sphericalproperty) const { + return 0; } -int32_t OmafMP4VRReader::getPropertySphericalVideoV2(uint32_t trackId, uint32_t sampleId, VCD::OMAF::SphericalVideoV2Property& sphericalproperty) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - - MP4VR::SphericalVideoV2Property sv2Prop; +int32_t OmafMP4VRReader::getPropertySphericalVideoV2(uint32_t trackId, uint32_t sampleId, + VCD::OMAF::SphericalVideoV2Property& sphericalproperty) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - int32_t ret = pReader->getPropertySphericalVideoV2(trackId, sampleId, sv2Prop); + VCD::MP4::SphericalVideoV2Property sv2Prop; - sphericalproperty.pose.pitchFP = sv2Prop.pose.pitchFP; - sphericalproperty.pose.rollFP = sv2Prop.pose.rollFP; - sphericalproperty.pose.yawFP = sv2Prop.pose.yawFP; + int32_t ret = pReader->GetSpheV2Prop(trackId, sampleId, sv2Prop); - sphericalproperty.projection.cubemap.layout = 
sv2Prop.projection.cubemap.layout; - sphericalproperty.projection.cubemap.padding = sv2Prop.projection.cubemap.padding; + sphericalproperty = sv2Prop; - sphericalproperty.projection.equirectangular.bottomFP = sv2Prop.projection.equirectangular.boundsBottomFP; - sphericalproperty.projection.equirectangular.leftFP = sv2Prop.projection.equirectangular.boundsLeftFP; - sphericalproperty.projection.equirectangular.rightFP = sv2Prop.projection.equirectangular.boundsRightFP; - sphericalproperty.projection.equirectangular.topFP = sv2Prop.projection.equirectangular.boundsTopFP; + return ret; +} - std::map valueMap; - valueMap[MP4VR::ProjectionType::UNKOWN] = VCD::OMAF::ProjectionType::UNKOWN; - valueMap[MP4VR::ProjectionType::CUBEMAP] = VCD::OMAF::ProjectionType::CUBEMAP; - valueMap[MP4VR::ProjectionType::EQUIRECTANGULAR] = VCD::OMAF::ProjectionType::EQUIRECTANGULAR; - valueMap[MP4VR::ProjectionType::MESH] = VCD::OMAF::ProjectionType::MESH; +int32_t OmafMP4VRReader::getPropertyRegionWisePacking(uint32_t trackId, uint32_t sampleId, + RegionWisePacking* rwpk) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - sphericalproperty.projectionType = valueMap[sv2Prop.projectionType]; + VCD::MP4::RWPKProperty rwpkProp; - return ret; -} + int32_t ret = pReader->GetRWPKProp(trackId, sampleId, rwpkProp); -int32_t OmafMP4VRReader::getPropertyRegionWisePacking(uint32_t trackId, uint32_t sampleId, RegionWisePacking *rwpk) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; + rwpk->constituentPicMatching = rwpkProp.constituentPictureMatching; + rwpk->packedPicHeight = rwpkProp.packedPicHeight; + rwpk->packedPicWidth = rwpkProp.packedPicWidth; + rwpk->projPicHeight = rwpkProp.projPicHeight; + rwpk->projPicWidth = rwpkProp.projPicWidth; - MP4VR::RegionWisePackingProperty rwpkProp; + rwpk->numRegions = rwpkProp.regions.size; - int32_t ret = pReader->getPropertyRegionWisePacking(trackId, sampleId, rwpkProp); + rwpk->rectRegionPacking = new RectangularRegionWisePacking[rwpk->numRegions]; - rwpk->constituentPicMatching = rwpkProp.constituentPictureMatchingFlag; - rwpk->packedPicHeight = rwpkProp.packedPictureHeight; - rwpk->packedPicWidth = rwpkProp.packedPictureWidth; - rwpk->projPicHeight = rwpkProp.projPictureHeight; - rwpk->projPicWidth = rwpkProp.projPictureWidth; + for (uint32_t i = 0; i < rwpkProp.regions.size; i++) { + rwpk->rectRegionPacking[i].guardBandFlag = rwpkProp.regions[i].guardBandFlag; + rwpk->rectRegionPacking[i].transformType = rwpkProp.regions[i].region.rectReg.transformType; + rwpk->rectRegionPacking[i].bottomGbHeight = rwpkProp.regions[i].region.rectReg.bottomGbHeight; + rwpk->rectRegionPacking[i].gbNotUsedForPredFlag = rwpkProp.regions[i].region.rectReg.gbNotUsedForPredFlag; + rwpk->rectRegionPacking[i].gbType0 = rwpkProp.regions[i].region.rectReg.gbType0; + rwpk->rectRegionPacking[i].gbType1 = rwpkProp.regions[i].region.rectReg.gbType1; + rwpk->rectRegionPacking[i].gbType2 = rwpkProp.regions[i].region.rectReg.gbType2; + rwpk->rectRegionPacking[i].gbType3 = rwpkProp.regions[i].region.rectReg.gbType3; + rwpk->rectRegionPacking[i].leftGbWidth = rwpkProp.regions[i].region.rectReg.leftGbWidth; + rwpk->rectRegionPacking[i].packedRegHeight = rwpkProp.regions[i].region.rectReg.packedRegHeight; + rwpk->rectRegionPacking[i].packedRegLeft = rwpkProp.regions[i].region.rectReg.packedRegLeft; + 
rwpk->rectRegionPacking[i].packedRegTop = rwpkProp.regions[i].region.rectReg.packedRegTop; + rwpk->rectRegionPacking[i].packedRegWidth = rwpkProp.regions[i].region.rectReg.packedRegWidth; + rwpk->rectRegionPacking[i].projRegHeight = rwpkProp.regions[i].region.rectReg.projRegHeight; + rwpk->rectRegionPacking[i].projRegLeft = rwpkProp.regions[i].region.rectReg.projRegLeft; + rwpk->rectRegionPacking[i].projRegTop = rwpkProp.regions[i].region.rectReg.projRegTop; + rwpk->rectRegionPacking[i].projRegWidth = rwpkProp.regions[i].region.rectReg.projRegWidth; + rwpk->rectRegionPacking[i].rightGbWidth = rwpkProp.regions[i].region.rectReg.rightGbWidth; + rwpk->rectRegionPacking[i].topGbHeight = rwpkProp.regions[i].region.rectReg.topGbHeight; + } - rwpk->numRegions = rwpkProp.regions.size; - - rwpk->rectRegionPacking = new RectangularRegionWisePacking[rwpk->numRegions]; + return ret; +} +int32_t OmafMP4VRReader::getPropertyCoverageInformation(uint32_t trackId, uint32_t sampleId, + VCD::OMAF::CoverageInformationProperty& coviProperty) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - for(uint32_t i = 0; i < rwpkProp.regions.size; i++){ + VCD::MP4::COVIInformation ccProp; - rwpk->rectRegionPacking[i].guardBandFlag = rwpkProp.regions[i].guardBandFlag; - rwpk->rectRegionPacking[i].transformType = (uint8_t)RegionWisePackingType::RECTANGULAR; - rwpk->rectRegionPacking[i].bottomGbHeight = rwpkProp.regions[i].region.rectangular.bottomGbHeight; - rwpk->rectRegionPacking[i].gbNotUsedForPredFlag = rwpkProp.regions[i].region.rectangular.gbNotUsedForPredFlag; - rwpk->rectRegionPacking[i].gbType0 = rwpkProp.regions[i].region.rectangular.gbType0; - rwpk->rectRegionPacking[i].gbType1 = rwpkProp.regions[i].region.rectangular.gbType1; - rwpk->rectRegionPacking[i].gbType2 = rwpkProp.regions[i].region.rectangular.gbType2; - rwpk->rectRegionPacking[i].gbType3 = rwpkProp.regions[i].region.rectangular.gbType3; - rwpk->rectRegionPacking[i].leftGbWidth = rwpkProp.regions[i].region.rectangular.leftGbWidth; - rwpk->rectRegionPacking[i].packedRegHeight = rwpkProp.regions[i].region.rectangular.packedRegHeight; - rwpk->rectRegionPacking[i].packedRegLeft = rwpkProp.regions[i].region.rectangular.packedRegLeft; - rwpk->rectRegionPacking[i].packedRegTop = rwpkProp.regions[i].region.rectangular.packedRegTop; - rwpk->rectRegionPacking[i].packedRegWidth = rwpkProp.regions[i].region.rectangular.packedRegWidth; - rwpk->rectRegionPacking[i].projRegHeight = rwpkProp.regions[i].region.rectangular.projRegHeight; - rwpk->rectRegionPacking[i].projRegLeft = rwpkProp.regions[i].region.rectangular.projRegLeft; - rwpk->rectRegionPacking[i].projRegTop = rwpkProp.regions[i].region.rectangular.projRegTop; - rwpk->rectRegionPacking[i].projRegWidth = rwpkProp.regions[i].region.rectangular.projRegWidth; - rwpk->rectRegionPacking[i].rightGbWidth = rwpkProp.regions[i].region.rectangular.rightGbWidth; - rwpk->rectRegionPacking[i].topGbHeight = rwpkProp.regions[i].region.rectangular.topGbHeight; - } + int32_t ret = pReader->GetCOVIInfoProp(trackId, sampleId, ccProp); - return ret; -} -int32_t OmafMP4VRReader::getPropertyCoverageInformation(uint32_t trackId, uint32_t sampleId, VCD::OMAF::CoverageInformationProperty& coviProperty) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - - MP4VR::CoverageInformationProperty ccProp; - - int32_t ret = 
pReader->getPropertyCoverageInformation(trackId, sampleId, ccProp); - - std::map valueMap_CS; - valueMap_CS[MP4VR::CoverageShapeType::FOUR_GREAT_CIRCLES] = VCD::OMAF::CoverageShapeType::FOUR_CIRCLES; - valueMap_CS[MP4VR::CoverageShapeType::TWO_AZIMUTH_AND_TWO_ELEVATION_CIRCLES] = VCD::OMAF::CoverageShapeType::TWO_AZIMUTH_TWO_ELEVATION_CIRCLES; - - std::map valueMap; - valueMap[MP4VR::ViewIdc::MONOSCOPIC] = VCD::OMAF::ViewIdc::MONOSCOPIC; - valueMap[MP4VR::ViewIdc::LEFT] = VCD::OMAF::ViewIdc::LEFT; - valueMap[MP4VR::ViewIdc::RIGHT] = VCD::OMAF::ViewIdc::RIGHT; - valueMap[MP4VR::ViewIdc::LEFT_AND_RIGHT] = VCD::OMAF::ViewIdc::LEFT_AND_RIGHT; - valueMap[MP4VR::ViewIdc::INVALID] = VCD::OMAF::ViewIdc::INVALID; - - coviProperty.covShapeType = valueMap_CS[ccProp.coverageShapeType]; - coviProperty.viewIdc = valueMap[ccProp.defaultViewIdc]; - coviProperty.viewIdcPresenceFlag = ccProp.viewIdcPresenceFlag; - - for(uint32_t i=0; igetPropertyProjectionFormat(trackId, sampleId, pfProp); + int32_t ret = pReader->GetProjFrmtProp(trackId, sampleId, pfProp); - std::map valueMap; - valueMap[MP4VR::EQUIRECTANGULAR] = VCD::OMAF::EQUIRECTANGULAR; - valueMap[MP4VR::CUBEMAP] = VCD::OMAF::CUBEMAP; + projectionFormatProperty.format = pfProp.format; - projectionFormatProperty.format = valueMap[pfProp.format]; - - return ret; + return ret; } -int32_t OmafMP4VRReader::getPropertySchemeTypes(uint32_t trackId, uint32_t sampleId, VCD::OMAF::SchemeTypesProperty& schemeTypesProperty) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - - MP4VR::SchemeTypesProperty stProp; +int32_t OmafMP4VRReader::getPropertySchemeTypes(uint32_t trackId, uint32_t sampleId, + VCD::OMAF::SchemeTypesProperty& schemeTypesProperty) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - int32_t ret = pReader->getPropertySchemeTypes(trackId, sampleId, stProp); + VCD::MP4::SchemeTypesProperty stProp; - ConvertFourCC(schemeTypesProperty.mainScheme.type, stProp.mainScheme.type); - schemeTypesProperty.mainScheme.version = stProp.mainScheme.version; - for(uint32_t i=0; iGetScheTypesProp(trackId, sampleId, stProp); - for(uint32_t j=0; j valueMap; - valueMap[MP4VR::TOP_BOTTOM_PACKING] = VCD::OMAF::TOP_BOTTOM_PACKING; - valueMap[MP4VR::SIDE_BY_SIDE_PACKING] = VCD::OMAF::SIDE_BY_SIDE_PACKING; - valueMap[MP4VR::TEMPORAL_INTERLEAVING] = VCD::OMAF::TEMPORAL_INTERLEAVING; - valueMap[MP4VR::MONOSCOPIC] = VCD::OMAF::MONOSCOPIC; + VCD::MP4::VideoFramePackingType psConf; - int32_t ret = pReader->getPropertyStereoVideoConfiguration(trackId, sampleId, psConf); + int32_t ret = pReader->GetStereVideoProp(trackId, sampleId, psConf); - stereoVideoProperty = valueMap[psConf]; + stereoVideoProperty = psConf; - return ret; + return ret; } -int32_t OmafMP4VRReader::getPropertyRotation(uint32_t trackId, uint32_t sampleId, VCD::OMAF::Rotation& rotationProperty) const -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::getPropertyRotation(uint32_t trackId, uint32_t sampleId, + VCD::OMAF::Rotation& rotationProperty) const { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - MP4VR::Rotation rot; + VCD::MP4::Rotation rot; - int32_t ret = pReader->getPropertyRotation(trackId, sampleId, rot); + 
int32_t ret = pReader->GetRotateProp(trackId, sampleId, rot); - rotationProperty.pitch = rot.pitch; - rotationProperty.roll = rot.roll; - rotationProperty.yaw = rot.yaw; + rotationProperty = rot; - - return ret; + return ret; } +int32_t OmafMP4VRReader::parseInitializationSegment(OmafSegment* streamInterface, uint32_t initSegmentId) { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; -int32_t OmafMP4VRReader::parseInitializationSegment(OmafSegment* streamInterface, uint32_t initSegmentId) -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - - SegmentStream *segment = new SegmentStream(streamInterface); - if (NULL == segment) return ERROR_NULL_PTR; + SegmentStream* segment = new SegmentStream(streamInterface); + if (nullptr == segment) return ERROR_NULL_PTR; - //return pReader->parseInitializationSegment(new SegmentStream(streamInterface), initSegmentId); - return pReader->parseInitializationSegment(segment, initSegmentId); + return pReader->ParseInitSeg(segment, initSegmentId); } -int32_t OmafMP4VRReader::invalidateInitializationSegment(uint32_t initSegmentId) -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::invalidateInitializationSegment(uint32_t initSegmentId) { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - return pReader->invalidateInitializationSegment(initSegmentId); + return pReader->DisableInitSeg(initSegmentId); } -int32_t OmafMP4VRReader::parseSegment( OmafSegment* streamInterface, - uint32_t initSegmentId, - uint32_t segmentId, - uint64_t earliestPTSinTS ) -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; +int32_t OmafMP4VRReader::parseSegment(OmafSegment* streamInterface, uint32_t initSegmentId, uint32_t segmentId, + uint64_t earliestPTSinTS) { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; - SegmentStream *segment = new SegmentStream(streamInterface); - if (NULL == segment) return ERROR_NULL_PTR; + SegmentStream* segment = new SegmentStream(streamInterface); + if (nullptr == segment) return ERROR_NULL_PTR; - return pReader->parseSegment(segment, initSegmentId, segmentId, earliestPTSinTS); + return pReader->ParseSeg(segment, initSegmentId, segmentId, earliestPTSinTS); } -int32_t OmafMP4VRReader::invalidateSegment(uint32_t initSegmentId, uint32_t segmentId) -{ - if(NULL == mMP4ReaderImpl) return ERROR_NULL_PTR; - MP4VR::MP4VRFileReaderInterface* pReader = (MP4VR::MP4VRFileReaderInterface*)mMP4ReaderImpl; - return pReader->invalidateSegment(initSegmentId, segmentId); +int32_t OmafMP4VRReader::invalidateSegment(uint32_t initSegmentId, uint32_t segmentId) { + if (nullptr == mMP4ReaderImpl) return ERROR_NULL_PTR; + VCD::MP4::Mp4Reader* pReader = (VCD::MP4::Mp4Reader*)mMP4ReaderImpl; + return pReader->DisableSeg(initSegmentId, segmentId); } VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafMP4VRReader.h b/src/OmafDashAccess/OmafMP4VRReader.h index 6173771f..f467f47e 100644 --- a/src/OmafDashAccess/OmafMP4VRReader.h +++ b/src/OmafDashAccess/OmafMP4VRReader.h @@ -41,6 +41,8 @@ VCD_OMAF_BEGIN class OmafMP4VRReader : public 
OmafReader{ public: OmafMP4VRReader(); + OmafMP4VRReader(OmafMP4VRReader&& other); + OmafMP4VRReader& operator=(const OmafMP4VRReader&) = default; virtual ~OmafMP4VRReader(); public: @@ -76,7 +78,9 @@ class OmafMP4VRReader : public OmafReader{ virtual int32_t getHeight(uint32_t trackId, uint32_t sampleId, uint32_t& height) const ; - virtual int32_t getPlaybackDurationInSecs(uint32_t trackId, double& durationInSecs) const ; + virtual int32_t getDims(uint32_t trackId, uint32_t sampleId, uint32_t& width, uint32_t& height) const; + + virtual int32_t getPlaybackDurationInSecs(uint32_t trackId, double& durationInSecs) const; virtual int32_t getTrackSampleListByType(uint32_t trackId, VCD::OMAF::TrackSampleType sampleType, std::vector& sampleIds) const ; diff --git a/src/OmafDashAccess/OmafMPDParser.cpp b/src/OmafDashAccess/OmafMPDParser.cpp index 74696687..166b0f14 100644 --- a/src/OmafDashAccess/OmafMPDParser.cpp +++ b/src/OmafDashAccess/OmafMPDParser.cpp @@ -27,221 +27,284 @@ */ #include "OmafMPDParser.h" -#include "OmafExtractor.h" #include +#include "OmafExtractor.h" VCD_OMAF_BEGIN -OmafMPDParser::OmafMPDParser() -{ - mParser = nullptr; - this->mMpd = NULL; - this->mMPDURL = ""; - this->mLock = new ThreadLock(); - mMPDInfo = nullptr; - mPF = PF_UNKNOWN; +OmafMPDParser::OmafMPDParser() { + mParser = nullptr; + this->mMpd = nullptr; + this->mMPDURL = ""; + this->mCacheDir = ""; + // this->mLock = new ThreadLock(); + mMPDInfo = nullptr; + mPF = PF_UNKNOWN; + this->mTmpAS = nullptr; + this->mTmpStream = nullptr; + this->mQualityRanksNum = 0; } -OmafMPDParser::~OmafMPDParser() -{ - SAFE_DELETE(mParser); - //SAFE_DELETE(mMpd); - SAFE_DELETE(mLock); +OmafMPDParser::~OmafMPDParser() { + SAFE_DELETE(mParser); + mTwoDQualityInfos.clear(); + // SAFE_DELETE(mMpd); + // SAFE_DELETE(mLock); } -int OmafMPDParser::ParseMPD( std::string mpd_file, OMAFSTREAMS& listStream ) -{ - int ret = ERROR_NONE; - - if(nullptr == mParser) - mParser = new OmafXMLParser(); +int OmafMPDParser::ParseMPD(std::string mpd_file, OMAFSTREAMS& listStream) { + int ret = ERROR_NONE; - mLock->lock(); + if (nullptr == mParser) { + mParser = new OmafXMLParser(); + mParser->SetOmafHttpParams(omaf_dash_params_.http_proxy_, omaf_dash_params_.http_params_); + } - mMPDURL = mpd_file; + // mLock->lock(); + std::lock_guard lock(mLock); - ODStatus st = mParser->Generate(const_cast(mMPDURL.c_str())); - if(st != OD_STATUS_SUCCESS) - { - mLock->unlock(); - LOG(INFO)<<"failed to parse MPD file."<GetGeneratedMPD(); + OMAF_LOG(LOG_INFO, "To parse the mpd file: %s\n", mMPDURL.c_str()); - if(NULL == mMpd){ - mLock->unlock(); - return ERROR_PARSE; - } + ODStatus st = mParser->Generate(const_cast(mMPDURL.c_str()), mCacheDir); + if (st != OD_STATUS_SUCCESS) { + // mLock->unlock(); + OMAF_LOG(LOG_ERROR, "Failed to load MPD file: %s\n", mpd_file.c_str()); + return st; + } - ret = ParseMPDInfo(); - - if(ret != ERROR_NONE) { - mLock->unlock(); - return ret; - } - - ret = ParseStreams(listStream); - - if(ret != ERROR_NONE) { - mLock->unlock(); - return ret; - } - - mLock->unlock(); + mMpd = mParser->GetGeneratedMPD(); + if (nullptr == mMpd) { + OMAF_LOG(LOG_ERROR, "Failed to get the generated mpd!\n"); + return ERROR_PARSE; + } + ret = ParseMPDInfo(); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to parse MPD file: %s\n", mpd_file.c_str()); return ret; -} + } -int OmafMPDParser::ParseMPDInfo() -{ - mMPDInfo = new MPDInfo; - if(!mMPDInfo) - return ERROR_NULL_PTR; - - auto baseUrl = mMpd->GetBaseUrls().back(); - mMPDInfo->mpdPathBaseUrl = 
baseUrl->GetPath(); - mMPDInfo->profiles = mMpd->GetProfiles(); - mMPDInfo->type = mMpd->GetType(); - - mMPDInfo->media_presentation_duration = parse_duration( mMpd->GetMediaPresentationDuration().c_str() ); - mMPDInfo->availabilityStartTime = parse_date ( mMpd->GetAvailabilityStartTime().c_str() ); - mMPDInfo->availabilityEndTime = parse_date ( mMpd->GetAvailabilityEndTime().c_str() ); - mMPDInfo->max_segment_duration = parse_duration ( mMpd->GetMaxSegmentDuration().c_str() ); - mMPDInfo->min_buffer_time = parse_duration ( mMpd->GetMinBufferTime().c_str() ); - mMPDInfo->minimum_update_period = parse_duration ( mMpd->GetMinimumUpdatePeriod().c_str() ); - mMPDInfo->suggested_presentation_delay = parse_int ( mMpd->GetSuggestedPresentationDelay().c_str() ); - mMPDInfo->time_shift_buffer_depth = parse_duration ( mMpd->GetTimeShiftBufferDepth().c_str() ); - - mBaseUrls = mMpd->GetBaseUrls(); - // Get all base urls except the last one - for(uint32_t i = 0; i < mBaseUrls.size() - 1 ; i++) - { - mMPDInfo->baseURL.push_back(mBaseUrls[i]->GetPath()); - } + ret = ParseStreams(listStream); + if (ret != ERROR_NONE) { + if (ret != OMAF_INVALID_EXTRACTOR_ENABLEMENT) + OMAF_LOG(LOG_ERROR, "Failed to parse media streams from MPD file: %s\n", mpd_file.c_str()); + return ret; + } - mPF = mMpd->GetProjectionFormat(); + // mLock->unlock(); - return ERROR_NONE; + return ERROR_NONE; } -int OmafMPDParser::UpdateMPD(OMAFSTREAMS& listStream) -{ - return ParseMPD(this->mMPDURL, listStream); +int OmafMPDParser::ParseMPDInfo() { + mMPDInfo = new MPDInfo; + if (!mMPDInfo) return ERROR_NULL_PTR; + + auto baseUrl = mMpd->GetBaseUrls().back(); + mMPDInfo->mpdPathBaseUrl = baseUrl->GetPath(); + mMPDInfo->profiles = mMpd->GetProfiles(); + mMPDInfo->type = mMpd->GetType(); + + if (!mMpd->GetMediaPresentationDuration().empty()) { + mMPDInfo->media_presentation_duration = parse_duration(mMpd->GetMediaPresentationDuration().c_str()); + } + + if (!mMpd->GetAvailabilityStartTime().empty()) { + mMPDInfo->availabilityStartTime = parse_date(mMpd->GetAvailabilityStartTime().c_str()); + } + if (!mMpd->GetAvailabilityEndTime().empty()) { + mMPDInfo->availabilityEndTime = parse_date(mMpd->GetAvailabilityEndTime().c_str()); + } + if (!mMpd->GetMaxSegmentDuration().empty()) { + mMPDInfo->max_segment_duration = parse_duration(mMpd->GetMaxSegmentDuration().c_str()); + } + if (!mMpd->GetMinBufferTime().empty()) { + mMPDInfo->min_buffer_time = parse_duration(mMpd->GetMinBufferTime().c_str()); + } + if (!mMpd->GetMinimumUpdatePeriod().empty()) { + mMPDInfo->minimum_update_period = parse_duration(mMpd->GetMinimumUpdatePeriod().c_str()); + } + if (!mMpd->GetSuggestedPresentationDelay().empty()) { + mMPDInfo->suggested_presentation_delay = parse_int(mMpd->GetSuggestedPresentationDelay().c_str()); + } + if (!mMpd->GetTimeShiftBufferDepth().empty()) { + mMPDInfo->time_shift_buffer_depth = parse_duration(mMpd->GetTimeShiftBufferDepth().c_str()); + } + + mBaseUrls = mMpd->GetBaseUrls(); + // Get all base urls except the last one + for (uint32_t i = 0; i < mBaseUrls.size() - 1; i++) { + mMPDInfo->baseURL.push_back(mBaseUrls[i]->GetPath()); + } + + mPF = mMpd->GetProjectionFormat(); + + return ERROR_NONE; } -MPDInfo* OmafMPDParser::GetMPDInfo() -{ - return this->mMPDInfo; -} +int OmafMPDParser::UpdateMPD(OMAFSTREAMS& listStream) { return ParseMPD(this->mMPDURL, listStream); } + +MPDInfo* OmafMPDParser::GetMPDInfo() { return this->mMPDInfo; } //! //! \brief construct media streams. //! 
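
The reworked ParseMPD / ParseMPDInfo path above now guards every optional MPD attribute with an empty() check before converting it, and ParseStreams may report OMAF_INVALID_EXTRACTOR_ENABLEMENT while still filling the stream list. A minimal caller-side sketch of that contract, using only the public OmafMPDParser methods visible in this patch (SetCacheDir, SetExtractorEnabled, ParseMPD, GetMPDInfo); the wrapper function LoadStreams and its error-handling policy are illustrative assumptions, not part of the change:

```cpp
// Illustrative sketch only -- exercises the public OmafMPDParser API from this patch.
#include "OmafMPDParser.h"

using namespace VCD::OMAF;

static int LoadStreams(const std::string& mpdUrl, OMAFSTREAMS& streams) {
  OmafMPDParser parser;
  parser.SetCacheDir("");            // "" means no local segment cache
  parser.SetExtractorEnabled(true);  // BuildStreams() drops this back to false when the
                                     // MPD carries no extractor track
  int ret = parser.ParseMPD(mpdUrl, streams);
  // OMAF_INVALID_EXTRACTOR_ENABLEMENT still yields usable tile-track streams,
  // so this sketch treats it as recoverable (an assumption, not mandated by the patch).
  if (ret != ERROR_NONE && ret != OMAF_INVALID_EXTRACTOR_ENABLEMENT) return ret;

  MPDInfo* info = parser.GetMPDInfo();  // optional attributes are parsed only when present
  return info ? ERROR_NONE : ERROR_NULL_PTR;
}
```
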
-int OmafMPDParser::ParseStreams( OMAFSTREAMS& listStream ) -{ - int ret = ERROR_NONE; +int OmafMPDParser::ParseStreams(OMAFSTREAMS& listStream) { + int ret = ERROR_NONE; - std::vector Periods = mMpd->GetPeriods(); - if(Periods.size() == 0) - return ERROR_NO_VALUE; + std::vector Periods = mMpd->GetPeriods(); + if (Periods.size() == 0) return ERROR_NO_VALUE; - //processing only the first period; - PeriodElement *pPeroid = Periods[0]; + // processing only the first period; + PeriodElement* pPeroid = Periods[0]; - TYPE_OMAFADAPTATIONSETS adapt_sets; + TYPE_OMAFADAPTATIONSETS adapt_sets; - ret = GroupAdaptationSet( pPeroid, adapt_sets ); + ret = GroupAdaptationSet(pPeroid, adapt_sets); - ret = BuildStreams( adapt_sets, listStream ); + ret = BuildStreams(adapt_sets, listStream); - return ret; + return ret; } -int OmafMPDParser::GroupAdaptationSet(PeriodElement* pPeriod, TYPE_OMAFADAPTATIONSETS& mapAdaptationSets ) -{ - ADAPTATIONSETS AdaptationSets = pPeriod->GetAdaptationSets(); +int OmafMPDParser::GroupAdaptationSet(PeriodElement* pPeriod, TYPE_OMAFADAPTATIONSETS& mapAdaptationSets) { + ADAPTATIONSETS AdaptationSets = pPeriod->GetAdaptationSets(); - /// so far, we supposed that there will be only one viewpoint in the mpd, - /// so all Adaptation sets are belong to the same audio-visual content. - /// FIXIT, if there are multiple viewpoints. - for(auto it = AdaptationSets.begin(); it != AdaptationSets.end(); it++ ){ - AdaptationSetElement *pAS = (AdaptationSetElement*) (*it); - OmafAdaptationSet* pOmafAS = CreateAdaptationSet(pAS); + /// so far, we supposed that there will be only one viewpoint in the mpd, + /// so all Adaptation sets are belong to the same audio-visual content. + /// FIXIT, if there are multiple viewpoints. + for (auto it = AdaptationSets.begin(); it != AdaptationSets.end(); it++) { + AdaptationSetElement* pAS = (AdaptationSetElement*)(*it); + mTmpAS = CreateAdaptationSet(pAS, mPF); + if (mTmpAS == NULL) return ERROR_INVALID; + /// catalog the Adaptation according to the media type: video, audio, etc + std::string type = GetSubstr(mTmpAS->GetMimeType(), '/', true); + OMAF_LOG(LOG_INFO, "Create one AS with type %s\n", type.c_str()); - /// catalog the Adaptation according to the media type: video, audio, etc - std::string type = GetSubstr(pOmafAS->GetMimeType(), '/', true); - - mapAdaptationSets[type].push_back(pOmafAS); - } + mapAdaptationSets[type].push_back(mTmpAS); + } - return ERROR_NONE; + return ERROR_NONE; } -int OmafMPDParser::BuildStreams( TYPE_OMAFADAPTATIONSETS mapAdaptationSets, OMAFSTREAMS& listStream ) -{ - int ret = ERROR_NONE; - for(auto it = mapAdaptationSets.begin(); it != mapAdaptationSets.end(); it++){ - OMAFADAPTATIONSETS ASs = it->second; - std::string type = it->first; - - OmafMediaStream* pStream = new OmafMediaStream(); - auto mainASit = ASs.begin(); - - for(auto as_it = ASs.begin(); as_it != ASs.end(); as_it++){ - OmafAdaptationSet* pOmafAs = (OmafAdaptationSet*)(*as_it); - pOmafAs->SetBaseURL(mBaseUrls); - if( typeid(*(pOmafAs->GetClassType()) ) == typeid( OmafExtractor ) ){ - OmafExtractor *tmpOmafAs = (OmafExtractor*)pOmafAs; - pStream->AddExtractor(tmpOmafAs); - pStream->SetExtratorAdaptationSet(tmpOmafAs); - }else{ - pStream->AddAdaptationSet(pOmafAs); - if(pOmafAs->IsMain()) - { - pOmafAs->SetProjectionFormat(mPF); - pStream->SetMainAdaptationSet(pOmafAs); - mainASit = as_it; - } +int OmafMPDParser::BuildStreams(TYPE_OMAFADAPTATIONSETS mapAdaptationSets, OMAFSTREAMS& listStream) { + int ret = ERROR_NONE; + uint32_t allExtractorCnt = 0; + 
uint32_t videoStrNum = 0; + std::set allVideoQualities; + std::map streamsMap; + for (auto it = mapAdaptationSets.begin(); it != mapAdaptationSets.end(); it++) { + OMAFADAPTATIONSETS ASs = it->second; + std::vector::iterator mainASit; + std::string type = it->first; + mTmpStream = new OmafMediaStream(); + if (mTmpStream == NULL) return ERROR_INVALID; + if (strncmp(type.c_str(), "video", 5) == 0) + { + mainASit = ASs.begin(); + videoStrNum++; + } + + for (auto as_it = ASs.begin(); as_it != ASs.end(); as_it++) { + OmafAdaptationSet* pOmafAs = (OmafAdaptationSet*)(*as_it); + pOmafAs->SetBaseURL(mBaseUrls); + if (typeid(*pOmafAs) == typeid(OmafExtractor)) { + if (mExtractorEnabled) { + OmafExtractor* tmpOmafAs = (OmafExtractor*)pOmafAs; + mTmpStream->AddExtractor(tmpOmafAs); + mTmpStream->SetExtratorAdaptationSet(tmpOmafAs); + } + } else { + mTmpStream->AddAdaptationSet(pOmafAs); + if (strncmp(type.c_str(), "video", 5) == 0) + { + if (pOmafAs->IsMain()) { + pOmafAs->SetProjectionFormat(mPF); + mTmpStream->SetMainAdaptationSet(pOmafAs); + mainASit = as_it; + pOmafAs->SetTwoDQualityInfos(); + mTwoDQualityInfos = pOmafAs->GetTwoDQualityInfos(); + } else { + QualityRank oneQuality = pOmafAs->GetRepresentationQualityRanking(); + allVideoQualities.insert(oneQuality); + } + } + else if (strncmp(type.c_str(), "audio", 5) == 0) + { + if (as_it == ASs.begin()) + { + mTmpStream->SetMainAdaptationSet(pOmafAs); + mainASit = as_it; } } + } + } - pStream->InitStream(type); + std::map extractors = mTmpStream->GetExtractors(); + if (extractors.size()) { + allExtractorCnt++; + } - // remove main AS from AdaptationSets for it has no real data + // remove main AS from AdaptationSets for it has no real data + if (strncmp(type.c_str(), "video", 5) == 0) + { ASs.erase(mainASit); - - listStream.push_back(pStream); } - return ret; + streamsMap.insert(std::make_pair(type, mTmpStream)); + } + + mQualityRanksNum = allVideoQualities.size(); + OMAF_LOG(LOG_INFO, "allExtractorCnt %u\n", allExtractorCnt); + OMAF_LOG(LOG_INFO, "video streams num %u\n", videoStrNum); + OMAF_LOG(LOG_INFO, "video quality ranks num %u\n", mQualityRanksNum); + //if (allExtractorCnt < mapAdaptationSets.size()) { + if (allExtractorCnt < videoStrNum) { + if (mExtractorEnabled) { + OMAF_LOG(LOG_INFO, "There isn't extractor track from MPD parsing, extractor track enablement should be false !\n"); + mExtractorEnabled = false; + ret = OMAF_INVALID_EXTRACTOR_ENABLEMENT; + } + } + std::map::iterator itStream; + for (itStream = streamsMap.begin(); itStream != streamsMap.end(); itStream++) { + std::string type = itStream->first; + OmafMediaStream* stream = itStream->second; + if (strncmp(type.c_str(), "video", 5) == 0) + { + stream->SetEnabledExtractor(mExtractorEnabled); + } + else + { + stream->SetEnabledExtractor(false); + } + stream->InitStream(type); + listStream.push_back(stream); + } + return ret; } -OmafAdaptationSet* OmafMPDParser::CreateAdaptationSet(AdaptationSetElement* pAS) -{ - if( ExtractorJudgement(pAS) ){ - return new OmafExtractor(pAS); - } - return new OmafAdaptationSet(pAS); +OmafAdaptationSet* OmafMPDParser::CreateAdaptationSet(AdaptationSetElement* pAS, ProjectionFormat pf) { + if (ExtractorJudgement(pAS)) { + return new OmafExtractor(pAS, pf, true); + } + return new OmafAdaptationSet(pAS, pf, false); } -bool OmafMPDParser::ExtractorJudgement(AdaptationSetElement* pAS) -{ - PreselValue *sel = pAS->GetPreselection(); - if(sel) - return true; +bool OmafMPDParser::ExtractorJudgement(AdaptationSetElement* pAS) { + PreselValue* sel = 
pAS->GetPreselection(); + if (sel) return true; - ///FIXME, if @DependencyID has multiple dependency ID, then set it as extractor. - std::vector depIDs = pAS->GetRepresentations()[0]->GetDependencyIDs(); - if( depIDs.size() > 0 ) - { - return true; - } - return false; + /// FIXME, if @DependencyID has multiple dependency ID, then set it as extractor. + std::vector depIDs = pAS->GetRepresentations()[0]->GetDependencyIDs(); + if (depIDs.size() > 0) { + return true; + } + return false; } VCD_OMAF_END - diff --git a/src/OmafDashAccess/OmafMPDParser.h b/src/OmafDashAccess/OmafMPDParser.h index dff8090d..eb39a7fa 100644 --- a/src/OmafDashAccess/OmafMPDParser.h +++ b/src/OmafDashAccess/OmafMPDParser.h @@ -32,108 +32,133 @@ //! on May 22, 2019, 4:09 PM //! - #ifndef OMAFMPDPARSER_H #define OMAFMPDPARSER_H -#include "general.h" -#include "OmafMediaStream.h" #include "OmafDashParser/OmafXMLParser.h" +#include "OmafMediaStream.h" +#include "general.h" + +#include using namespace VCD::OMAF; using namespace VCD::VRVideo; VCD_OMAF_BEGIN -typedef enum{ - MPD_NONE = 0, - MPD_STATIC, - MPD_DYNAMIC, -}MPD_TYPE; +typedef enum { + MPD_NONE = 0, + MPD_STATIC, + MPD_DYNAMIC, +} MPD_TYPE; -typedef std::vector ADAPTATIONSETS; -typedef std::vector OMAFSTREAMS; -typedef std::vector OMAFADAPTATIONSETS; -typedef std::map TYPE_OMAFADAPTATIONSETS; +typedef std::vector ADAPTATIONSETS; +typedef std::vector OMAFSTREAMS; +typedef std::vector OMAFADAPTATIONSETS; +typedef std::map TYPE_OMAFADAPTATIONSETS; //! //! \class: OmafMPDParser //! \brief: the parser for MPD file using libdash //! class OmafMPDParser { -public: - //! - //! \brief construct - //! - OmafMPDParser(); - - //! - //! \brief de-construct - //! - virtual ~OmafMPDParser(); - -public: - //! - //! \brief parse MPD and get construct media streams - //! - int ParseMPD( std::string mpd_file, OMAFSTREAMS& listStream ); - - //! - //! \brief update MPD and get construct media streams for live if needed. - //! - int UpdateMPD(OMAFSTREAMS& listStream); - - //! - //! \brief Get MPD information. - //! - MPDInfo* GetMPDInfo(); - -private: - - //! - //! \brief construct media streams. - //! - int ParseStreams( OMAFSTREAMS& listStream ); - - //! - //! \brief Parse MPD information - //! - int ParseMPDInfo(); - - //! - //! \brief group all adaptationSet based on the dependency. - //! - int GroupAdaptationSet(PeriodElement* pPeriod, TYPE_OMAFADAPTATIONSETS& mapAdaptationSets ); - - //! - //! \brief build up OmafMediaStreams based on the grouped AdaptationSets. - //! - int BuildStreams( TYPE_OMAFADAPTATIONSETS mapAdaptationSets, OMAFSTREAMS& listStream ); - - //! - //! \brief Create OmafAdaptationSet based on libDash AdaptationSetElement. - //! \param [in] pAS AdaptationSetElement - //! \return - //! - OmafAdaptationSet* CreateAdaptationSet(AdaptationSetElement* pAS); - - //! - //! \brief Judge the type of the AdaptationSet. - //! - bool ExtractorJudgement(AdaptationSetElement* pAS); - + public: + //! + //! \brief construct + //! + OmafMPDParser(); + + //! + //! \brief de-construct + //! + virtual ~OmafMPDParser(); + + public: + //! + //! \brief parse MPD and get construct media streams + //! + int ParseMPD(std::string mpd_file, OMAFSTREAMS& listStream); + + //! + //! \brief update MPD and get construct media streams for live if needed. + //! + int UpdateMPD(OMAFSTREAMS& listStream); + + //! + //! \brief Get MPD information. + //! + MPDInfo* GetMPDInfo(); + + //! + //! \brief Set cache dir. + //! 
+ void SetCacheDir(string cache_dir) { mCacheDir = cache_dir; }; + + void SetExtractorEnabled(bool isExtractorEnabled) { mExtractorEnabled = isExtractorEnabled; }; + + bool GetExtractorEnabled() { return mExtractorEnabled; }; + void SetOmafDashParams(const OmafDashParams& params) { omaf_dash_params_ = params; } + ProjectionFormat GetProjectionFmt() { return mPF; }; + + uint32_t GetVideoQualityRanksNum() { return mQualityRanksNum; }; + + std::map GetTwoDQualityInfos() { return mTwoDQualityInfos; }; + + private: + //! + //! \brief construct media streams. + //! + int ParseStreams(OMAFSTREAMS& listStream); + + //! + //! \brief Parse MPD information + //! + int ParseMPDInfo(); + + //! + //! \brief group all adaptationSet based on the dependency. + //! + int GroupAdaptationSet(PeriodElement* pPeriod, TYPE_OMAFADAPTATIONSETS& mapAdaptationSets); + + //! + //! \brief build up OmafMediaStreams based on the grouped AdaptationSets. + //! + int BuildStreams(TYPE_OMAFADAPTATIONSETS mapAdaptationSets, OMAFSTREAMS& listStream); + + //! + //! \brief Create OmafAdaptationSet based on libDash AdaptationSetElement. + //! \param [in] pAS AdaptationSetElement + //! \return + //! + OmafAdaptationSet* CreateAdaptationSet(AdaptationSetElement* pAS, ProjectionFormat pf); + + //! + //! \brief Judge the type of the AdaptationSet. + //! + bool ExtractorJudgement(AdaptationSetElement* pAS); private: - OmafXMLParser *mParser; - MPDElement *mMpd; //!< the PTR for libdash MPD - std::string mMPDURL; //!< url of MPD - ThreadLock* mLock; //!< for synchronization - MPDInfo *mMPDInfo; //!< the information of MPD - std::vector mBaseUrls; - ProjectionFormat mPF; //!< the projection format of the video content + OmafMPDParser& operator=(const OmafMPDParser& other) { return *this; }; + OmafMPDParser(const OmafMPDParser& other) { /* do not create copies */ }; + + private: + OmafXMLParser* mParser = nullptr; + MPDElement* mMpd = nullptr; //!< the PTR for libdash MPD + std::string mMPDURL; //!< url of MPD + // ThreadLock* mLock; + std::mutex mLock; + MPDInfo* mMPDInfo; //!< the information of MPD + std::vector mBaseUrls; + ProjectionFormat mPF; //!< the projection format of the video content + std::string mCacheDir; //!< cache directory + bool mExtractorEnabled = false; //!< if extractor track is enabled + OmafDashParams omaf_dash_params_; + OmafAdaptationSet *mTmpAS; + OmafMediaStream* mTmpStream; + uint32_t mQualityRanksNum; + std::map mTwoDQualityInfos; }; VCD_OMAF_END; #endif /* MPDPARSER_H */ - diff --git a/src/OmafDashAccess/OmafMediaSource.h b/src/OmafDashAccess/OmafMediaSource.h index f6db3dba..e7434a79 100644 --- a/src/OmafDashAccess/OmafMediaSource.h +++ b/src/OmafDashAccess/OmafMediaSource.h @@ -37,198 +37,213 @@ #ifndef _MEDIASOURCE_H #define _MEDIASOURCE_H -#include "general.h" #include "OmafMediaStream.h" +#include "OmafTypes.h" +#include "general.h" VCD_OMAF_BEGIN +typedef enum { + STATUS_CREATED = 0, + STATUS_READY, + STATUS_RUNNING, + STATUS_EXITING, + STATUS_STOPPED, + STATUS_UNKNOWN, +} DASH_STATUS; + class OmafMediaSource { -public: - //! - //! \brief construct - //! - OmafMediaSource() - { - mViewPortChanged = false; - memset(&mHeadSetInfo, 0, sizeof(mHeadSetInfo)); - memset(&mPose, 0, sizeof(mPose)); - mLoop = false; - mEOS = false; - }; - - //! - //! \brief de-construct - //! - virtual ~OmafMediaSource(){}; - -public: - //! - //! \brief Open Media from special url. it's pure interface - //! - //! \param [in] url - //! the location of the mpd to be opened - //! \param [in] cacheDir - //! 
path to cache files it can be "" for no cache needed - //! - //! \return - //! ERROR_NONE if success, else fail reason - //! - virtual int OpenMedia(std::string url, std::string cacheDir, bool enablePredictor=false) = 0; - - //! - //! \brief Close the media. it's pure interface - //! - //! \return - //! loop status - //! - virtual int CloseMedia() = 0; - - //! - //! \brief Open Media from special url. it's pure interface - //! - //! \param [in] streamID - //! the ID of the stream to be operated - //! \param [out] pkt - //! Packet to hold the media stream - //! \param [out] clearBuf - //! - //! \return - //! ERROR_NONE if success, else fail reason - //! - virtual int GetPacket( int streamID, std::list* pkts, bool needParams, bool clearBuf ) = 0; - - //! - //! \brief Open Media from special url. it's pure interface - //! - //! \param [in] media_info - //! - //! - //! \return - //! ERROR_NONE if success, else fail reason - //! - virtual int GetMediaInfo( DashMediaInfo* media_info ) = 0; - - //! - //! \brief Open Media from special url. - //! - //! \param [in] streamID - //! - //! - //! \return - //! MediaStream* the pointer to the stream - //! - virtual OmafMediaStream* GetStream(int streamID){ - if( (uint32_t)(streamID) >= mMapStream.size() || streamID < 0 ) - return NULL; - std::map::iterator it = mMapStream.find( streamID ); - if (it != mMapStream.end()) - { - return it->second; - } - return NULL; - }; - - //! - //! \brief Get the stream number of the media - //! - //! \return - //! the total count of the stream in the media - //! - virtual int GetStreamCount(){ return mMapStream.size(); }; - - //! - //! \brief set initial viewport for the stream. it's pure interface - //! - //! \param [in] clientInfo - //! - //! - //! \return - //! ERROR_NONE if success, else fail reason - //! - virtual int SetupHeadSetInfo(HeadSetInfo* clientInfo) = 0; - - //! - //! \brief Change the viewport for the media. it's pure interface - //! - //! \param [in] pose - //! - //! - //! \return - //! ERROR_NONE if success, else fail reason - //! - virtual int ChangeViewport(HeadPose* pose) = 0; - - - //! - //! \brief Get statistic information relative to the media. it's pure interface - //! \param [out] info - //! the information of statistic, such as bandwidth - //! \return - //! the total count of the stream in the media - //! - virtual int GetStatistic(DashStatisticInfo* dsInfo) = 0; - - //! - //! \brief seek to special position of the media in VOD mode - //! - //! \return - //! ERROR_NONE if success, else fail reason - //! - virtual int SeekTo( int64_t time ){ return 0; }; - - //! - //! \brief Get total track count of the media - //! - //! \return - //! EOF status - //! - virtual int GetTrackCount() = 0; - - //! - //! \brief access the media with a loop or non-loop mode in VOD mode - //! - //! \return - //! ERROR_NONE if success, else fail reason - //! - virtual int SetLoop(bool bLoop){ - mLoop = bLoop; - return 0; + public: + //! + //! \brief construct + //! + OmafMediaSource() { + mViewPortChanged = false; + memset(&mHeadSetInfo, 0, sizeof(mHeadSetInfo)); + memset(&mPose, 0, sizeof(mPose)); + mLoop = false; + mEOS = false; + }; + + //! + //! \brief de-construct + //! + virtual ~OmafMediaSource(){}; + + public: + //! + //! \brief Open Media from special url. it's pure interface + //! + //! \param [in] url + //! the location of the mpd to be opened + //! \param [in] cacheDir + //! path to cache files it can be "" for no cache needed + //! + //! \return + //! ERROR_NONE if success, else fail reason + //! 
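+ //! \param [in] externalLog external logging hook supplied by the caller (description inferred from the name)
+ //! \param [in] i360scvp_plugin 360SCVP plugin definition (inferred from the PluginDef type)
+ //! \param [in] enableExtractor true to use extractor-track mode, false to rely on tile stitching
+ //! \param [in] predictPluginName viewport-prediction plugin name, used when enablePredictor is true (inferred)
+ //! \param [in] dllPath path to the prediction plugin library (inferred)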
+ virtual int OpenMedia(std::string url, std::string cacheDir, void* externalLog, PluginDef i360scvp_plugin, bool enableExtractor, bool enablePredictor = false, + std::string predictPluginName = "", std::string dllPath = "") = 0; + + virtual int StartStreaming() = 0; + //! + //! \brief Close the media. it's pure interface + //! + //! \return + //! loop status + //! + virtual int CloseMedia() = 0; + + //! + //! \brief Open Media from special url. it's pure interface + //! + //! \param [in] streamID + //! the ID of the stream to be operated + //! \param [out] pkt + //! Packet to hold the media stream + //! \param [out] clearBuf + //! + //! \return + //! ERROR_NONE if success, else fail reason + //! + virtual int GetPacket(int streamID, std::list* pkts, bool needParams, bool clearBuf) = 0; + + //! + //! \brief Open Media from special url. it's pure interface + //! + //! \param [in] media_info + //! + //! + //! \return + //! ERROR_NONE if success, else fail reason + //! + virtual int GetMediaInfo(DashMediaInfo* media_info) = 0; + + //! + //! \brief Open Media from special url. + //! + //! \param [in] streamID + //! + //! + //! \return + //! MediaStream* the pointer to the stream + //! + virtual OmafMediaStream* GetStream(int streamID) { + if ((uint32_t)(streamID) >= mMapStream.size() || streamID < 0) return nullptr; + std::map::iterator it = mMapStream.find(streamID); + if (it != mMapStream.end()) { + return it->second; } - - //! - //! \brief Check whether it is loop mode or not - //! - //! \return - //! loop status - //! - virtual bool IsLoop(){ - return mLoop; + return nullptr; + }; + + //! + //! \brief Get the stream number of the media + //! + //! \return + //! the total count of the stream in the media + //! + virtual int GetStreamCount() { return mMapStream.size(); }; + + //! + //! \brief set initial viewport for the stream. it's pure interface + //! + //! \param [in] clientInfo + //! + //! + //! \return + //! ERROR_NONE if success, else fail reason + //! + virtual int SetupHeadSetInfo(HeadSetInfo* clientInfo) = 0; + + //! + //! \brief Change the viewport for the media. it's pure interface + //! + //! \param [in] pose + //! + //! + //! \return + //! ERROR_NONE if success, else fail reason + //! + virtual int ChangeViewport(HeadPose* pose) = 0; + + //! + //! \brief Get statistic information relative to the media. it's pure interface + //! \param [out] info + //! the information of statistic, such as bandwidth + //! \return + //! the total count of the stream in the media + //! + virtual int GetStatistic(DashStatisticInfo* dsInfo) = 0; + + //! + //! \brief seek to special position of the media in VOD mode + //! + //! \return + //! ERROR_NONE if success, else fail reason + //! + virtual int SeekTo(int64_t time) // need to implement in later version + { + if (time < 0) { + return ERROR_INVALID; } - - //! - //! \brief Check whether it is End of stream - //! - //! \return - //! EOF status - //! - bool isEOS(){return mEOS;}; - - virtual int SelectSpecialSegments(int extractorTrackIdx) = 0; - -protected: - std::string mUrl; //!< the url of the media - std::string mCacheDir; //!< the path for cached files - std::map mMapStream; //!< map for streams in the media - bool mLoop; //!< loop status - bool mEOS; //!< EOS status - std::vector mViewPorts; //!< - HeadSetInfo mHeadSetInfo; //!< - HeadPose mPose; //!< - bool mViewPortChanged; //!< - + return ERROR_NONE; + }; + + //! + //! \brief Get total track count of the media + //! + //! \return + //! EOF status + //! 
+ virtual int GetTrackCount() = 0; + + //! + //! \brief access the media with a loop or non-loop mode in VOD mode + //! + //! \return + //! ERROR_NONE if success, else fail reason + //! + virtual int SetLoop(bool bLoop) { + mLoop = bLoop; + return 0; + } + + //! + //! \brief Check whether it is loop mode or not + //! + //! \return + //! loop status + //! + virtual bool IsLoop() { return mLoop; } + + //! + //! \brief Check whether it is End of stream + //! + //! \return + //! EOF status + //! + bool isEOS() { return mEOS; }; + + virtual int SelectSpecialSegments(int extractorTrackIdx) = 0; + + public: + void SetOmafDashParams(OmafDashParams params) { omaf_dash_params_ = params; }; + OmafDashParams GetOmafParams() { return omaf_dash_params_; }; + + protected: + OmafDashParams omaf_dash_params_; + std::string mUrl; //!< the url of the media + std::string mCacheDir; //!< the path for cached files + std::map mMapStream; //!< map for streams in the media + bool mLoop; //!< loop status + bool mEOS; //!< EOS status + std::vector mViewPorts; //!< + HeadSetInfo mHeadSetInfo; //!< + HeadPose mPose; //!< + bool mViewPortChanged; //!< }; VCD_OMAF_END; #endif /* MEDIASOURCE_H */ - diff --git a/src/OmafDashAccess/OmafMediaStream.cpp b/src/OmafDashAccess/OmafMediaStream.cpp index 5c519f87..6cfc9cde 100644 --- a/src/OmafDashAccess/OmafMediaStream.cpp +++ b/src/OmafDashAccess/OmafMediaStream.cpp @@ -25,347 +25,1162 @@ */ +#include "OmafDashRangeSync.h" +#include "OmafReaderManager.h" +#include "OmafTileTracksSelector.h" #include "OmafMediaStream.h" +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ +#include "../trace/MtHQ_tp.h" +#endif +#endif VCD_OMAF_BEGIN -OmafMediaStream::OmafMediaStream() -{ - //mCurrentExtractor = NULL; - mMainAdaptationSet = NULL; - mExtratorAdaptationSet = NULL; - m_pStreamInfo = NULL; - m_bEOS = false; - mStreamID = 0; - pthread_mutex_init(&mMutex, NULL); - pthread_mutex_init(&mCurrentMutex, NULL); +OmafMediaStream::OmafMediaStream() { + mMainAdaptationSet = NULL; + mExtratorAdaptationSet = NULL; + m_pStreamInfo = NULL; + m_bEOS = false; + mStreamID = 0; + m_hasTileTracksSelected = false; + m_stitchThread = 0; + m_enabledExtractor = true; + m_stitch = NULL; + m_needParams = false; + m_currFrameIdx = 0; + m_status = STATUS_UNKNOWN; + m_activeSegmentNum = 0; + m_tileSelTimeLine = 0; } -OmafMediaStream::~OmafMediaStream() -{ - SAFE_FREE(m_pStreamInfo); +OmafMediaStream::~OmafMediaStream() { + SAFE_DELETE(m_pStreamInfo->codec); + SAFE_DELETE(m_pStreamInfo->mime_type); + if (m_pStreamInfo->stream_type == MediaType_Video) + { + SAFE_DELETE(m_pStreamInfo->source_resolution); SAFE_FREE(mMainAdaptationSet); - if(mMediaAdaptationSet.size()) - { - for(auto &it: mMediaAdaptationSet) - { - SAFE_DELETE(it.second); - mMediaAdaptationSet.erase(it.first); - } - mMediaAdaptationSet.clear(); + } + SAFE_FREE(m_pStreamInfo); + std::map>::iterator itSel; + for (itSel = m_selectedTileTracks.begin(); itSel != m_selectedTileTracks.end(); ) + { + (itSel->second).clear(); + m_selectedTileTracks.erase(itSel++); + } + m_selectedTileTracks.clear(); + if (mMediaAdaptationSet.size()) { + for (auto& it : mMediaAdaptationSet) { + SAFE_DELETE(it.second); + mMediaAdaptationSet.erase(it.first); } - if(mExtractors.size()) - { - for(auto &it: mExtractors) - { - SAFE_DELETE(it.second); - mExtractors.erase(it.first); + mMediaAdaptationSet.clear(); + } + if (mExtractors.size()) { + for (auto& it : mExtractors) { + SAFE_DELETE(it.second); + mExtractors.erase(it.first); + } + mExtractors.clear(); + } + if (m_stitchThread) { 
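+ // Join the stitching worker first so it has fully exited before the merged
+ // packet lists and the stitch handle below are released.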
+ pthread_join(m_stitchThread, NULL); + m_stitchThread = 0; + } + + if (m_mergedPackets.size()) { + std::list>::iterator it; + for (it = m_mergedPackets.begin(); it != m_mergedPackets.end();) { + std::list packets = *it; + if (packets.size()) { + std::list::iterator itPacket; + for (itPacket = packets.begin(); itPacket != packets.end();) { + MediaPacket* packet = *itPacket; + SAFE_DELETE(packet); + packets.erase(itPacket++); } - mExtractors.clear(); + packets.clear(); + } + m_mergedPackets.erase(it++); } - pthread_mutex_destroy( &mMutex ); - pthread_mutex_destroy( &mCurrentMutex ); + m_mergedPackets.clear(); + } + SAFE_DELETE(m_stitch); + m_sources.clear(); } -int OmafMediaStream::AddExtractor(OmafExtractor* pAS) -{ - if( NULL != pAS ) mExtractors[pAS->GetID()] = pAS; +void OmafMediaStream::SetOmafReaderMgr(std::shared_ptr mgr) noexcept { + omaf_reader_mgr_ = std::move(mgr); - return ERROR_NONE; + std::lock_guard lock(mMutex); + for (auto it = mMediaAdaptationSet.begin(); it != mMediaAdaptationSet.end(); it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); + pAS->SetOmafReaderMgr(omaf_reader_mgr_); + } + + if (m_enabledExtractor) { + for (auto extrator_it = mExtractors.begin(); extrator_it != mExtractors.end(); extrator_it++) { + OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); + extractor->SetOmafReaderMgr(omaf_reader_mgr_); + } + } } -int OmafMediaStream::AddAdaptationSet(OmafAdaptationSet* pAS) -{ - if( NULL != pAS ) mMediaAdaptationSet[pAS->GetID()] = pAS; - return ERROR_NONE; +void OmafMediaStream::Close() { + if (m_status != STATUS_STOPPED) { + m_status = STATUS_STOPPED; + if (m_stitchThread) { + pthread_join(m_stitchThread, NULL); + m_stitchThread = 0; + } + } } -int OmafMediaStream::InitStream(std::string type) -{ - if(NULL== m_pStreamInfo ) - m_pStreamInfo = (DashStreamInfo*)malloc(sizeof(DashStreamInfo)); +int OmafMediaStream::AddExtractor(OmafExtractor* pAS) { + if (NULL != pAS) mExtractors[pAS->GetID()] = pAS; - if (NULL == m_pStreamInfo) - { - return ERROR_NULL_PTR; - } + return ERROR_NONE; +} - if( type == "video") - { - m_pStreamInfo->stream_type = MediaType_Video; - } - else if( type == "audio") - { - m_pStreamInfo->stream_type = MediaType_Audio; - } - else - { - return ERROR_INVALID; - } +int OmafMediaStream::AddAdaptationSet(OmafAdaptationSet* pAS) { + if (NULL != pAS) { + mMediaAdaptationSet[pAS->GetID()] = pAS; + } + return ERROR_NONE; +} - UpdateStreamInfo(); +int OmafMediaStream::InitStream(std::string type) { + if (NULL == m_pStreamInfo) m_pStreamInfo = (DashStreamInfo*)malloc(sizeof(DashStreamInfo)); - SetupExtratorDependency(); + if (NULL == m_pStreamInfo) { + return ERROR_NULL_PTR; + } - return ERROR_NONE; -} + if (type == "video") { + m_pStreamInfo->stream_type = MediaType_Video; + } else if (type == "audio") { + m_pStreamInfo->stream_type = MediaType_Audio; + } else { + return ERROR_INVALID; + } + if (type == "video") + { + if (!m_enabledExtractor && !m_stitch) { + m_stitch = new OmafTilesStitch(); + if (!m_stitch) return OMAF_ERROR_NULL_PTR; + } + } + UpdateStreamInfo(); -void OmafMediaStream::UpdateStreamInfo() -{ - if(!mMediaAdaptationSet.size()) return; - - if(NULL != mMainAdaptationSet && NULL != mExtratorAdaptationSet){ - VideoInfo vi = mMainAdaptationSet->GetVideoInfo(); - AudioInfo ai = mMainAdaptationSet->GetAudioInfo(); - - m_pStreamInfo->bit_rate = vi.bit_rate; - - m_pStreamInfo->framerate_den = vi.frame_Rate.den; - m_pStreamInfo->framerate_num = vi.frame_Rate.num; - m_pStreamInfo->height = 
mExtratorAdaptationSet->GetVideoInfo().height; - m_pStreamInfo->width = mExtratorAdaptationSet->GetVideoInfo().width; - m_pStreamInfo->mime_type = mMainAdaptationSet->GetMimeType().c_str(); - m_pStreamInfo->codec = mMainAdaptationSet->GetCodec()[0].c_str(); - m_pStreamInfo->mFpt = (int32_t)mMainAdaptationSet->GetFramePackingType(); - m_pStreamInfo->mProjFormat = (int32_t)mMainAdaptationSet->GetProjectionFormat(); - m_pStreamInfo->segmentDuration = mMainAdaptationSet->GetSegmentDuration(); - - m_pStreamInfo->channel_bytes = ai.channel_bytes; - m_pStreamInfo->channels = ai.channels; - m_pStreamInfo->sample_rate = ai.sample_rate; - - int sourceNumber = mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos.size(); - m_pStreamInfo->source_number = sourceNumber; - m_pStreamInfo->source_resolution = new SourceResolution[sourceNumber]; - for (int i=0;isource_resolution[i].qualityRanking = mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos[i].quality_ranking; - m_pStreamInfo->source_resolution[i].width = mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos[i].orig_width; - m_pStreamInfo->source_resolution[i].height = mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos[i].orig_height; - } - std::map::iterator itAS; - for (itAS = mMediaAdaptationSet.begin(); - itAS != mMediaAdaptationSet.end(); itAS++) - { - if (mMainAdaptationSet == (itAS->second)) - break; - } - if (itAS != mMediaAdaptationSet.end()) - { - mMediaAdaptationSet.erase(itAS); + if (type == "video") + { + SetupExtratorDependency(); + + if (!m_enabledExtractor) { + int32_t ret = StartTilesStitching(); + if (ret) { + OMAF_LOG(LOG_ERROR, "Failed to start tiles stitching !\n"); + return ret; } + } + } + + return ERROR_NONE; +} + +OMAF_STATUS OmafMediaStream::UpdateStreamInfo() { + if (!mMediaAdaptationSet.size()) return OMAF_ERROR_INVALID_DATA; + + if (m_enabledExtractor) { + if (NULL != mMainAdaptationSet && NULL != mExtratorAdaptationSet) { + VideoInfo vi = mMainAdaptationSet->GetVideoInfo(); + AudioInfo ai = mMainAdaptationSet->GetAudioInfo(); + + m_pStreamInfo->bit_rate = vi.bit_rate; + + m_pStreamInfo->framerate_den = vi.frame_Rate.den; + m_pStreamInfo->framerate_num = vi.frame_Rate.num; + m_pStreamInfo->width = mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos[0].orig_width; + m_pStreamInfo->height = mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos[0].orig_height; + m_pStreamInfo->mime_type = new char[1024]; + m_pStreamInfo->codec = new char[1024]; + memcpy_s(const_cast(m_pStreamInfo->mime_type), 1024, mMainAdaptationSet->GetMimeType().c_str(), 1024); + memcpy_s(const_cast(m_pStreamInfo->codec), 1024, mMainAdaptationSet->GetCodec()[0].c_str(), 1024); + m_pStreamInfo->mFpt = (int32_t)mMainAdaptationSet->GetFramePackingType(); + m_pStreamInfo->mProjFormat = (int32_t)mMainAdaptationSet->GetProjectionFormat(); + m_pStreamInfo->segmentDuration = mMainAdaptationSet->GetSegmentDuration(); + + m_pStreamInfo->channel_bytes = ai.channel_bytes; + m_pStreamInfo->channels = ai.channels; + m_pStreamInfo->sample_rate = ai.sample_rate; + + int sourceNumber = mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos.size(); + m_pStreamInfo->source_number = sourceNumber; + m_pStreamInfo->source_resolution = new SourceResolution[sourceNumber]; + for (int i = 0; i < sourceNumber; i++) { + m_pStreamInfo->source_resolution[i].qualityRanking = static_cast( + mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos[i].quality_ranking); + 
m_pStreamInfo->source_resolution[i].width = + mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos[i].orig_width; + m_pStreamInfo->source_resolution[i].height = + mExtratorAdaptationSet->GetQualityRanking()->srqr_quality_infos[i].orig_height; + } + std::map::iterator itAS; + for (itAS = mMediaAdaptationSet.begin(); itAS != mMediaAdaptationSet.end(); itAS++) { + if (mMainAdaptationSet == (itAS->second)) break; + } + if (itAS != mMediaAdaptationSet.end()) { + mMediaAdaptationSet.erase(itAS); + } } + } else { + if ((m_pStreamInfo->stream_type == MediaType_Video) && (NULL != mMainAdaptationSet)) { + VideoInfo vi = mMainAdaptationSet->GetVideoInfo(); + //AudioInfo ai = mMainAdaptationSet->GetAudioInfo(); + + m_pStreamInfo->bit_rate = vi.bit_rate; + + m_pStreamInfo->framerate_den = vi.frame_Rate.den; + m_pStreamInfo->framerate_num = vi.frame_Rate.num; + m_pStreamInfo->height = vi.height; // mExtratorAdaptationSet->GetVideoInfo().height; + m_pStreamInfo->width = vi.width; // mExtratorAdaptationSet->GetVideoInfo().width; + m_pStreamInfo->mime_type = new char[1024]; + m_pStreamInfo->codec = new char[1024]; + memcpy_s(const_cast(m_pStreamInfo->mime_type), 1024, mMainAdaptationSet->GetMimeType().c_str(), 1024); + memcpy_s(const_cast(m_pStreamInfo->codec), 1024, mMainAdaptationSet->GetCodec()[0].c_str(), 1024); + m_pStreamInfo->mFpt = (int32_t)mMainAdaptationSet->GetFramePackingType(); + m_pStreamInfo->mProjFormat = (int32_t)mMainAdaptationSet->GetProjectionFormat(); + m_pStreamInfo->segmentDuration = mMainAdaptationSet->GetSegmentDuration(); + + //m_pStreamInfo->channel_bytes = ai.channel_bytes; + //m_pStreamInfo->channels = ai.channels; + //m_pStreamInfo->sample_rate = ai.sample_rate; + + std::set allQualities; + std::map::iterator itAS; + for (itAS = mMediaAdaptationSet.begin(); itAS != mMediaAdaptationSet.end(); itAS++) { + if (mMainAdaptationSet == (itAS->second)) break; + } + if (itAS != mMediaAdaptationSet.end()) { + mMediaAdaptationSet.erase(itAS); + } + + for (itAS = mMediaAdaptationSet.begin(); itAS != mMediaAdaptationSet.end(); itAS++) { + OmafAdaptationSet* adaptationSet = itAS->second; + auto qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + allQualities.insert(qualityRanking); + } + std::set::reverse_iterator itQuality; + for (itQuality = allQualities.rbegin(); itQuality != allQualities.rend(); itQuality++) { + auto quality = *itQuality; + int32_t width = 0; + int32_t height = 0; + for (itAS = mMediaAdaptationSet.begin(); itAS != mMediaAdaptationSet.end(); itAS++) { + OmafAdaptationSet* adaptationSet = itAS->second; + OmafSrd* srd = adaptationSet->GetSRD(); + int32_t tileWidth = srd->get_W(); + int32_t tileHeight = srd->get_H(); + int32_t tileLeft = srd->get_X(); + int32_t tileTop = srd->get_Y(); + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + if (qualityRanking == quality) { + if (tileTop == 0) { + width += tileWidth; + } + + if (tileLeft == 0) { + height += tileHeight; + } + } + } + + SourceInfo oneSrc; + oneSrc.qualityRanking = quality; + oneSrc.width = width; + oneSrc.height = height; + m_sources.insert(make_pair(quality, oneSrc)); + } + + int sourceNumber = m_sources.size(); + m_pStreamInfo->source_number = sourceNumber; + m_pStreamInfo->source_resolution = new SourceResolution[sourceNumber]; + std::map::iterator itSrc; + itSrc = m_sources.begin(); + for (int i = 0; ((i < sourceNumber) && (itSrc != m_sources.end())); i++) { + SourceInfo oneSrc = itSrc->second; - uint32_t rowNum = 0, colNum = 0; - for(auto 
&it:mMediaAdaptationSet) + m_pStreamInfo->source_resolution[i].qualityRanking = oneSrc.qualityRanking; + m_pStreamInfo->source_resolution[i].width = oneSrc.width; + m_pStreamInfo->source_resolution[i].height = oneSrc.height; + itSrc++; + } + } + else if (m_pStreamInfo->stream_type == MediaType_Audio) { + AudioInfo ai = mMainAdaptationSet->GetAudioInfo(); + m_pStreamInfo->channel_bytes = ai.channel_bytes; + m_pStreamInfo->channels = ai.channels; + m_pStreamInfo->sample_rate = ai.sample_rate; + + std::map::iterator itAS; + itAS = mMediaAdaptationSet.begin(); + OmafAdaptationSet *as = itAS->second; + + m_pStreamInfo->mime_type = new char[1024]; + m_pStreamInfo->codec = new char[1024]; + memcpy_s(const_cast(m_pStreamInfo->mime_type), 1024, as->GetMimeType().c_str(), 1024); + memcpy_s(const_cast(m_pStreamInfo->codec), 1024, as->GetCodec()[0].c_str(), 1024); + m_pStreamInfo->segmentDuration = as->GetSegmentDuration(); + OMAF_LOG(LOG_INFO, "Audio mime type %s\n", m_pStreamInfo->mime_type); + OMAF_LOG(LOG_INFO, "Audio codec %s\n", m_pStreamInfo->codec); + OMAF_LOG(LOG_INFO, "Audio segment duration %ld\n", m_pStreamInfo->segmentDuration); + } + } + + if (m_pStreamInfo->stream_type == MediaType_Video) + { + uint32_t rowNum = 0, colNum = 0; + for (auto& it : mMediaAdaptationSet) { OmafAdaptationSet* as = it.second; - uint32_t qr = as->GetRepresentationQualityRanking(); + QualityRank qr = as->GetRepresentationQualityRanking(); // only calculate tile segmentation of the stream with highest resolution - if(qr == 1) - { - OmafSrd* srd = as->GetSRD(); - if(srd->get_X() == 0) - rowNum++; - if(srd->get_Y() == 0) - colNum++; + if (qr == HIGHEST_QUALITY_RANKING) { + OmafSrd* srd = as->GetSRD(); + if (srd->get_X() == 0) rowNum++; + if (srd->get_Y() == 0) colNum++; } - } - m_pStreamInfo->tileRowNum = rowNum; - m_pStreamInfo->tileColNum = colNum; + } + m_pStreamInfo->tileRowNum = rowNum; + m_pStreamInfo->tileColNum = colNum; + if (m_pStreamInfo->mProjFormat == VCD::OMAF::ProjectionFormat::PF_CUBEMAP) { + std::map::iterator itAS; + for (itAS = mMediaAdaptationSet.begin(); itAS != mMediaAdaptationSet.end(); itAS++) { + OmafAdaptationSet* adaptationSet = itAS->second; + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + if (qualityRanking == HIGHEST_QUALITY_RANKING) { + OmafSrd* srd = adaptationSet->GetSRD(); + TileDef* oneTile = adaptationSet->GetTileInfo(); + if (!oneTile) { + OMAF_LOG(LOG_ERROR, "Un-matched projection format !\n"); + return OMAF_ERROR_INVALID_PROJECTIONTYPE; + } + int32_t globalX = oneTile->x; + int32_t globalY = oneTile->y; + int32_t faceWidth = m_pStreamInfo->width / 3; + int32_t faceHeight = m_pStreamInfo->height / 2; + int32_t faceColId = globalX / faceWidth; + int32_t faceRowId = globalY / faceHeight; + int32_t localX = globalX % faceWidth; + int32_t localY = globalY % faceHeight; + int32_t tileWidth = srd->get_W(); + int32_t tileHeight = srd->get_H(); + if (faceRowId == 0) { + if (faceColId == 0) { + oneTile->faceId = 2; + oneTile->x = localX; + oneTile->y = localY; + } else if (faceColId == 1) { + oneTile->faceId = 0; + oneTile->x = localX; + oneTile->y = localY; + } else if (faceColId == 2) { + oneTile->faceId = 3; + oneTile->x = localX; + oneTile->y = localY; + } + } else if (faceRowId == 1) { + if (faceColId == 0) { + oneTile->faceId = 5; + oneTile->y = localX; + oneTile->x = faceHeight - tileHeight - localY; + } else if (faceColId == 1) { + oneTile->faceId = 1; + oneTile->x = localX; + oneTile->y = localY; + } else if (faceColId == 2) { + oneTile->faceId = 
4; + oneTile->y = faceWidth - tileWidth - localX; + oneTile->x = localY; + } + } + } + } + } + } + + return ERROR_NONE; } -void OmafMediaStream::SetupExtratorDependency() -{ - for(auto extrator_it = mExtractors.begin(); - extrator_it != mExtractors.end(); - extrator_it++ ){ - OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); - for(auto it = mMediaAdaptationSet.begin(); - it != mMediaAdaptationSet.end(); - it++ ){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); - extractor->AddDependAS(pAS); - } +void OmafMediaStream::SetupExtratorDependency() { + for (auto extrator_it = mExtractors.begin(); extrator_it != mExtractors.end(); extrator_it++) { + OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); + for (auto it = mMediaAdaptationSet.begin(); it != mMediaAdaptationSet.end(); it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); + extractor->AddDependAS(pAS); } + } } -int OmafMediaStream::UpdateStartNumber(uint64_t nAvailableStartTime) -{ - int ret = ERROR_NONE; - pthread_mutex_lock(&mMutex); - for(auto it = mMediaAdaptationSet.begin(); - it != mMediaAdaptationSet.end(); - it++ ){ +int OmafMediaStream::SetupSegmentSyncer(const OmafDashParams& params) { + OmafDashRangeSync::Ptr syncer; + OMAF_LOG(LOG_INFO, "Setup segment window syncer!\n"); + auto as = mMediaAdaptationSet.begin(); + if (as != mMediaAdaptationSet.end()) { + OMAF_LOG(LOG_INFO, "Create one dash window syncer!\n"); + syncer = make_omaf_syncer(*as->second, [this](SegmentSyncNode node) { + std::lock_guard lock(this->mMutex); + OMAF_LOG(LOG_INFO, "Syncer segment number to value=%lld\n", node.segment_value.number_); + for (auto it = this->mMediaAdaptationSet.begin(); it != this->mMediaAdaptationSet.end(); it++) { OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); - pAS->UpdateStartNumberByTime(nAvailableStartTime); - } + pAS->UpdateSegmentNumber(node.segment_value.number_); + } - for(auto extrator_it = mExtractors.begin(); - extrator_it != mExtractors.end(); - extrator_it++ ){ + for (auto extrator_it = this->mExtractors.begin(); extrator_it != this->mExtractors.end(); extrator_it++) { OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); - extractor->UpdateStartNumberByTime(nAvailableStartTime); + extractor->UpdateSegmentNumber(node.segment_value.number_); + } + }); + } + + if (syncer) { + syncer_helper_.addSyncer(syncer); + + CurlParams curl_params; + curl_params.http_params_ = params.http_params_; + curl_params.http_proxy_ = params.http_proxy_; + syncer_helper_.start(curl_params); + } + + return ERROR_NONE; +} + +int OmafMediaStream::UpdateStartNumber(uint64_t nAvailableStartTime) { + int ret = ERROR_NONE; + + std::lock_guard lock(mMutex); + for (auto it = mMediaAdaptationSet.begin(); it != mMediaAdaptationSet.end(); it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); + pAS->UpdateStartNumberByTime(nAvailableStartTime); + } + + for (auto extrator_it = mExtractors.begin(); extrator_it != mExtractors.end(); extrator_it++) { + OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); + extractor->UpdateStartNumberByTime(nAvailableStartTime); + } + return ret; +} + +int OmafMediaStream::DownloadInitSegment() { + std::lock_guard lock(mMutex); + for (auto it = mMediaAdaptationSet.begin(); it != mMediaAdaptationSet.end(); it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); + pAS->DownloadInitializeSegment(); + } + + if (m_enabledExtractor) { + for (auto extrator_it = mExtractors.begin(); extrator_it != mExtractors.end(); 
extrator_it++) { + OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); + extractor->DownloadInitializeSegment(); } - pthread_mutex_unlock(&mMutex); - return ret; + } + + return ERROR_NONE; } -/* -int OmafMediaStream::LoadLocalInitSegment() -{ - int ret = ERROR_NONE; - pthread_mutex_lock(&mMutex); - for(auto it = mMediaAdaptationSet.begin(); - it != mMediaAdaptationSet.end(); - it++ ){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); - //pAS->DownloadInitializeSegment(); - pAS->LoadLocalInitSegment(); + +int OmafMediaStream::DownloadSegments() { + int ret = ERROR_NONE; + std::lock_guard lock(mMutex); + for (auto it = mMediaAdaptationSet.begin(); it != mMediaAdaptationSet.end(); it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); + pAS->DownloadSegment(); + } + + // NOTE: this function should be in the same thread with UpdateEnabledExtractors + // , otherwise mCurrentExtractors need a mutex lock + // pthread_mutex_lock(&mCurrentMutex); + for (auto extrator_it = mExtractors.begin(); extrator_it != mExtractors.end(); extrator_it++) { + OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); + extractor->DownloadSegment(); + } + // pthread_mutex_unlock(&mCurrentMutex); + return ret; +} + +int OmafMediaStream::SeekTo(int seg_num) { + int ret = ERROR_NONE; + std::lock_guard lock(mMutex); + for (auto it = mMediaAdaptationSet.begin(); it != mMediaAdaptationSet.end(); it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); + pAS->SeekTo(seg_num); + } + + for (auto extrator_it = mExtractors.begin(); extrator_it != mExtractors.end(); extrator_it++) { + OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); + extractor->SeekTo(seg_num); + } + return ret; +} + +int OmafMediaStream::UpdateEnabledExtractors(std::list extractors) { + if (extractors.empty()) return ERROR_INVALID; + + int ret = ERROR_NONE; + + { + std::lock_guard lock(mMutex); + for (auto as_it1 = mMediaAdaptationSet.begin(); as_it1 != mMediaAdaptationSet.end(); as_it1++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it1->second); + pAS->Enable(false); + } + for (auto extrator_it = mExtractors.begin(); extrator_it != mExtractors.end(); extrator_it++) { + OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); + extractor->Enable(false); } - for(auto extrator_it = mExtractors.begin(); - extrator_it != mExtractors.end(); - extrator_it++ ){ - OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); - //extractor->DownloadInitializeSegment(); - extractor->LoadLocalInitSegment(); + { + std::lock_guard lock(mCurrentMutex); + mCurrentExtractors.clear(); + for (auto it = extractors.begin(); it != extractors.end(); it++) { + OmafExtractor* tmp = (OmafExtractor*)(*it); + tmp->Enable(true); + mCurrentExtractors.push_back(tmp); + std::map AS = tmp->GetDependAdaptationSets(); + for (auto as_it = AS.begin(); as_it != AS.end(); as_it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it->second); + pAS->Enable(true); + } + } } - pthread_mutex_unlock(&mMutex); - return ret; + } + + return ret; } -*/ -int OmafMediaStream::DownloadInitSegment() -{ - pthread_mutex_lock(&mMutex); - for(auto it = mMediaAdaptationSet.begin(); - it != mMediaAdaptationSet.end(); - it++ ){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); - pAS->DownloadInitializeSegment(); - } - for(auto extrator_it = mExtractors.begin(); - extrator_it != mExtractors.end(); - extrator_it++ ){ - OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); - 
extractor->DownloadInitializeSegment(); +int OmafMediaStream::EnableAllAudioTracks() { + int ret = ERROR_NONE; + + { + std::lock_guard lock(mMutex); + for (auto as_it1 = mMediaAdaptationSet.begin(); as_it1 != mMediaAdaptationSet.end(); as_it1++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it1->second); + pAS->Enable(true); } - pthread_mutex_unlock(&mMutex); - return ERROR_NONE; + } + + return ret; } -/* -int OmafMediaStream::LoadLocalSegments() -{ - int ret = ERROR_NONE; - pthread_mutex_lock(&mMutex); - for(auto it = mMediaAdaptationSet.begin(); - it != mMediaAdaptationSet.end(); - it++ ){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); - //pAS->DownloadSegment(); - pAS->LoadLocalSegment(); +int OmafMediaStream::UpdateEnabledTileTracks(std::map selectedTiles) { + if (selectedTiles.empty()) return ERROR_INVALID; + + int ret = ERROR_NONE; + + { + std::lock_guard lock(mMutex); + for (auto as_it1 = mMediaAdaptationSet.begin(); as_it1 != mMediaAdaptationSet.end(); as_it1++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it1->second); + pAS->Enable(false); + } + for (auto extrator_it = mExtractors.begin(); extrator_it != mExtractors.end(); extrator_it++) { + OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); + extractor->Enable(false); } - // NOTE: this function should be in the same thread with UpdateEnabledExtractors - // , otherwise mCurrentExtractors need a mutex lock - for(auto extrator_it = mExtractors.begin(); - extrator_it != mExtractors.end(); - extrator_it++ ){ - OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); - //extractor->DownloadSegment(); - extractor->LoadLocalSegment(); + { + std::lock_guard lock(mCurrentMutex); + //m_selectedTileTracks.clear(); + OMAF_LOG(LOG_INFO, "Will insert tiles selection for time line %ld\n", m_tileSelTimeLine); + std::map oneSelection; + for (auto itAS = selectedTiles.begin(); itAS != selectedTiles.end(); itAS++) { + OmafAdaptationSet* adaptationSet = itAS->second; + adaptationSet->Enable(true); + OMAF_LOG(LOG_INFO, "Insert track %d for time line %ld\n", itAS->first, m_tileSelTimeLine); + oneSelection.insert(make_pair(itAS->first, itAS->second)); + } + //m_selectedTileTracks.push_back(oneSelection); + m_selectedTileTracks.insert(make_pair(m_tileSelTimeLine, oneSelection)); + m_tileSelTimeLine++; + m_hasTileTracksSelected = true; } - pthread_mutex_unlock(&mMutex); - return ret; + } + + return ret; } -*/ -int OmafMediaStream::DownloadSegments() -{ - int ret = ERROR_NONE; - pthread_mutex_lock(&mMutex); - for(auto it = mMediaAdaptationSet.begin(); - it != mMediaAdaptationSet.end(); - it++ ){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); - pAS->DownloadSegment(); - } - // NOTE: this function should be in the same thread with UpdateEnabledExtractors - // , otherwise mCurrentExtractors need a mutex lock - //pthread_mutex_lock(&mCurrentMutex); - for(auto extrator_it = mExtractors.begin(); - extrator_it != mExtractors.end(); - extrator_it++ ){ - OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); - extractor->DownloadSegment(); +int OmafMediaStream::GetTrackCount() { + int tracksCnt = 0; + if (m_enabledExtractor) { + tracksCnt = this->mMediaAdaptationSet.size() + this->mExtractors.size(); + } else { + tracksCnt = this->mMediaAdaptationSet.size(); + } + return tracksCnt; +} + +int32_t OmafMediaStream::StartTilesStitching() { + int32_t ret = pthread_create(&m_stitchThread, NULL, TilesStitchingThread, this); + if (ret) { + OMAF_LOG(LOG_ERROR, "Failed to create tiles stitching 
thread !\n"); + return OMAF_ERROR_CREATE_THREAD; + } + + return ERROR_NONE; +} + +void* OmafMediaStream::TilesStitchingThread(void* pThis) { + OmafMediaStream* pStream = (OmafMediaStream*)pThis; + + pStream->TilesStitching(); + + return NULL; +} + +static bool IsSelectionChanged(TracksMap selection1, TracksMap selection2) { + bool isChanged = false; + + if (selection1.size() && selection2.size()) { + if (selection1.size() != selection2.size()) { + isChanged = true; + } else { + std::map::iterator it1; + for (it1 = selection1.begin(); it1 != selection1.end(); it1++) { + OmafAdaptationSet* as1 = it1->second; + std::map::iterator it2; + for (it2 = selection2.begin(); it2 != selection2.end(); it2++) { + OmafAdaptationSet* as2 = it2->second; + if (as1 == as2) { + break; + } + } + if (it2 == selection2.end()) { + isChanged = true; + break; + } + } } - //pthread_mutex_unlock(&mCurrentMutex); - pthread_mutex_unlock(&mMutex); - return ret; + } + + return isChanged; } -int OmafMediaStream::SeekTo( int seg_num) -{ - int ret = ERROR_NONE; - pthread_mutex_lock(&mMutex); - for(auto it = mMediaAdaptationSet.begin(); - it != mMediaAdaptationSet.end(); - it++ ){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(it->second); - pAS->SeekTo(seg_num); +int32_t OmafMediaStream::TilesStitching() { + if (!m_stitch) { + OMAF_LOG(LOG_ERROR, "Tiles stitching handle hasn't been created !\n"); + return OMAF_ERROR_NULL_PTR; + } + int ret = ERROR_NONE; + bool selectedFlag = false; + uint32_t wait_time = 30000; + uint32_t current_wait_time = 0; + + do + { + { + std::lock_guard lock(mCurrentMutex); + selectedFlag = m_hasTileTracksSelected; + } + usleep(100); + current_wait_time++; + if (current_wait_time > wait_time) + { + OMAF_LOG(LOG_ERROR, "Time out for tile track select!\n"); + return ERROR_INVALID; } + }while (!selectedFlag); - for(auto extrator_it = mExtractors.begin(); - extrator_it != mExtractors.end(); - extrator_it++ ){ - OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); - extractor->SeekTo(seg_num); + uint64_t currFramePTS = 0; + uint64_t currSegTimeLine = 0; + std::map mapSelectedAS; + bool isEOS = false; + uint32_t waitTimes = 1000; + uint32_t waitTimes_GetPacket = 100; + uint32_t selectionWaitTimes = 10000; + bool prevPoseChanged = false; + std::map prevSelectedAS; + bool segmentEnded = false; + size_t samplesNumPerSeg = 0; + size_t aveSamplesNumPerSeg = 0; + bool skipFrames = false; + bool beginNewSeg = false; + while (!isEOS && m_status != STATUS_STOPPED) { + beginNewSeg = false; + + if (aveSamplesNumPerSeg && !skipFrames) + currFramePTS++; + + skipFrames = false; + if (aveSamplesNumPerSeg) + { + if ((currFramePTS / aveSamplesNumPerSeg + 1) > currSegTimeLine) + { + beginNewSeg = true; + } + currSegTimeLine = currFramePTS / aveSamplesNumPerSeg + 1; } - pthread_mutex_unlock(&mMutex); - return ret; -} + // begin to generate tiles merged media packets for each frame + OMAF_LOG(LOG_INFO, "Begin stitch frame %ld from segment %ld\n", currFramePTS, currSegTimeLine); + OMAF_LOG(LOG_INFO, "Begin new seg %d and samples num per seg %ld\n", beginNewSeg, samplesNumPerSeg); + uint32_t currWaitTimes = 0; + std::map updatedSelectedAS; + + if (prevSelectedAS.empty() || beginNewSeg) + { + if (prevSelectedAS.empty()) + { + while (currWaitTimes < selectionWaitTimes) + { + { + std::lock_guard lock(mCurrentMutex); + if (m_selectedTileTracks.size() >= 2) + break; + } + usleep(50); + currWaitTimes++; + } + if (currWaitTimes >= selectionWaitTimes) + { + OMAF_LOG(LOG_ERROR, "Wait too much time for tiles selection, 
timed out !\n"); + break; + } + currWaitTimes = 0; + + { + std::lock_guard lock(mCurrentMutex); -int OmafMediaStream::UpdateEnabledExtractors(std::list extractors) -{ - if( extractors.empty() ) return ERROR_INVALID; + //m_selectedTileTracks.pop_front(); //At the beginning, there are two same tiles selection in m_selectedTileTracks due to previous process in StartReadThread, so remove repeated one + updatedSelectedAS = m_selectedTileTracks[1]; //At the beginning, there are two same tiles selection in m_selectedTileTracks due to previous process in StartReadThread, so remove repeated one + } + currSegTimeLine = 1; + } + else + { + while (currWaitTimes < selectionWaitTimes) + { + { + std::lock_guard lock(mCurrentMutex); + std::map>::iterator it; + it = m_selectedTileTracks.find(currSegTimeLine); + if (it != m_selectedTileTracks.end()) + break; + } - int ret = ERROR_NONE; + usleep((m_pStreamInfo->segmentDuration * 1000000) / selectionWaitTimes); + currWaitTimes++; + } - pthread_mutex_lock(&mMutex); - for(auto as_it1 = mMediaAdaptationSet.begin(); as_it1 != mMediaAdaptationSet.end(); as_it1++){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it1->second); - pAS->Enable(false); + { + std::lock_guard lock(mCurrentMutex); + if (currWaitTimes < selectionWaitTimes) + { + updatedSelectedAS = m_selectedTileTracks[currSegTimeLine]; + } + else + { + if (m_status == STATUS_STOPPED) + { + OMAF_LOG(LOG_INFO, "Status Stopped !\n"); + break; + } + else + { + updatedSelectedAS = prevSelectedAS; + OMAF_LOG(LOG_WARNING, "Tile tracks selection result for current time line hasn't come, Still use previous selected AS !\n"); + } + } + } + currWaitTimes = 0; + } + mapSelectedAS = updatedSelectedAS; + OMAF_LOG(LOG_INFO, "For frame next to frame %ld, Use updated viewport !\n", currFramePTS); } - for(auto extrator_it = mExtractors.begin(); - extrator_it != mExtractors.end(); - extrator_it++ ){ - OmafExtractor* extractor = (OmafExtractor*)(extrator_it->second); - extractor->Enable(false); + else + { + mapSelectedAS = prevSelectedAS; + OMAF_LOG(LOG_INFO, "For frame next to frame %ld, Use last viewport !\n", currFramePTS); } - pthread_mutex_lock(&mCurrentMutex); - mCurrentExtractors.clear(); - for( auto it = extractors.begin(); it != extractors.end(); it++ ){ - OmafExtractor* tmp = (OmafExtractor*) (*it); - tmp->Enable(true); - mCurrentExtractors.push_back(tmp); - std::map AS = tmp->GetDependAdaptationSets(); - for(auto as_it = AS.begin(); as_it != AS.end(); as_it++ ){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it->second); - pAS->Enable(true); + prevPoseChanged = prevSelectedAS.empty() ? 
false : IsSelectionChanged(mapSelectedAS, prevSelectedAS); + + prevSelectedAS = mapSelectedAS; + bool hasPktOutdated = false; + std::map selectedPackets; + for (auto as_it = mapSelectedAS.begin(); as_it != mapSelectedAS.end(); as_it++) { + OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it->second); + int32_t trackID = pAS->GetTrackNumber(); + MediaPacket* onePacket = NULL; + if (!(m_stitch->IsInitialized())) m_needParams = true; + + if (prevPoseChanged) m_needParams = true; + + // ret = READERMANAGER::GetInstance()->GetNextFrame(trackID, onePacket, m_needParams); + if (as_it != mapSelectedAS.begin()) { + uint64_t pts = omaf_reader_mgr_->GetOldestPacketPTSForTrack(trackID); + if (pts > currFramePTS) { + OMAF_LOG(LOG_INFO, "For current PTS %ld :\n", currFramePTS); + OMAF_LOG(LOG_INFO, "Outdated PTS %ld from track %d\n", pts, trackID); + hasPktOutdated = true; + if (samplesNumPerSeg == aveSamplesNumPerSeg && samplesNumPerSeg != 0) + { + if (pts % samplesNumPerSeg) + { + pts = ((pts / samplesNumPerSeg) + 1) * samplesNumPerSeg; + } + } + else //most likely current segment is the last segment + { + pts = aveSamplesNumPerSeg * (currSegTimeLine - 1) + samplesNumPerSeg; + } + + currFramePTS = pts; + beginNewSeg = true; + skipFrames = true; + break; + } else if (pts < currFramePTS) { + + if (pts == 0) + { + while((!pts) && (currWaitTimes < waitTimes) && (m_status != STATUS_STOPPED)) + { + usleep(((m_pStreamInfo->segmentDuration * 1000000) / 2) / waitTimes); + currWaitTimes++; + pts = omaf_reader_mgr_->GetOldestPacketPTSForTrack(trackID); + } + if (currWaitTimes >= waitTimes) + { + OMAF_LOG(LOG_INFO, "Wait times has timed out for frame %ld from track %d\n", currFramePTS, trackID); + } + currWaitTimes = 0; + if (pts > currFramePTS) + { + OMAF_LOG(LOG_INFO, "After wait for a moment, outdated PTS %ld from track %d\n", pts, trackID); + hasPktOutdated = true; + if (samplesNumPerSeg == aveSamplesNumPerSeg && samplesNumPerSeg != 0) + { + if (pts % samplesNumPerSeg) + { + pts = ((pts / samplesNumPerSeg) + 1) * samplesNumPerSeg; + } + } + else //most likely current segment is the last segment + { + pts = aveSamplesNumPerSeg * (currSegTimeLine - 1) + samplesNumPerSeg; + } + + currFramePTS = pts; + skipFrames = true; + break; + } else if (pts < currFramePTS) { + omaf_reader_mgr_->RemoveOutdatedPacketForTrack(trackID, currFramePTS); + pts = omaf_reader_mgr_->GetOldestPacketPTSForTrack(trackID); + if (pts > currFramePTS) + { + OMAF_LOG(LOG_INFO, "After wait for a moment, outdated PTS %ld from track %d\n", pts, trackID); + hasPktOutdated = true; + if (samplesNumPerSeg == aveSamplesNumPerSeg && samplesNumPerSeg != 0) + { + if (pts % samplesNumPerSeg) + { + pts = ((pts / samplesNumPerSeg) + 1) * samplesNumPerSeg; + } + } + else //most likely current segment is the last segment + { + pts = aveSamplesNumPerSeg * (currSegTimeLine - 1) + samplesNumPerSeg; + } + + currFramePTS = pts; + skipFrames = true; + break; + } + else if (pts < currFramePTS) + { + OMAF_LOG(LOG_INFO, "After waiting for a while, pts %ld still diff from current PTS %ld\n", pts, currFramePTS); + hasPktOutdated = true; + break; + } + } + } else { + omaf_reader_mgr_->RemoveOutdatedPacketForTrack(trackID, currFramePTS); + pts = omaf_reader_mgr_->GetOldestPacketPTSForTrack(trackID); + + if (pts > currFramePTS) + { + OMAF_LOG(LOG_INFO, "After wait for a moment, outdated PTS %ld from track %d\n", pts, trackID); + hasPktOutdated = true; + if (samplesNumPerSeg == aveSamplesNumPerSeg && samplesNumPerSeg != 0) + { + if (pts % samplesNumPerSeg) + { + pts = 
((pts / samplesNumPerSeg) + 1) * samplesNumPerSeg; + } + } + else //most likely current segment is the last segment + { + pts = aveSamplesNumPerSeg * (currSegTimeLine - 1) + samplesNumPerSeg; + } + + currFramePTS = pts; + skipFrames = true; + break; + } + else if (pts < currFramePTS) + { + OMAF_LOG(LOG_INFO, "After waiting for a while, pts %ld still diff from current PTS %ld\n", pts, currFramePTS); + hasPktOutdated = true; + break; + } + } } + } + + ret = omaf_reader_mgr_->GetNextPacketWithPTS(trackID, currFramePTS, onePacket, m_needParams); + + OMAF_LOG(LOG_INFO, "Get next packet !\n"); + currWaitTimes = 0; + + while ((ret == ERROR_NULL_PACKET) && (currWaitTimes < waitTimes_GetPacket) && m_status != STATUS_STOPPED) { + + usleep((m_pStreamInfo->segmentDuration * 1000000 / 2) / waitTimes_GetPacket); + currWaitTimes++; + //OMAF_LOG(LOG_INFO, "To get packet %ld for track %d\n", currFramePTS, trackID); + ret = omaf_reader_mgr_->GetNextPacketWithPTS(trackID, currFramePTS, onePacket, m_needParams); + } + + if (ret == ERROR_NONE) { + if (onePacket->GetEOS()) { + OMAF_LOG(LOG_INFO, "EOS has been gotten !\n"); + isEOS = true; + selectedPackets.insert(std::make_pair((uint32_t)(trackID), onePacket)); + break; + } + samplesNumPerSeg = omaf_reader_mgr_->GetSamplesNumPerSegmentForTimeLine(currSegTimeLine); + if (!aveSamplesNumPerSeg) + { + aveSamplesNumPerSeg = samplesNumPerSeg; + } + if (as_it == mapSelectedAS.begin()) { + segmentEnded = onePacket->GetSegmentEnded(); + OMAF_LOG(LOG_INFO, "For frame %ld, segmentEnded %d\n", currFramePTS, segmentEnded); + } + OMAF_LOG(LOG_INFO, "To insert packet %ld for track %d\n", currFramePTS, trackID); + selectedPackets.insert(std::make_pair((uint32_t)(trackID), onePacket)); + } else if (ret == ERROR_NULL_PACKET) { + hasPktOutdated = true; + OMAF_LOG(LOG_INFO, "Still can't get frame %ld for track %d\n", currFramePTS, trackID); + break; + } + } + + if (hasPktOutdated) { + std::list allPackets; + for (auto it1 = selectedPackets.begin(); it1 != selectedPackets.end();) { + MediaPacket* pkt = it1->second; + std::list::iterator pktIter; + pktIter = std::find(allPackets.begin(), allPackets.end(), pkt); + if (pktIter == allPackets.end()) + { + allPackets.push_back(pkt); + SAFE_DELETE(pkt); + } + selectedPackets.erase(it1++); + } + selectedPackets.clear(); + allPackets.clear(); + if (currFramePTS > 0) + { + std::map::iterator itAS; + for (itAS = mMediaAdaptationSet.begin(); itAS != mMediaAdaptationSet.end(); itAS++) + { + OmafAdaptationSet *oneAS = itAS->second; + int32_t trkID = oneAS->GetTrackNumber(); + omaf_reader_mgr_->RemoveOutdatedPacketForTrack(trkID, (currFramePTS)); + } + } + if (!skipFrames) + { + if (beginNewSeg) + { + OMAF_LOG(LOG_INFO, "Current frame %ld is key frame but has outdated, drop frames till next key frame !\n", currFramePTS); + currFramePTS += samplesNumPerSeg; + skipFrames = true; + usleep((m_pStreamInfo->segmentDuration * 1000000) / 2); + } + else + { + OMAF_LOG(LOG_INFO, "Frame %ld can't be stitched, move to next segment !\n", currFramePTS); + + if (samplesNumPerSeg == aveSamplesNumPerSeg && samplesNumPerSeg != 0) + { + currFramePTS = ((currFramePTS / samplesNumPerSeg) + 1) * samplesNumPerSeg; + } + else //most likely current segment is the last segment + { + currFramePTS = aveSamplesNumPerSeg * (currSegTimeLine - 1) + samplesNumPerSeg; + } + skipFrames = true; + usleep((m_pStreamInfo->segmentDuration * 1000000) / 2); + } + } + + continue; + } + + OMAF_LOG(LOG_INFO, "Start to stitch packets! 
and pts is %ld\n", currFramePTS); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T6_stitch_start_time, currFramePTS); +#endif +#endif + if (!isEOS && (selectedPackets.size() != mapSelectedAS.size()) && (currWaitTimes >= waitTimes)) { + OMAF_LOG(LOG_INFO, "Incorrect selected tile tracks packets number for tiles stitching !\n"); + + std::list allPackets; + for (auto it1 = selectedPackets.begin(); it1 != selectedPackets.end();) { + MediaPacket* pkt = it1->second; + std::list::iterator pktIter; + pktIter = std::find(allPackets.begin(), allPackets.end(), pkt); + if (pktIter == allPackets.end()) + { + allPackets.push_back(pkt); + SAFE_DELETE(pkt); + } + selectedPackets.erase(it1++); + } + selectedPackets.clear(); + allPackets.clear(); + if (currFramePTS > 0) + { + std::map::iterator itAS; + for (itAS = mMediaAdaptationSet.begin(); itAS != mMediaAdaptationSet.end(); itAS++) + { + OmafAdaptationSet *oneAS = itAS->second; + int32_t trkID = oneAS->GetTrackNumber(); + omaf_reader_mgr_->RemoveOutdatedPacketForTrack(trkID, (currFramePTS)); + } + } + + continue; + } + + if (!isEOS && !(m_stitch->IsInitialized())) { + ret = m_stitch->Initialize(selectedPackets, m_needParams, + (VCD::OMAF::ProjectionFormat)(m_pStreamInfo->mProjFormat), m_sources); + if (ret) { + OMAF_LOG(LOG_ERROR, "Failed to initialize stitch class !\n"); + std::list allPackets; + for (auto it1 = selectedPackets.begin(); it1 != selectedPackets.end();) { + MediaPacket* pkt = it1->second; + std::list::iterator pktIter; + pktIter = std::find(allPackets.begin(), allPackets.end(), pkt); + if (pktIter == allPackets.end()) + { + allPackets.push_back(pkt); + SAFE_DELETE(pkt); + } + selectedPackets.erase(it1++); + } + allPackets.clear(); + selectedPackets.clear(); + return ret; + } + } else { + if (!isEOS && m_status != STATUS_STOPPED) { + ret = m_stitch->UpdateSelectedTiles(selectedPackets, m_needParams); + if (ret) { + OMAF_LOG(LOG_ERROR, "Failed to update media packets for tiles merge !\n"); + std::list allPackets; + for (auto it1 = selectedPackets.begin(); it1 != selectedPackets.end();) { + MediaPacket* pkt = it1->second; + std::list::iterator pktIter; + pktIter = std::find(allPackets.begin(), allPackets.end(), pkt); + if (pktIter == allPackets.end()) + { + allPackets.push_back(pkt); + SAFE_DELETE(pkt); + } + selectedPackets.erase(it1++); + } + allPackets.clear(); + selectedPackets.clear(); + return ret; + } + + if (currFramePTS > 0) + { + std::map::iterator itAS; + for (itAS = mMediaAdaptationSet.begin(); itAS != mMediaAdaptationSet.end(); itAS++) + { + OmafAdaptationSet *oneAS = itAS->second; + int32_t trkID = oneAS->GetTrackNumber(); + omaf_reader_mgr_->RemoveOutdatedPacketForTrack(trkID, (currFramePTS)); + } + } + } + } + + std::list mergedPackets; + + if (isEOS) { + std::map::iterator itPacket1; + for (itPacket1 = selectedPackets.begin(); itPacket1 != selectedPackets.end(); itPacket1++) { + MediaPacket* packet = itPacket1->second; + mergedPackets.push_back(packet); + } + } else { + mergedPackets = m_stitch->GetTilesMergedPackets(); + } + + { + std::lock_guard lock(m_packetsMutex); + m_mergedPackets.push_back(mergedPackets); + } + std::list::iterator it = mergedPackets.begin(); + if (it == mergedPackets.end()) + { + OMAF_LOG(LOG_ERROR, "Failed to generate tiles stitched media packet !\n"); + selectedPackets.clear(); + return ERROR_INVALID; + } + + MediaPacket *one = NULL; + one = *it; + if (!one) + { + OMAF_LOG(LOG_ERROR, "Tiles stitched media packet is NULL !\n"); + selectedPackets.clear(); + 
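// NOTE: the merged packet list has already been queued into m_mergedPackets
// under m_packetsMutex above, and GetOutTilesMergedPackets() pops it on the
// consumer side under the same lock. A null first packet here therefore
// terminates the stitching loop with ERROR_NULL_PTR.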
return ERROR_NULL_PTR; } - pthread_mutex_unlock(&mCurrentMutex); - pthread_mutex_unlock(&mMutex); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T7_stitch_end_time, one->GetSegID(), currFramePTS, mergedPackets.size()); +#endif +#endif + OMAF_LOG(LOG_INFO, "Finish to stitch packets for packet segment id %d\n", one->GetSegID()); + OMAF_LOG(LOG_INFO, "packet pts is %ld and video number is %lld\n", one->GetPTS(), mergedPackets.size()); + selectedPackets.clear(); + prevPoseChanged = false; + } - return ret; + return ERROR_NONE; } -int OmafMediaStream::GetTrackCount() -{ - return this->mMediaAdaptationSet.size() + this->mExtractors.size(); +std::list OmafMediaStream::GetOutTilesMergedPackets() { + std::list outPackets; + std::lock_guard lock(m_packetsMutex); + if (m_mergedPackets.size()) { + outPackets = m_mergedPackets.front(); + m_mergedPackets.pop_front(); + } + // correct the video id + uint32_t video_id = 0; + for (auto packet : outPackets) { + packet->SetVideoID(video_id++); + } + return outPackets; } -VCD_OMAF_END +VCD_OMAF_END; diff --git a/src/OmafDashAccess/OmafMediaStream.h b/src/OmafDashAccess/OmafMediaStream.h index bd1f32bd..d5756b15 100644 --- a/src/OmafDashAccess/OmafMediaStream.h +++ b/src/OmafDashAccess/OmafMediaStream.h @@ -32,198 +32,281 @@ //! Created on May 22, 2019, 2:22 PM //! - #ifndef OMAFMEDIASTREAM_H #define OMAFMEDIASTREAM_H +#include + #include "general.h" -#include "OmafReader.h" + +#include "MediaPacket.h" #include "OmafAdaptationSet.h" #include "OmafExtractor.h" -#include "MediaPacket.h" +#include "OmafReader.h" +#include "OmafTilesStitch.h" +#include VCD_OMAF_BEGIN -class OmafMediaStream{ -public: - //! - //! \brief construct - //! - OmafMediaStream(); - - //! - //! \brief de-construct - //! - virtual ~OmafMediaStream(); - -public: - - //! - //! \brief update the start number of the segment for dynamical mode - //! \param nAvailableStartTime used to calculate start number when accessed - //! mpd the first: (now - nAvailableStartTime)/segment_duration + Adaption_Strart_number - //! \return - int UpdateStartNumber(uint64_t nAvailableStartTime); - - //! - //! \brief download initialize segment for each AdaptationSet - //! - int DownloadInitSegment(); - - //! - //! \brief download all segments for all AdaptationSets. - //! - int DownloadSegments(); - - //! - //! \brief Add extractor Adaptation Set - //! - int AddExtractor(OmafExtractor* pAS); - - //! - //! \brief Add normal Adaptation Set - //! - int AddAdaptationSet(OmafAdaptationSet* pAS); - - //! - //! \brief Initialize the stream - //! - int InitStream(std::string type); - - //! - //! \brief SetMainAdaptationSet if there is - //! - void SetMainAdaptationSet(OmafAdaptationSet* as){ - mMainAdaptationSet = as; +class OmafReaderManager; +class OmafDashSegmentClient; + +class OmafMediaStream { + public: + //! + //! \brief construct + //! + OmafMediaStream(); + + //! + //! \brief de-construct + //! + virtual ~OmafMediaStream(); + + public: + void SetOmafReaderMgr(std::shared_ptr mgr) noexcept; + //! + //! \brief update the start number of the segment for dynamical mode + //! \param nAvailableStartTime used to calculate start number when accessed + //! mpd the first: (now - nAvailableStartTime)/segment_duration + Adaption_Strart_number + //! \return + int UpdateStartNumber(uint64_t nAvailableStartTime); + + int SetupSegmentSyncer(const OmafDashParams& params); + //! + //! \brief download initialize segment for each AdaptationSet + //! + int DownloadInitSegment(); + + //! 
+ //! \brief download all segments for all AdaptationSets. + //! + int DownloadSegments(); + + //! + //! \brief Add extractor Adaptation Set + //! + int AddExtractor(OmafExtractor* pAS); + + //! + //! \brief Add normal Adaptation Set + //! + int AddAdaptationSet(OmafAdaptationSet* pAS); + + //! + //! \brief Initialize the stream + //! + int InitStream(std::string type); + + //! + //! \brief SetMainAdaptationSet if there is + //! + void SetMainAdaptationSet(OmafAdaptationSet* as) { mMainAdaptationSet = as; } + + //! + //! \brief SetExtratorAdaptationSet if there is + //! + void SetExtratorAdaptationSet(OmafAdaptationSet* as) { mExtratorAdaptationSet = as; } + + //! + //! \brief Seek to special segment and is is valid in static mode + //! + int SeekTo(int seg_num); + + //! + //! \brief Set EOS for the stream + //! + int SetEOS(bool eos) { + m_bEOS = eos; + return 0; + }; + + //! + //! \brief get all extractors relative to this stream + //! + std::map GetExtractors() { return mExtractors; }; + + //! + //! \brief get all Adaptation set relative to this stream + //! + std::map GetMediaAdaptationSet() { return mMediaAdaptationSet; }; + + //! + //! \brief Update selected extractor after viewport changed + //! + int UpdateEnabledExtractors(std::list extractors); + + //! + //! \brief Update selected tile tracks after viewport changed + //! + int UpdateEnabledTileTracks(std::map selectedTiles); + + int EnableAllAudioTracks(); + + //! + //! \brief Get count of tracks + //! + int GetTrackCount(); + + //! + //! \brief Get/Set stream ID + //! + void SetStreamID(int streamID) { mStreamID = streamID; }; + int GetStreamID() { return mStreamID; }; + + //! + //! \brief get Stream information + //! + DashStreamInfo* GetStreamInfo() { return m_pStreamInfo; }; + + //! + //! \brief get current selected extractors + //! + std::list GetEnabledExtractor() { + std::lock_guard lock(mCurrentMutex); + std::list enabledExtractor(mCurrentExtractors.begin(), mCurrentExtractors.end()); + return enabledExtractor; + }; + + //std::map GetSelectedTileTracks() { + // std::lock_guard lock(mCurrentMutex); + // std::map selectedTileTracks = m_selectedTileTracks.front(); + // return selectedTileTracks; + //} + + int32_t GetExtractorSize() { + std::lock_guard lock(mCurrentMutex); + int32_t size = mCurrentExtractors.size(); + return size; + }; + + int32_t GetTotalExtractorSize() { return mExtractors.size(); }; + + void ClearEnabledExtractors() { mCurrentExtractors.clear(); }; + + OmafExtractor* AddEnabledExtractor(int extractorTrackIdx) { + auto it = mExtractors.find(extractorTrackIdx); + if (it != mExtractors.end()) { + mCurrentExtractors.push_back(it->second); + return (it->second); + } else { + return NULL; } + }; - //! - //! \brief SetExtratorAdaptationSet if there is - //! - void SetExtratorAdaptationSet(OmafAdaptationSet* as){ - mExtratorAdaptationSet = as; - } + //! + //! \brief Check whether extractor tracks exists + //! + bool HasExtractor() { return !(0 == mExtractors.size()); }; - //! - //! \brief Seek to special segment and is is valid in static mode - //! - int SeekTo( int seg_num ); - - //! - //! \brief Set EOS for the stream - //! - int SetEOS(bool eos) { m_bEOS = eos; return 0;}; - - //! - //! \brief get all extractors relative to this stream - //! - std::map GetExtractors() { - return mExtractors; - }; - - //! - //! \brief get all Adaptation set relative to this stream - //! - std::map GetMediaAdaptationSet() { - return mMediaAdaptationSet; - }; - - //! - //! 
\brief Update selected extractor after viewport changed - //! - int UpdateEnabledExtractors(std::list extractors); - - //! - //! \brief Get count of tracks - //! - int GetTrackCount(); - - //! - //! \brief Get/Set stream ID - //! - void SetStreamID(int streamID) { mStreamID = streamID; }; - int GetStreamID() { return mStreamID; }; - - //! - //! \brief get Stream information - //! - DashStreamInfo* GetStreamInfo(){ return m_pStreamInfo; }; - - //! - //! \brief get current selected extractors - //! - std::list GetEnabledExtractor() - { - pthread_mutex_lock(&mCurrentMutex); - std::list enabledExtractor (mCurrentExtractors.begin(), mCurrentExtractors.end()); - pthread_mutex_unlock(&mCurrentMutex); - return enabledExtractor; - }; - - int32_t GetExtractorSize() {return mCurrentExtractors.size(); }; - - int32_t GetTotalExtractorSize() {return mExtractors.size(); }; - - void ClearEnabledExtractors() { mCurrentExtractors.clear(); }; - - OmafExtractor* AddEnabledExtractor(int extractorTrackIdx) - { - auto it = mExtractors.find(extractorTrackIdx); - if (it != mExtractors.end()) - { - mCurrentExtractors.push_back(it->second); - return (it->second); - } - else - { - return NULL; - } - }; - - //! - //! \brief Check whether extractor tracks exists - //! - bool HasExtractor(){ return !( 0==mExtractors.size()); }; - - //! - //! \brief Get segment duration - //! - uint64_t GetSegmentDuration() { return m_pStreamInfo ? m_pStreamInfo->segmentDuration : 0; }; - - uint32_t GetStreamWidth() {return m_pStreamInfo ? m_pStreamInfo->width : 0;}; - - uint32_t GetStreamHeight() {return m_pStreamInfo ? m_pStreamInfo->height : 0;}; - - uint32_t GetRowSize(){return m_pStreamInfo ? m_pStreamInfo->tileRowNum : 0;}; - - uint32_t GetColSize(){return m_pStreamInfo ? m_pStreamInfo->tileColNum : 0;}; + //! + //! \brief Get segment duration + //! + uint64_t GetSegmentDuration() { return m_pStreamInfo ? m_pStreamInfo->segmentDuration : 0; }; -private: - //! - //! \brief UpdateStreamInfo - //! - void UpdateStreamInfo( ); + uint32_t GetStreamWidth() { return m_pStreamInfo ? m_pStreamInfo->width : 0; }; - //! - //! \brief SetupExtratorDependency - //! - void SetupExtratorDependency(); + uint32_t GetStreamHeight() { return m_pStreamInfo ? m_pStreamInfo->height : 0; }; -private: - std::map mMediaAdaptationSet; // mExtractors; // mCurrentExtractors; //source_resolution[0].width : 0; }; + + uint32_t GetStreamHighResHeight() { return m_pStreamInfo ? m_pStreamInfo->source_resolution[0].height : 0; }; + + uint32_t GetRowSize() { return m_pStreamInfo ? m_pStreamInfo->tileRowNum : 0; }; + + uint32_t GetColSize() { return m_pStreamInfo ? m_pStreamInfo->tileColNum : 0; }; + + uint32_t GetSegmentNumber() { return m_activeSegmentNum;}; + + bool IsExtractorEnabled() { return m_enabledExtractor; }; + void SetEnabledExtractor(bool enabledExtractor) { m_enabledExtractor = enabledExtractor; }; + + void SetSources(std::map sources) { m_sources = sources; }; + + void SetNeedVideoParams(bool needParams) { m_needParams = needParams; }; + + void SetMaxStitchResolution(uint32_t width, uint32_t height) { m_stitch->SetMaxStitchResolution(width, height); }; + + void SetSegmentNumber( uint32_t seg_num ) { m_activeSegmentNum = seg_num; } ; + + std::list GetOutTilesMergedPackets(); + + MediaType GetStreamMediaType() { return m_pStreamInfo->stream_type; }; + + void Close(); + + private: + //! + //! \brief UpdateStreamInfo + //! + OMAF_STATUS UpdateStreamInfo(); + + //! + //! \brief SetupExtratorDependency + //! 
+ void SetupExtratorDependency(); + + int32_t StartTilesStitching(); + + static void* TilesStitchingThread(void* pThis); + + int32_t TilesStitching(); + +private: + OmafMediaStream& operator=(const OmafMediaStream& other) { return *this; }; + OmafMediaStream(const OmafMediaStream& other) { /* do not create copies */ }; + + private: + // mMediaAdaptationSet; + // mExtractors; + // mCurrentExtractors; + // omaf_reader_mgr_; + //> m_selectedTileTracks; + uint64_t m_tileSelTimeLine; + std::map> m_selectedTileTracks; + + bool m_hasTileTracksSelected; + // m_sources; + //> m_mergedPackets; + + bool m_needParams; + //& - //! compatible Brands information - //! [in] uint32_t - //! initialization Segment Id - //! [in] uint32_t - //! segment Id //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getCompatibleBrands(std::vector& compatibleBrands, uint32_t initializationSegmentId = 0, uint32_t segmentId = UINT32_MAX) const = 0; //! - //! \brief Get Track Informations + //! \brief Get the track information for all tracks + //! after one segment file is parsed and OmafReader + //! is initialized //! - //! \param [out] std::vector& - //! Track Informations + //! \param [out] trackInfos + //! track information for all tracks //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getTrackInformations(std::vector& trackInfos) const = 0; //! - //! \brief Get Display Width + //! \brief Get picture display width //! - //! \param [in] uint32_t - //! trackId - //! [out] uint32_t& - //! Display Width + //! \param [in] trackId + //! index of specific track + //! \param [out] displayWidth + //! picture display width //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getDisplayWidth(uint32_t trackId, uint32_t& displayWidth) const = 0; //! - //! \brief Get Display Height + //! \brief Get picture display height //! - //! \param [in] uint32_t - //! trackId - //! [out] uint32_t& - //! Display Height + //! \param [in] trackId + //! index of specific track + //! \param [out] displayHeight + //! picture display height //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getDisplayHeight(uint32_t trackId, uint32_t& displayHeight) const = 0; //! - //! \brief Get Display Width FP + //! \brief Get fixed point picture display width //! - //! \param [in] uint32_t - //! trackId - //! [out] uint32_t& - //! Display Width + //! \param [in] trackId + //! index of specific track + //! \param [out] displayWidth + //! fixed point picture display width //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getDisplayWidthFP(uint32_t trackId, uint32_t& displayWidth) const = 0; //! - //! \brief Get Display Height FP + //! \brief Get fixed point picture display height //! - //! \param [in] uint32_t - //! trackId - //! [out] uint32_t& - //! Display Height + //! \param [in] trackId + //! index of specific track + //! \param [out] displayHeight + //! fixed point picture display height //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getDisplayHeightFP(uint32_t trackId, uint32_t& displayHeight) const = 0; //! - //! \brief Get Width of a sample + //! \brief Get width of specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample Id - //! 
[out] uint32_t& - //! Width + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] width + //! width of specified sample in pixels //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getWidth(uint32_t trackId, uint32_t sampleId, uint32_t& width) const = 0; //! - //! \brief Get Height of a sample + //! \brief Get height of specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample Id - //! [out] uint32_t& - //! Height + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] height + //! height of specified sample in pixels //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getHeight(uint32_t trackId, uint32_t sampleId, uint32_t& height) const = 0; + virtual int32_t getDims(uint32_t trackId, uint32_t sampleId, uint32_t& width, uint32_t& height) const = 0; //! - //! \brief Get Playback Duration In Seconds + //! \brief Get playback duration of specified track, + //! and unit is second //! - //! \param [in] uint32_t - //! track Id - //! [out] double& - //! duration In Seconds + //! \param [in] trackId + //! index of specific track + //! \param [out] durationInSecs + //! playback duration in second //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPlaybackDurationInSecs(uint32_t trackId, double& durationInSecs) const = 0; //! - //! \brief Get Track Sample List By Type + //! \brief Get samples list of specified sample frame type from + //! the specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] VCD::OMAF::TrackSampleType - //! sample Types - //! [out] std::vector& - //! sample Id array + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleType + //! specified sample frame type, like reference frame type + //! \param [out] sampleIds + //! samples list //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getTrackSampleListByType(uint32_t trackId, VCD::OMAF::TrackSampleType sampleType, std::vector& sampleIds) const = 0; //! - //! \brief Get Track Sample Type + //! \brief Get sample type of specified sample in + //! the specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::FourCC& - //! track sample box type + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] trackSampleBoxType + //! detailed sample type //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getTrackSampleType(uint32_t trackId, uint32_t sampleId, VCD::OMAF::FourCC& trackSampleBoxType) const = 0; //! - //! \brief Get Extractor Track Sample data + //! \brief Get complete data for specified sample in + //! the specified extractor track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] char* - //! memory Buffer - //! [out] uint32_t& - //! memory Buffer Size - //! [in] bool - //! has video Byte Stream Headers or not + //! \param [in] trackId + //! index of specific extractor track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] memoryBuffer + //! 
pointer to the allocated memory to store + //! the sample data + //! \param [out] memoryBufferSize + //! size of sample data + //! \param [in] videoByteStreamHeaders + //! whether to insert NAL unit start codes into + //! sample data //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getExtractorTrackSampleData(uint32_t trackId, uint32_t sampleId, @@ -275,21 +307,24 @@ class OmafReader { bool videoByteStreamHeaders = true ) = 0; //! - //! \brief Get Track Sample data + //! \brief Get complete data for specified sample in + //! the specified normal track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] char* - //! memory Buffer - //! [out] uint32_t& - //! memory Buffer Size - //! [in] bool - //! has video Byte Stream Headers or not + //! \param [in] trackId + //! index of specific normal track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] memoryBuffer + //! pointer to the allocated memory to store + //! the sample data + //! \param [out] memoryBufferSize + //! size of sample data + //! \param [in] videoByteStreamHeaders + //! whether to insert NAL unit start codes into + //! sample data //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getTrackSampleData(uint32_t trackId, uint32_t sampleId, @@ -298,322 +333,350 @@ class OmafReader { bool videoByteStreamHeaders = true) = 0; //! - //! \brief Get Track Sample offset + //! \brief Get track sample data offset and length for + //! the specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] uint64_t& - //! sample Offset - //! [out] uint32_t& - //! sample Length + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] sampleOffset + //! output sample offset + //! \param [out] sampleLength + //! output sample length //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getTrackSampleOffset(uint32_t trackId, uint32_t sampleId, uint64_t& sampleOffset, uint32_t& sampleLength) = 0; //! - //! \brief Get Decoder Configuration + //! \brief Get media codec related specific information, + //! like SPS, PPS and so on, for specified sample in + //! specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] std::vector& - //! decoder Information + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] decoderInfos + //! output media codec related specific information //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getDecoderConfiguration(uint32_t trackId, uint32_t sampleId, std::vector& decoderInfos) const = 0; //! - //! \brief Get Track Time stamps + //! \brief Get display time stamp of each sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [out] std::vector& - //! timestamps + //! \param [in] trackId + //! index of specific track + //! \param [out] timestamps + //! output time stamps for each sample //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getTrackTimestamps(uint32_t trackId, std::vector& timestamps) const = 0; //! - //! \brief Get Track Time stamps of sample + //! 
\brief Get display time stamp of the specified sample + //! in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] std::vector& - //! timestamps + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of the specified sample + //! \param [out] timestamps + //! output time stamps for the sample //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getTimestampsOfSample(uint32_t trackId, uint32_t sampleId, std::vector& timestamps) const = 0; //! - //! \brief Get Samples In Decoding Order + //! \brief Get samples in decoding sequence in specified track, + //! gotten samples are presented by TimestampIDPair structure, + //! that is pair //! - //! \param [in] uint32_t - //! track Id - //! [out] std::vector& - //! sample Decoding Order + //! \param [in] trackId + //! index of specific track + //! \param [out] sampleDecodingOrder + //! output samples in decoding sequence //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getSamplesInDecodingOrder(uint32_t trackId, std::vector& sampleDecodingOrder) const = 0; //! - //! \brief Get decoder code type + //! \brief Get decoder code type for specified sample + //! in specified track, like "hvc1" and so on. //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] FourCC& - //! decoder Code Type + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] decoderCodeType + //! output decoder code type //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getDecoderCodeType(uint32_t trackId, uint32_t sampleId, FourCC& decoderCodeType) const = 0; //! - //! \brief Get sample duration + //! \brief Get duration for specified sample + //! in specified track, in milliseconds. //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] uint32_t& - //! sample Duration + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] sampleDuration + //! output sample duration //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getSampleDuration(uint32_t trackId, uint32_t sampleId, uint32_t& sampleDuration) const = 0; public: //! - //! \brief Get sample duration + //! \brief Get audio channel layout box information for + //! specified sample in specified 'chnl' track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] uint32_t& - //! sample Duration + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] chProperty + //! output channel property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertyChnl(uint32_t trackId, uint32_t sampleId, VCD::OMAF::chnlProperty& chProperty) const = 0; - + //! - //! \brief Get Property Spatial Audio + //! \brief Get spatial audio box information for + //! specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::SpatialAudioProperty& - //! spatial audio property + //! \param [in] trackId + //! index of specific track + //! 
\param [in] sampleId + //! index of specified sample + //! \param [out] spatialaudioproperty + //! output spatial audio property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertySpatialAudio(uint32_t trackId, uint32_t sampleId, VCD::OMAF::SpatialAudioProperty& spatialaudioproperty) const = 0; //! - //! \brief Get Property Stereo Scopic 3D + //! \brief Get stereo scopic 3D information for spherical video for + //! specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::StereoScopic3DProperty& - //! stereo scopic property + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] stereoscopicproperty + //! output stereo scopic 3D property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertyStereoScopic3D(uint32_t trackId, uint32_t sampleId, VCD::OMAF::StereoScopic3DProperty& stereoscopicproperty) const = 0; //! - //! \brief Get Property Spherical Video V1 + //! \brief Get spherical video V1 information for + //! specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::SphericalVideoV1Property& - //! spherical property + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] sphericalproperty + //! output spherical video V1 property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertySphericalVideoV1(uint32_t trackId, uint32_t sampleId, VCD::OMAF::SphericalVideoV1Property& sphericalproperty) const = 0; //! - //! \brief Get Property Spherical Video V2 + //! \brief Get spherical video V2 information for + //! specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::SphericalVideoV2Property& - //! spherical property + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] sphericalproperty + //! output spherical video V2 property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertySphericalVideoV2(uint32_t trackId, uint32_t sampleId, VCD::OMAF::SphericalVideoV2Property& sphericalproperty) const = 0; //! - //! \brief Get Property Region Wise Packing + //! \brief Get region wise packing information for + //! specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] RegionWisePacking* - //! rwpk + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] rwpk + //! output region wise packing property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertyRegionWisePacking(uint32_t trackId, uint32_t sampleId, RegionWisePacking *rwpk) const = 0; //! - //! \brief Get Property Coverage Information + //! \brief Get content coverage information for + //! specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::CoverageInformationProperty& - //! 
covi Property + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] coviProperty + //! output content coverage property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertyCoverageInformation(uint32_t trackId, uint32_t sampleId, VCD::OMAF::CoverageInformationProperty& coviProperty) const = 0; //! - //! \brief Get Property Projection Format + //! \brief Get projection format information for + //! specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::ProjectionFormatProperty& - //! projection Format Property + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] projectionFormatProperty + //! output projection format property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertyProjectionFormat(uint32_t trackId, uint32_t sampleId, VCD::OMAF::ProjectionFormatProperty& projectionFormatProperty) const = 0; //! - //! \brief Get Property Scheme Types + //! \brief Get scheme type information for + //! specified sample in specified track //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::SchemeTypesProperty& - //! scheme Types Property + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] schemeTypesProperty + //! output scheme type property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertySchemeTypes(uint32_t trackId, uint32_t sampleId, VCD::OMAF::SchemeTypesProperty& schemeTypesProperty) const = 0; //! - //! \brief Get Property Stereo Video Configuration + //! \brief Get stereo video information for + //! specified sample in specified track, + //! only podv scheme is supported //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::PodvStereoVideoConfiguration& - //! stereo Video Property + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] stereoVideoProperty + //! output stereo video property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertyStereoVideoConfiguration(uint32_t trackId, uint32_t sampleId, VCD::OMAF::PodvStereoVideoConfiguration& stereoVideoProperty) const = 0; //! - //! \brief Get Property Rotation + //! \brief Get rotation information for + //! specified sample in specified track, //! - //! \param [in] uint32_t - //! track Id - //! [in] uint32_t - //! sample id - //! [out] VCD::OMAF::Rotation& - //! Rotation Property + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] rotationProperty + //! output podv rotation property //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t getPropertyRotation(uint32_t trackId, uint32_t sampleId, VCD::OMAF::Rotation& rotationProperty) const = 0; + //! - //! \brief Get Property Rotation + //! \brief Get the map //! //! \return std::map - //! return Map Init Track + //! map of //! 
std::map getMapInitTrk() const {return mMapInitTrk;} public: //! - //! \brief parse Initialization Segment + //! \brief Parse specified initial segment //! - //! \param [in] OmafSegment* - //! stream Interface - //! [in] uint32_t - //! init Segment Id + //! \param [in] streamInterface + //! pointer to specified initial segment + //! \param [in] initSegmentId + //! index of specified initial segment, + //! corresponding to specified track //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t parseInitializationSegment(OmafSegment* streamInterface, uint32_t initSegmentId) = 0; //! - //! \brief invalidate Initialization Segment + //! \brief Invalidate specified initial segment + //! Disable the data buffer pointer to the + //! specified initial segment, then the data + //! from the segment can not be accessed any longer //! - //! \param [in] uint32_t - //! init Segment Id + //! \param [in] initSegmentId + //! index of specified initial segment, + //! corresponding to specified track //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t invalidateInitializationSegment(uint32_t initSegmentId) = 0 ; //! - //! \brief parse Segment + //! \brief Parse specified segment for specified track //! - //! \param [in] OmafSegment* - //! stream Interface - //! [in] uint32_t - //! init segment id - //! [in] uint32_t - //! segment id - //! [in] uint64_t - //! earliest PTS in TS + //! \param [in] streamInterface + //! pointer to specified segment handler + //! \param [in] initSegmentId + //! index of specified initial segment, this index + //! is corresponding to track index + //! \param [in] segmentId + //! index of specified segment + //! \param [in] earliestPTSinTS + //! the earliest presentation time in timescale for + //! the specified sample //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t parseSegment( OmafSegment* streamInterface, uint32_t initSegmentId, @@ -621,32 +684,37 @@ class OmafReader { uint64_t earliestPTSinTS = UINT64_MAX) = 0; //! - //! \brief invalidate Segment + //! \brief Invalidate specified segment for specified track + //! Disable the data buffer pointer to the specified + //! segment, then the data from the segment can not + //! be accessed any longer //! - //! \param [in] uint32_t - //! init Segment Id - //! [in] uint32_t - //! segment id + //! \param [in] initSegmentId + //! index of specified initial segment, which is + //! corresponding to track index + //! \param [in] segmentId + //! index of specified segment //! //! \return int32_t - //! return value + //! ERROR_NONE if success, else failed reason //! virtual int32_t invalidateSegment(uint32_t initSegmentId, uint32_t segmentId) = 0; + //! - //! \brief set Map Init Track + //! \brief Set the map to + //! input map //! - //! \param [in] std::map - //! map + //! \param [in] map + //! input map of type //! - //! \return int32_t - //! return value + //! \return void //! 
void setMapInitTrk(std::map map){mMapInitTrk = map;} + private: - std::map mMapInitTrk; // mMapInitTrk; //!< the map of }; VCD_OMAF_END; #endif /* OMAFREADER_H */ - diff --git a/src/OmafDashAccess/OmafReaderManager.cpp b/src/OmafDashAccess/OmafReaderManager.cpp index 0108c02e..cdeb4afe 100644 --- a/src/OmafDashAccess/OmafReaderManager.cpp +++ b/src/OmafDashAccess/OmafReaderManager.cpp @@ -34,947 +34,1909 @@ */ #include "OmafReaderManager.h" + #include "OmafMP4VRReader.h" +#include "OmafMediaSource.h" +#include "OmafReader.h" +#include "common.h" + #include +#include +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ +#include "../trace/MtHQ_tp.h" +#include "../trace/Bandwidth_tp.h" +#endif +#endif VCD_OMAF_BEGIN -#define STATUS_UNKNOWN 0 -#define STATUS_STOPPED 1 -#define STATUS_RUNNING 2 -#define STATUS_STOPPING 3 -#define STATUS_SEEKING 4 +// not support concurrency +class OmafPacketParams : public VCD::NonCopyable { + friend OmafSegmentNode; -static uint16_t GetTrackId(uint32_t id) -{ - return (id & 0xffff); -} + public: + using Ptr = std::shared_ptr; -static uint32_t GetCombinedTrackId(uint16_t trackId, uint16_t initSegId) -{ - return ((initSegId << 16) | trackId); -} + public: + OmafPacketParams(){}; + virtual ~OmafPacketParams(){}; -OmafReaderManager::OmafReaderManager() -{ - mCurTrkCnt = 0; - mEOS = false; - mSource = NULL; - mStatus = STATUS_UNKNOWN; - mReader = NULL; - mInitSegParsed = false; - memset(mVPS, 0, 256); - mVPSLen = 0; - memset(mSPS, 0, 256); - mSPSLen = 0; - memset(mPPS, 0, 256); - mPPSLen = 0; - mWidth = 0; - mHeight = 0; - mReadSync = false; -} - -OmafReaderManager::~OmafReaderManager() -{ - Close(); -} + public: + int init(std::shared_ptr reader, uint32_t reader_trackId, uint32_t sampleId) noexcept; -int OmafReaderManager::Initialize( OmafMediaSource* pSource ) -{ - mCurTrkCnt = 0; - mEOS = false; - mSource = pSource; - mStatus = STATUS_STOPPED; - mReader = new OmafMP4VRReader(); - //this->StartThread(); - return ERROR_NONE; -} + private: + std::vector params_; -int OmafReaderManager::Close() -{ - if(mStatus == STATUS_RUNNING || mStatus == STATUS_SEEKING){ - mStatus = STATUS_STOPPING; - this->Join(); + // vps_; + + // sps_; + + // pps_; + + //; + + public: + OmafAudioPacketParams(){}; + virtual ~OmafAudioPacketParams(){}; + + public: + int init(std::shared_ptr reader, uint32_t reader_trackId, uint32_t sampleId) noexcept; + + void writeADTSHdr(uint32_t frameSize); + + private: + int unPackUnsignedIntValue(uint8_t bitsNum, uint32_t *value); + void packOneBit(bool value); + void packUnsignedIntValue(uint8_t bitsNum, uint32_t value); + + std::vector params_; + + uint32_t objType_ = 0; + + uint32_t frequencyIdx_ = 0; + + uint32_t channelCfg_ = 0; + + int32_t curr_bit_pos_ = 0; + + bool binit_ = false; +}; + +// FIXME use better class name? 
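The OmafAudioPacketParams helper above (writeADTSHdr, packOneBit, packUnsignedIntValue, objType_, frequencyIdx_, channelCfg_) evidently assembles an ADTS header for each AAC frame so that raw MP4 audio samples can be handed downstream as self-describing ADTS frames. Below is a minimal standalone sketch of that bit layout, assuming the standard 7-byte header without CRC; it illustrates the general ADTS format only, not the patch's implementation.

```cpp
#include <array>
#include <cstdint>

// Illustrative only: build a 7-byte ADTS header (no CRC) for one AAC frame.
// profile is the AAC object type minus 1, freqIdx the sampling-frequency index,
// chanCfg the channel configuration, payloadSize the raw AAC frame size in bytes.
static std::array<uint8_t, 7> MakeAdtsHeader(uint8_t profile, uint8_t freqIdx,
                                             uint8_t chanCfg, uint32_t payloadSize) {
  const uint32_t frameLen = payloadSize + 7;  // frame length includes the header itself
  std::array<uint8_t, 7> h{};
  h[0] = 0xFF;                                                        // syncword 0xFFF ...
  h[1] = 0xF1;                                                        // ... MPEG-4, layer 00, no CRC
  h[2] = static_cast<uint8_t>((profile << 6) | (freqIdx << 2) | (chanCfg >> 2));
  h[3] = static_cast<uint8_t>(((chanCfg & 0x3) << 6) | ((frameLen >> 11) & 0x3));
  h[4] = static_cast<uint8_t>((frameLen >> 3) & 0xFF);
  h[5] = static_cast<uint8_t>(((frameLen & 0x7) << 5) | 0x1F);        // buffer fullness = 0x7FF (VBR)
  h[6] = 0xFC;                                                        // fullness low bits, 1 raw data block
  return h;
}
```

For 48 kHz stereo AAC-LC this would be called as MakeAdtsHeader(1 /*AAC-LC*/, 3 /*48 kHz*/, 2 /*stereo*/, frameSize).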
+class OmafSegmentNode : public VCD::NonCopyable { + public: + using Ptr = std::shared_ptr; + using WPtr = std::weak_ptr; + + public: + OmafSegmentNode(std::shared_ptr mgr, OmafDashMode mode, ProjectionFormat projFmt, std::shared_ptr reader, + std::shared_ptr segment, size_t depends_size = 0, bool isExtractor = false) + : omaf_reader_mgr_(mgr), + mode_(mode), + projection_fmt_(projFmt), + reader_(reader), + segment_(segment), + depends_size_(depends_size), + bExtractor_(isExtractor) {} + + virtual ~OmafSegmentNode() { + OMAF_LOG(LOG_INFO, "Release the segment node %s packet size=%lld\n", to_string().c_str(), media_packets_.size()); + // relase packet + while (media_packets_.size()) { + auto p = media_packets_.front(); + if (p) { + delete p; + } + media_packets_.pop(); } + } - SAFE_DELETE(mReader); - releaseAllSegments(); - releasePacketQueue(); + public: + int start(void) noexcept; + int parse(void) noexcept; + int stop(void) noexcept; - for(auto &it:m_readSegMap) - { - std::map initSegNormalSeg = it.second; - for (auto& itRmSeg : initSegNormalSeg) - { - OmafSegment *rmSeg = itRmSeg.second; - delete rmSeg; - rmSeg = NULL; - } - m_readSegMap.erase(it.first); + // int getPacket(std::unique_ptr &pPacket, bool needParams) noexcept; + int getPacket(MediaPacket *&pPacket, bool requireParams) noexcept; + int packetQueueSize(void) const noexcept { return media_packets_.size(); } + std::string to_string() const noexcept { + std::stringstream ss; + + ss << "node, timeline=" << getTimelinePoint(); + + if (segment_) { + ss << ", " << segment_->to_string(); } - m_readSegMap.clear(); + return ss.str(); + }; - return ERROR_NONE; -} + public: + uint32_t getSegId() const noexcept { + if (segment_.get() != nullptr) { + return segment_->GetSegID(); + } + return 0; + } -int OmafReaderManager::AddInitSegment( OmafSegment* pInitSeg, uint32_t& nInitSegID ) -{ - if(NULL == mReader) return ERROR_NULL_PTR; + uint32_t getTrackId() const noexcept { + if (segment_.get() != nullptr) { + return segment_->GetTrackId(); + } + return 0; + } - ScopeLock readerLock(mReaderLock); + uint32_t getInitSegId() const noexcept { + if (segment_.get() != nullptr) { + return segment_->GetInitSegID(); + } + return 0; + } + int64_t getTimelinePoint() const noexcept { + if (segment_.get() != nullptr) { + return segment_->GetTimelinePoint(); + } + return 0; + } + uint64_t getPTS() { + if (media_packets_.size()) { + return media_packets_.front()->GetPTS(); + } + return 0; + } + void clearPacketByPTS(uint64_t pts) { + while (media_packets_.size()) { + auto &packet = media_packets_.front(); + if (packet && (packet->GetPTS() >= pts)) { + break; + } + if (packet) + { + delete packet; + packet = NULL; + } + media_packets_.pop(); + } + } - nInitSegID = mCurTrkCnt++; - int32_t result = mReader->parseInitializationSegment(pInitSeg, nInitSegID); - if (result != 0) - { - LOG(ERROR)<<"parse initialization segment failed! result= "<GetMediaType(); + } + return MediaType_NONE; + } + + const std::chrono::steady_clock::time_point &startTime() const noexcept { return start_time_; }; + bool checkTimeout(int32_t ms) const noexcept { + auto now = std::chrono::steady_clock::now(); + std::chrono::milliseconds time_span = std::chrono::duration_cast(now - start_time_); + return time_span.count() >= ms ? 
true : false; + } + size_t dependsSize() const noexcept { return depends_size_; } + bool isExtractor() const noexcept { return bExtractor_; } + bool isReady() const noexcept; + size_t GetSamplesNum() { return samples_num_; }; + + // FIXME, who own the media packets + // const std::list packets() const { return media_packets_; } + + void pushDepends(OmafSegmentNode::Ptr node) { depends_.push_back(std::move(node)); } + + bool operator==(OmafSegment::Ptr segment) { return this->segment_ == segment; } + + private: + int parseSegmentStream(std::shared_ptr reader) noexcept; + int removeSegmentStream(std::shared_ptr reader) noexcept; + int cachePackets(std::shared_ptr reader) noexcept; + std::shared_ptr findTrackInformation(std::shared_ptr reader) noexcept; + bool findSampleIndexRange(std::shared_ptr, size_t &begin, size_t &end) noexcept; + OmafPacketParams::Ptr getPacketParams() { + auto reader_mgr = omaf_reader_mgr_.lock(); + if (reader_mgr) { + return reader_mgr->getPacketParams(segment_->GetQualityRanking()); } + return nullptr; + } + void setPacketParams(OmafPacketParams::Ptr params) { + auto reader_mgr = omaf_reader_mgr_.lock(); + if (reader_mgr) { + reader_mgr->setPacketParams(segment_->GetQualityRanking(), std::move(params)); + } + } - pInitSeg->SetInitSegID( nInitSegID ); - pInitSeg->SetSegID( nInitSegID ); + OmafPacketParams::Ptr getPacketParamsForExtractors() { + auto reader_mgr = omaf_reader_mgr_.lock(); + if (reader_mgr) { + return reader_mgr->getPacketParamsForExtractors(segment_->GetTrackId()); + } + return nullptr; + } + void setPacketParamsForExtractors(OmafPacketParams::Ptr params) { + auto reader_mgr = omaf_reader_mgr_.lock(); + if (reader_mgr) { + reader_mgr->setPacketParamsForExtractors(segment_->GetTrackId(), std::move(params)); + } + } - mMapSegCnt[nInitSegID] = 0; - ///get track information if all initialize segmentation has been parsed - if(mCurTrkCnt == mSource->GetTrackCount()){ - mReader->getTrackInformations( this->mTrackInfos ); - mLock.lock(); - UpdateSourceTrackID(); - mReader->setMapInitTrk(mMapInitTrk); - SetupStatusMap(); - //mLock.lock(); - mInitSegParsed = true; - mLock.unlock(); + OmafAudioPacketParams::Ptr getPacketParamsForAudio() { + auto reader_mgr = omaf_reader_mgr_.lock(); + if (reader_mgr) { + return reader_mgr->getPacketParamsForAudio(segment_->GetTrackId()); + } + return nullptr; + } + void setPacketParamsForAudio(OmafAudioPacketParams::Ptr params) { + auto reader_mgr = omaf_reader_mgr_.lock(); + if (reader_mgr) { + reader_mgr->setPacketParamsForAudio(segment_->GetTrackId(), std::move(params)); } + } - return ERROR_NONE; -} + private: + std::weak_ptr omaf_reader_mgr_; -void OmafReaderManager::UpdateSourceTrackID() -{ - for(auto it=mTrackInfos.begin(); it != mTrackInfos.end(); it++){ - TrackInformation *trackInfo = *it; - for( int i = 0; i < mSource->GetStreamCount(); i++ ){ - OmafMediaStream* pStream = mSource->GetStream(i); - std::map pMediaAS = pStream->GetMediaAdaptationSet(); - auto as_it = pMediaAS.begin(); - for( ; as_it != pMediaAS.end(); as_it++){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*) as_it->second; - if(pAS->GetInitSegment()->GetSegID() == trackInfo->initSegId){ - uint16_t actualTrackId = GetTrackId(trackInfo->trackId); - pAS->SetTrackNumber(actualTrackId); - mMapInitTrk[trackInfo->initSegId] = actualTrackId;//trackInfo->trackId; - mMapSegCnt[trackInfo->initSegId] = 0; - mMapSegStatus[actualTrackId].sampleIndex.mCurrentAddSegment = 0; - mMapSegStatus[actualTrackId].sampleIndex.mCurrentReadSegment = 1; - 
mMapSegStatus[actualTrackId].sampleIndex.mGlobalSampleIndex = 0; - mMapSegStatus[actualTrackId].sampleIndex.mGlobalStartSegment = 0; - mMapSegStatus[actualTrackId].sampleIndex.mSegmentSampleIndex = 0; - mMapSegStatus[actualTrackId].listActiveSeg.clear(); - break; - } - } + // dash mode + OmafDashMode mode_ = OmafDashMode::EXTRACTOR; - if (as_it != pMediaAS.end()) - { - break; - } + ProjectionFormat projection_fmt_ = ProjectionFormat::PF_ERP; - std::map pExtratorAS = pStream->GetExtractors(); - for(auto extractor_it = pExtratorAS.begin(); extractor_it != pExtratorAS.end(); extractor_it++){ - OmafExtractor* pExAS = (OmafExtractor*) extractor_it->second; - if(pExAS->GetInitSegment()->GetSegID() == trackInfo->initSegId){ - uint16_t actualTrackId = GetTrackId(trackInfo->trackId); - pExAS->SetTrackNumber(actualTrackId); - mMapInitTrk[trackInfo->initSegId] = actualTrackId;//trackInfo->trackId; - - mMapSegCnt[trackInfo->initSegId] = 0; - mMapSegStatus[actualTrackId].sampleIndex.mCurrentAddSegment = 0; - mMapSegStatus[actualTrackId].sampleIndex.mCurrentReadSegment = 1; - mMapSegStatus[actualTrackId].sampleIndex.mGlobalSampleIndex = 0; - mMapSegStatus[actualTrackId].sampleIndex.mGlobalStartSegment = 0; - mMapSegStatus[actualTrackId].sampleIndex.mSegmentSampleIndex = 0; - mMapSegStatus[actualTrackId].listActiveSeg.clear(); + // omaf reader + std::weak_ptr reader_; - break; - } - } - } - } -} + // this dash source related segment + OmafSegment::Ptr segment_; -int OmafReaderManager::AddSegment( OmafSegment* pSeg, uint32_t nInitSegID, uint32_t& nSegID) -{ - if(NULL == mReader) return ERROR_NULL_PTR; + // use shared ptr or unique ptr + std::vector depends_; - mLock.lock(); + std::chrono::steady_clock::time_point start_time_; - int64_t segCnt = -1; - // update the segment count for this track (get according to nInitSegID) with last element of m_readSegMap - // , if it is not chose before - if(pSeg->IsReEnabled()) - { - if(m_readSegMap.size()) - segCnt = pSeg->GetSegCount();//m_readSegMap.rbegin()->first; - else - LOG(WARNING)<<"viewport changed but size of m_readSegMap is 0, failed to update segment count!!"<> media_packets_; + std::queue media_packets_; + + // OmafPacketParams::Ptr packet_params; + + const size_t depends_size_ = 0; + + const bool bExtractor_ = false; + + size_t samples_num_ = 0; +}; + +uint32_t buildDashTrackId(uint32_t id) noexcept { return id & static_cast(0xffff); } - mMapSegCnt[nInitSegID] = segCnt - 1; +uint32_t buildReaderTrackId(uint32_t trackId, uint32_t initSegId) noexcept { return (initSegId << 16) | trackId; } + +OMAF_STATUS OmafReaderManager::Initialize(OmafMediaSource *pSource) noexcept { + try { + if (pSource == nullptr) { + OMAF_LOG(LOG_ERROR, "Invalid media source!\n"); + return ERROR_INVALID; } - nSegID = ++(mMapSegCnt[nInitSegID]); - //LOG(INFO)<<"now nSegID = "<IsReEnabled() = "<IsReEnabled()<<", segCnt = "<first == nSegID) - { - break; - } + reader_ = std::make_shared(); + if (reader_.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to create the omaf mp4 vr reader!\n"); + return ERROR_INVALID; } + breader_working_ = true; + segment_reader_worker_ = std::thread(&OmafReaderManager::threadRunner, this); + + return ERROR_NONE; + + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to init the client reader, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafReaderManager::Close() noexcept { + try { + OMAF_LOG(LOG_INFO, "Close the omaf reader manager!\n"); + breader_working_ = false; - if (it == m_readSegMap.end()) { - std::map 
initSegNormalSeg; - initSegNormalSeg.insert(std::make_pair(nInitSegID, pSeg)); - m_readSegMap.insert(std::make_pair(nSegID, initSegNormalSeg)); + std::lock_guard lock(segment_opening_mutex_); + segment_opening_list_.clear(); } - else + { - std::map* initSegNormalSeg; - initSegNormalSeg = &(m_readSegMap[nSegID]); - initSegNormalSeg->insert(std::make_pair(nInitSegID, pSeg)); + std::lock_guard lock(segment_opened_mutex_); + segment_opened_list_.clear(); + segment_opened_cv_.notify_all(); } - mLock.unlock(); + { + std::lock_guard lock(segment_parsed_mutex_); + segment_parsed_list_.clear(); + } - /// need to check segment completion for each extractor in a stream; - UpdateSegmentStatus(nInitSegID, nSegID, segCnt); + if (segment_reader_worker_.joinable()) { + breader_working_ = false; + segment_reader_worker_.join(); + } return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to close the client reader, ex: %s\n", ex.what()); + return ERROR_INVALID; + } } -int OmafReaderManager::ParseSegment(uint32_t nSegID, uint32_t nInitSegID) -{ - if(NULL == mReader) return ERROR_NULL_PTR; +OMAF_STATUS OmafReaderManager::OpenInitSegment(std::shared_ptr pInitSeg) noexcept { + try { + if (pInitSeg.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "The init segment is empty!\n"); + return ERROR_INVALID; + } - int ret = ERROR_NONE; +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + //trace + const char *trackType = "init_track"; + uint64_t streamSize = pInitSeg->GetStreamSize(); + char tileRes[128] = { 0 }; + snprintf(tileRes, 128, "%s", "none"); + tracepoint(bandwidth_tp_provider, packed_segment_size, 0, trackType, tileRes, 0, streamSize); +#endif +#endif + + pInitSeg->RegisterStateChange([this](std::shared_ptr pInitSeg, OmafSegment::State state) { + this->initSegmentStateChange(std::move(pInitSeg), state); + }); + OMAF_STATUS ret = pInitSeg->Open(dash_client_); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to open the init segment, code= %d\n", ret); + return ret; + } - std::map initSegNormalSeg; - initSegNormalSeg = m_readSegMap[nSegID]; - OmafSegment *pSeg = initSegNormalSeg[nInitSegID]; - if(!pSeg) - { - LOG(ERROR) << "cannot get segment with ID "< pInitSeg) noexcept { + try { + if (pInitSeg.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "The init segment is empty!\n"); + return ERROR_INVALID; } + pInitSeg->SetState(OmafSegment::State::OPEN_SUCCES); + this->initSegmentStateChange(std::move(pInitSeg), OmafSegment::State::OPEN_SUCCES); - ret = mReader->parseSegment(pSeg, nInitSegID, nSegID ); + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to add the local init segment, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} - if( 0 != ret ) - { - LOG(ERROR) << "parseSegment return error "< pSeg, bool isExtracotr) noexcept { + try { + size_t depends_size = 0; + auto d_it = initSegId_depends_map_.find(pSeg->GetInitSegID()); + if (d_it != initSegId_depends_map_.end()) { + depends_size = d_it->second.size(); } - for (int i = 0; i < mSource->GetStreamCount(); i++) - { - OmafMediaStream *pStream = mSource->GetStream(i); - if (pStream->HasExtractor()) - { - std::list extractors = pStream->GetEnabledExtractor(); - for (auto it = extractors.begin(); it != extractors.end(); it++) - { - OmafExtractor *pExt = (OmafExtractor*)(*it); + pSeg->RegisterStateChange([this](std::shared_ptr segment, OmafSegment::State state) { + this->normalSegmentStateChange(std::move(segment), state); + }); - uint32_t extractorTrackId = pExt->GetTrackNumber(); - OmafSegment 
*initSegment = pExt->GetInitSegment(); - uint32_t initSegIndex = initSegment->GetInitSegID(); - if (nInitSegID == initSegIndex) - { - if ((uint32_t)(mMapSegStatus[extractorTrackId].segStatus[nSegID]) == (mMapSegStatus[extractorTrackId].depTrackIDs.size() + 1)) - { - std::vector readTrackInfos; - mReader->getTrackInformations( readTrackInfos ); - mSegTrackInfos[nSegID] = readTrackInfos; - } - } - } + OmafSegmentNode::Ptr new_node = std::make_shared(shared_from_this(), work_params_.mode_, work_params_.proj_fmt_, reader_, + std::move(pSeg), depends_size, isExtracotr); + { + std::unique_lock lock(segment_opening_mutex_); + + bool newTimeslide = true; + for (auto &nodeset : segment_opening_list_) { + if (nodeset.timeline_point_ == new_node->getTimelinePoint()) { + nodeset.segment_nodes_.push_back(new_node); + newTimeslide = false; + break; } + } // end for segment_opening_list_ + + if (newTimeslide) { + OmafSegmentNodeTimedSet nodeset; + nodeset.timeline_point_ = new_node->getTimelinePoint(); + nodeset.create_time_ = std::chrono::steady_clock::now(); + nodeset.segment_nodes_.push_back(new_node); + segment_opening_list_.emplace_back(nodeset); + } + } + + OMAF_STATUS ret = new_node->start(); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to open the segment, code= %d\n", ret); + return ret; } return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to add segment, ex: %s\n", ex.what()); + return ERROR_INVALID; + } } -void OmafReaderManager::SetupStatusMap() -{ - for( int i = 0; i < mSource->GetStreamCount(); i++ ){ - OmafMediaStream* pStream = mSource->GetStream(i); - std::map pExtratorAS = pStream->GetExtractors(); - for(auto as_it = pExtratorAS.begin(); as_it != pExtratorAS.end(); as_it++){ - OmafExtractor* pExAS = (OmafExtractor*) as_it->second; - int trackID = pExAS->GetTrackNumber(); - std::list listDepTracks = pExAS->GetDependTrackID(); - mMapSegStatus[trackID].depTrackIDs = listDepTracks; - } +OMAF_STATUS OmafReaderManager::OpenLocalSegment(std::shared_ptr segment, bool isExtractor) noexcept { + try { + size_t depends_size = 0; + auto d_it = initSegId_depends_map_.find(segment->GetInitSegID()); + if (d_it != initSegId_depends_map_.end()) { + depends_size = d_it->second.size(); } -} -void OmafReaderManager::UpdateSegmentStatus(uint32_t nInitSegID, uint32_t nSegID, int64_t segCnt) -{ - mLock.lock(); - int trackId = mMapInitTrk[nInitSegID]; - if (mMapSegStatus[trackId].depTrackIDs.size()) + OmafSegmentNode::Ptr new_node = std::make_shared(shared_from_this(), work_params_.mode_, work_params_.proj_fmt_, reader_, + segment, depends_size, isExtractor); { - if (nSegID > mMapSegStatus[trackId].segStatus.size()) - { - mMapSegStatus[trackId].segStatus[nSegID] = 0; - } - mMapSegStatus[trackId].segStatus[nSegID]++; - // only update mCurrentReadSegment if segCnt is updated with viewport changed - if(segCnt != -1) - { - mMapSegStatus[trackId].sampleIndex.mCurrentReadSegment = segCnt; + std::unique_lock lock(segment_opening_mutex_); + + bool newTimeslide = true; + for (auto &nodeset : segment_opening_list_) { + if (nodeset.timeline_point_ == new_node->getTimelinePoint()) { + nodeset.segment_nodes_.push_back(new_node); + newTimeslide = false; + break; } + } // end for segment_opening_list_ + + if (newTimeslide) { + OmafSegmentNodeTimedSet nodeset; + nodeset.timeline_point_ = new_node->getTimelinePoint(); + nodeset.create_time_ = std::chrono::steady_clock::now(); + nodeset.segment_nodes_.push_back(new_node); + segment_opening_list_.emplace_back(nodeset); + } } + 
segment->SetState(OmafSegment::State::OPEN_SUCCES); + this->normalSegmentStateChange(std::move(segment), OmafSegment::State::OPEN_SUCCES); - for( auto it=mMapSegStatus.begin(); it!=mMapSegStatus.end(); it++ ){ - for(auto id = mMapSegStatus[it->first].depTrackIDs.begin(); - id != mMapSegStatus[it->first].depTrackIDs.end(); - id++ ){ - if( *id == mMapInitTrk[nInitSegID] ){ - if (nSegID > mMapSegStatus[it->first].segStatus.size()) - { - mMapSegStatus[it->first].segStatus[nSegID] = 0; - } - mMapSegStatus[it->first].segStatus[nSegID]++; - mMapSegStatus[it->first].listActiveSeg.push_back(nSegID); - mMapSegStatus[it->first].sampleIndex.mCurrentAddSegment = nSegID; - break; + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to add segment, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +OMAF_STATUS OmafReaderManager::GetNextPacket(uint32_t trackID, MediaPacket *&pPacket, bool requireParams) noexcept { + try { + OMAF_STATUS ret = ERROR_NONE; + + bool bpacket_readed = false; + + { + std::unique_lock lock(segment_parsed_mutex_); + + // 1. read the required packet + for (auto &nodeset : segment_parsed_list_) { + std::list::iterator it = nodeset.segment_nodes_.begin(); + while (it != nodeset.segment_nodes_.end()) { + auto &node = *it; + //OMAF_LOG(LOG_INFO, "Require trackid=%u, node trackid=%u\n", trackID, node->getTrackId()); + if (node->getTrackId() == trackID) { + ret = node->getPacket(pPacket, requireParams); + if (ret == ERROR_NONE) { + bpacket_readed = true; } + timeline_point_ = node->getTimelinePoint(); + //OMAF_LOG(LOG_INFO, "timeline_point_ is %ld in GetNextPacket, bpacket_readed %d\n", timeline_point_, bpacket_readed); + if (0 == node->packetQueueSize()) { + //OMAF_LOG(LOG_INFO, "Node count=%d. %s\n", node.use_count(), node->to_string().c_str()); + nodeset.segment_nodes_.erase(it); + } + + break; + } + + it++; } + if (bpacket_readed) + break; + + } + } + if (!bpacket_readed) { + // FIXME, this may a bug for using the timeline point as segment number + if (work_params_.stream_type_ == DASH_STREAM_DYNMIC || !checkEOS(timeline_point_)) { + pPacket = nullptr; + + ret = ERROR_NULL_PACKET; + } else { + ret = ERROR_NONE; + pPacket = new MediaPacket(); + pPacket->SetEOS(true); + } } - mLock.unlock(); + // 2. sync timeline point for outside reading + // drop all dashset whose timeline point is less than timeline point + if (timeline_point_ != -1) { + OMAF_LOG(LOG_INFO, "To clear the timeline point < %ld\n", timeline_point_); + clearOlderSegmentSet(timeline_point_); + std::lock_guard lock(segment_parsed_mutex_); + std::list::iterator it = segment_parsed_list_.begin(); + while (it != segment_parsed_list_.end() && it->timeline_point_ < timeline_point_) { + it = segment_parsed_list_.erase(it); // 'it' will move to next when calling erase + } + } + return ret; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to read frame for trackid=%u, ex: %s\n", trackID, ex.what()); + return ERROR_INVALID; + } } -int OmafReaderManager::Seek( ) -{ - DashMediaInfo info; - mSource->GetMediaInfo( &info ); - int type = info.streaming_type; - if(type != 1) return ERROR_INVALID; - - /// seek - mStatus=STATUS_SEEKING; +OMAF_STATUS OmafReaderManager::GetNextPacketWithPTS(uint32_t trackID, uint64_t pts, MediaPacket *&pPacket, bool requireParams) noexcept { + try { + OMAF_STATUS ret = ERROR_NONE; - releaseAllSegments( ); - releasePacketQueue(); + bool bpacket_readed = false; - mStatus=STATUS_RUNNING; + { + std::unique_lock lock(segment_parsed_mutex_); + + // 1. 
read the required packet + for (auto &nodeset : segment_parsed_list_) {//loop on different timeline + std::list::iterator it = nodeset.segment_nodes_.begin(); + while (it != nodeset.segment_nodes_.end()) { //loop on different node (track) + auto &node = *it; + //OMAF_LOG(LOG_INFO, "Require trackid=%u, node trackid=%u\n", trackID, node->getTrackId()); + if (node->getTrackId() == trackID) { + ret = node->getPacket(pPacket, requireParams); + if (ret == ERROR_NONE) { + if (pPacket->GetPTS() == pts) + { + bpacket_readed = true; + timeline_point_ = node->getTimelinePoint(); + } + } - return ERROR_NONE; -} + if (0 == node->packetQueueSize()) { + //OMAF_LOG(LOG_INFO, "Node count=%d. %s\n", node.use_count(), node->to_string().c_str()); + nodeset.segment_nodes_.erase(it); + } -void OmafReaderManager::RemoveTrackFromPacketQueue(list& trackIDs) -{ - mPacketLock.lock(); - for(auto &it : trackIDs) - { - PacketQueue pPackQ = mPacketQueues[it]; - for (std::list::iterator iter = pPackQ.begin() ; iter != pPackQ.end(); iter++) + break; + } + it++; + } + if (bpacket_readed) { - MediaPacket *mediaPacket = *iter; - delete mediaPacket; - mediaPacket = NULL; + break; } - mPacketQueues.erase(it); + } + } + if (!bpacket_readed) { + // FIXME, this may a bug for using the timeline point as segment number + if (work_params_.stream_type_ == DASH_STREAM_DYNMIC || !checkEOS(timeline_point_)) { + pPacket = nullptr; + + ret = ERROR_NULL_PACKET; + } else { + ret = ERROR_NONE; + pPacket = new MediaPacket(); + pPacket->SetEOS(true); + } + } + + // 2. sync timeline point for outside reading + // drop all dashset whose timeline point is less than timeline point + if (timeline_point_ != -1) { + OMAF_LOG(LOG_INFO, "To clear the timeline point < %ld\n", timeline_point_); + clearOlderSegmentSet(timeline_point_); + std::lock_guard lock(segment_parsed_mutex_); + std::list::iterator it = segment_parsed_list_.begin(); + while (it != segment_parsed_list_.end() && it->timeline_point_ < timeline_point_) { + it = segment_parsed_list_.erase(it); // 'it' will move to next when calling erase + } } - mPacketLock.unlock(); + return ret; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to read frame for trackid=%u, ex: %s\n", trackID, ex.what()); + return ERROR_INVALID; + } } -int OmafReaderManager::GetNextFrame( int trackID, MediaPacket*& pPacket, bool needParams ) -{ - mPacketLock.lock(); - if( mPacketQueues[trackID].empty()){ - pPacket = NULL; - mPacketLock.unlock(); - return ERROR_NULL_PACKET; +inline bool OmafReaderManager::isEmpty(std::mutex &mutex, const std::list &nodes) noexcept { + try { + std::lock_guard lock(mutex); + if (nodes.empty()) { + return true; + } + if (nodes.size() > 1) { + return false; } - if( !mPacketQueues[trackID].size() ){ - mPacketLock.unlock(); - return OMAF_ERROR_END_OF_STREAM; + const OmafSegmentNodeTimedSet &node_set = nodes.front(); + if (node_set.segment_nodes_.empty()) { + return true; } - pPacket = mPacketQueues[trackID].front(); - mPacketQueues[trackID].pop_front(); - LOG(INFO)<<"========mPacketQueues size========:"<Size(); - newPacket->ReAllocatePacket(newSize); - newPacket->SetRealSize(newSize); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to check the empty, ex: %s\n", ex.what()); + return false; + } +} - char *origData = pPacket->Payload(); - char *newData = newPacket->Payload(); - if(!origData || !newData) - { - SAFE_DELETE(newPacket); - return OMAF_ERROR_NULL_PTR; +bool OmafReaderManager::checkEOS(int64_t segment_num) noexcept { + try { + if (media_source_ 
== nullptr) { + return true; + } + + bool eos = true; + + // set EOS when all stream meet eos + for (int i = 0; i < media_source_->GetStreamCount(); i++) { + OmafMediaStream *pStream = media_source_->GetStream(i); + if (pStream) { + uint64_t segmentDur = pStream->GetSegmentDuration(); + if (segmentDur == 0) { + continue; } - memcpy(newData, mVPS, mVPSLen); - memcpy(newData + mVPSLen, mSPS, mSPSLen); - memcpy(newData + mVPSLen + mSPSLen, mPPS, mPPSLen); - memcpy(newData + mVPSLen + mSPSLen + mPPSLen, origData, pPacket->Size()); - RegionWisePacking *newRwpk = new RegionWisePacking; - RegionWisePacking *pRwpk = pPacket->GetRwpk(); - if(!newRwpk || !pRwpk) - { - SAFE_DELETE(newPacket); - SAFE_DELETE(newRwpk); - return OMAF_ERROR_NULL_PTR; + double tmpSegNum = static_cast(work_params_.duration_) / 1000.0 / static_cast(segmentDur); + int64_t totalSegNum = static_cast(tmpSegNum); + totalSegNum = abs(tmpSegNum - totalSegNum) < 1e-6 ? totalSegNum : totalSegNum + 1; + if (segment_num < totalSegNum) { + eos = false; + break; } - *newRwpk = *pRwpk; - newRwpk->rectRegionPacking = new RectangularRegionWisePacking[newRwpk->numRegions]; - memcpy(newRwpk->rectRegionPacking, pRwpk->rectRegionPacking, pRwpk->numRegions * sizeof(RectangularRegionWisePacking)); - newPacket->SetRwpk(newRwpk); - delete pPacket; - pPacket = newPacket; + } } - return ERROR_NONE; + if (eos) { + if (!isEmpty(segment_opening_mutex_, segment_opening_list_)) { + OMAF_LOG(LOG_WARNING, "segment opening list is not empty!\n"); + } + } + if (eos) { + if (!isEmpty(segment_opened_mutex_, segment_opened_list_)) { + OMAF_LOG(LOG_WARNING, "segment opened list is not empty!\n"); + } + } + if (eos) { + if (!isEmpty(segment_parsed_mutex_, segment_parsed_list_)) { + OMAF_LOG(LOG_WARNING, "segment parsed list is not empty!\n"); + } + } + if (eos) { + OMAF_LOG(LOG_INFO, "Meet EOS\n"); + } + return eos; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to check the eos, ex: %s\n", ex.what()); + return true; + } } -int OmafReaderManager::ReadNextSegment( - int trackID, - uint16_t initSegID, - bool isExtractor, - std::vector readTrackInfos, - bool& segmentChanged ) -{ - if(NULL == mReader) return ERROR_NULL_PTR; - int32_t ret = ERROR_NONE; +OMAF_STATUS OmafReaderManager::GetPacketQueueSize(uint32_t trackID, size_t &size) noexcept { + try { + std::unique_lock lock(segment_parsed_mutex_); + for (auto &nodeset : segment_parsed_list_) { + for (auto &node : nodeset.segment_nodes_) { + if (node->getTrackId() == trackID) { + size = node->packetQueueSize(); + return ERROR_NONE; + } + } + } - SampleIndex *sampleIdx = &(mMapSegStatus[trackID].sampleIndex); + return ERROR_INVALID; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to read packet size for trackid=%u, ex: %s\n", trackID, ex.what()); + return ERROR_INVALID; + } +} +uint64_t OmafReaderManager::GetOldestPacketPTSForTrack(int trackId) { + try { + uint64_t oldestPTS = 0; + bool findPTS = false; + std::unique_lock lock(segment_parsed_mutex_); + for (auto &nodeset : segment_parsed_list_) { + for (auto &node : nodeset.segment_nodes_) { + if (node->getTrackId() == static_cast(trackId)) { + //return node->getPTS(); + if (!findPTS) + { + oldestPTS = node->getPTS(); + findPTS = true; + } + else + { + if (oldestPTS > node->getPTS()) + { + oldestPTS = node->getPTS(); + } + } + } + } + } - LOG(INFO) << "Begin to read segment " << sampleIdx->mCurrentReadSegment <<" for track "<trackId) == trackID) - { - trackInfo = itTrack; - break; + return oldestPTS; + } catch (const 
std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to read packet size for trackid=%u, ex: %s\n", trackId, ex.what()); + return 0; + } +} +void OmafReaderManager::RemoveOutdatedPacketForTrack(int trackId, uint64_t currPTS) { + try { + std::unique_lock lock(segment_parsed_mutex_); + for (auto &nodeset : segment_parsed_list_) { + for (auto &node : nodeset.segment_nodes_) { + if (node->getTrackId() == static_cast(trackId)) { + node->clearPacketByPTS(currPTS); } + } } - if (!trackInfo) - { - LOG(ERROR) << "The specified track is not found " << endl; - return ERROR_NOT_FOUND; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to read packet size for trackid=%d, ex: %s\n", trackId, ex.what()); + } +} + +void OmafReaderManager::initSegmentStateChange(std::shared_ptr pInitSeg, + OmafSegment::State state) noexcept { + try { + if (state != OmafSegment::State::OPEN_SUCCES) { + OMAF_LOG(LOG_ERROR, "Failed to open the init segment, state= %d\n", static_cast(state)); + OMAF_LOG(LOG_ERROR," Track id is %u\n", pInitSeg->GetTrackId()); + return; } - if (sampleIdx->mCurrentReadSegment > sampleIdx->mCurrentAddSegment) - { - LOG(ERROR) << "Can't read not added segment ! " << endl; - return OMAF_ERROR_INVALID_DATA; + // 1. parse the segment + OMAF_STATUS ret = reader_->parseInitializationSegment(pInitSeg.get(), pInitSeg->GetInitSegID()); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "parse initialization segment failed! code= %d\n", ret); + return; } - std::map segSizeMap; + initSeg_ready_count_++; - for (auto& itSample : trackInfo->samplePropertyArrays) - { - segSizeMap[itSample->segmentId] = 0; - } - int32_t beginSampleId = -1; - for (auto& itSample : trackInfo->samplePropertyArrays) - { - segSizeMap[itSample->segmentId]++; - if (beginSampleId == -1 && itSample->segmentId == sampleIdx->mCurrentReadSegment) - { - beginSampleId = itSample->id; - } + OMAF_LOG(LOG_INFO, "Parsed init seg %u\n", pInitSeg->GetInitSegID()); + // 2 get the track information when all init segment parsed + if (initSegParsedCount() == media_source_->GetTrackCount() && !IsInitSegmentsParsed()) { + buildInitSegmentInfo(); } + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to parse the init segment, ex: %s\n", ex.what()); + } +} - if(beginSampleId == -1) return OMAF_ERROR_INVALID_DATA; +void OmafReaderManager::buildInitSegmentInfo(void) noexcept { + try { + std::lock_guard lock(initSeg_mutex_); + // 1. get the track information + std::vector track_infos; + reader_->getTrackInformations(track_infos); + + // 2. go through the track information + for (auto track : track_infos) { + if (track == nullptr) { + OMAF_LOG(LOG_ERROR, "Meet empty track!\n"); + continue; + } + + // FIXME there would has bug, if more than one stream. 
+ // or we need update the logic for more than one stream + for (int i = 0; i < media_source_->GetStreamCount(); i++) { + OmafMediaStream *pStream = media_source_->GetStream(i); + + // 2.1.1 check the adaptation set + std::map pMediaAS = pStream->GetMediaAdaptationSet(); + for (auto as : pMediaAS) { + OmafAdaptationSet *pAS = (OmafAdaptationSet *)as.second; + // FIXME GetInitSegID or GetSegID + if (pAS->GetInitSegment()->GetInitSegID() == track->initSegmentId) { + auto dash_track_id = buildDashTrackId(track->trackId); + pAS->SetTrackNumber(static_cast(dash_track_id)); + pAS->GetInitSegment()->SetTrackId(dash_track_id); + initSeg_trackIds_map_[track->initSegmentId] = dash_track_id; + trackIds_initSeg_map_[dash_track_id] = track->initSegmentId; + OMAF_LOG(LOG_INFO, "Initse id=%u, trackid=%u\n", track->initSegmentId, dash_track_id); + break; + } + } // end for adaptation set loop - for ( ; beginSampleId < (int32_t)(segSizeMap[sampleIdx->mCurrentReadSegment]); beginSampleId++) - { - int sample = beginSampleId; + // 2.1.2 if has init the track, then loop to next + if (initSeg_trackIds_map_.find(track->initSegmentId) != initSeg_trackIds_map_.end()) { + break; + } - uint32_t combinedTrackId = GetCombinedTrackId(trackID, initSegID); + // 2.1.3 check the extractors + std::map pExtratorAS = pStream->GetExtractors(); + for (auto &extractor : pExtratorAS) { + OmafExtractor *pExAS = extractor.second; + // FIXME GetInitSegID or GetSegID + if (pExAS->GetInitSegment()->GetInitSegID() == track->initSegmentId) { + auto dash_track_id = buildDashTrackId(track->trackId); + pExAS->SetTrackNumber(static_cast(dash_track_id)); + pExAS->GetInitSegment()->SetTrackId(dash_track_id); + initSeg_trackIds_map_[track->initSegmentId] = dash_track_id; + trackIds_initSeg_map_[dash_track_id] = track->initSegmentId; + OMAF_LOG(LOG_INFO, "Initse id=%u, trackid=%u\n", track->initSegmentId, dash_track_id); + break; + } + } // end for extractors loop + } // end stream loop + } // end for track loop + bInitSeg_all_ready_ = true; + + // 2.2 setup the id map + setupTrackIdMap(); + + // 3.1 release the track informations + for (auto &track : track_infos) { + if (track) { + delete track; + track = nullptr; + } + } + track_infos.clear(); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to parse the init segment, ex: %s\n", ex.what()); + } +} - if (!mWidth || !mHeight) - { - ret = mReader->getWidth(combinedTrackId, sample, mWidth); - if (ret) - { - LOG(ERROR) << "Failed to get sample width !" 
<< endl; - return ret; +void OmafReaderManager::setupTrackIdMap(void) noexcept { + try { + for (int i = 0; i < media_source_->GetStreamCount(); i++) { + OmafMediaStream *pStream = media_source_->GetStream(i); + std::map pExtratorAS = pStream->GetExtractors(); + for (auto extractor : pExtratorAS) { + OmafExtractor *pExAS = extractor.second; + uint32_t track_id = pExAS->GetTrackNumber(); + std::list depends_track_ids = pExAS->GetDependTrackID(); + if (trackIds_initSeg_map_.find(track_id) != trackIds_initSeg_map_.end()) { + auto &initSeg_id = trackIds_initSeg_map_[track_id]; + initSegId_depends_map_[initSeg_id] = std::vector(); + for (auto depend_id : depends_track_ids) { + if (trackIds_initSeg_map_.find(depend_id) != trackIds_initSeg_map_.end()) { + auto depend_initSeg_id = trackIds_initSeg_map_[depend_id]; + initSegId_depends_map_[initSeg_id].push_back(depend_initSeg_id); } + } + } + } // end for extractors + } // end for media stream + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to set up track map, ex: %s\n", ex.what()); + } +} - if (mWidth == 0) - { - LOG(ERROR) << "Get invalid sample width !" << endl; - return OMAF_ERROR_INVALID_DATA; - } +void OmafReaderManager::normalSegmentStateChange(std::shared_ptr segment, + OmafSegment::State state) noexcept { + try { + // 0. check params + if (segment.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Empty segment!\n"); + return; + } - ret = mReader->getHeight(combinedTrackId, sample, mHeight); - if (ret) - { - LOG(ERROR) << "Failed to get sample height !" << endl; - return ret; - } + OmafSegmentNode::Ptr opened_dash_node; - if (mHeight == 0) - { - LOG(ERROR) << "Get invalid sample height !" << endl; - return OMAF_ERROR_INVALID_DATA; + // 1. remove from the opening list + { + std::lock_guard lock(segment_opening_mutex_); + for (auto &nodeset : segment_opening_list_) { + if (nodeset.timeline_point_ == segment->GetTimelinePoint()) { + std::list::iterator it = nodeset.segment_nodes_.begin(); + while (it != nodeset.segment_nodes_.end()) { + auto &node = (*(*it).get()); + if (node == segment) { + opened_dash_node = std::move(*it); + nodeset.segment_nodes_.erase(it); + break; } - - LOG(INFO) << "Get sample width " << mWidth << " and sample height " << mHeight << " !" << endl; + it++; + } } + } + } - MediaPacket* packet = new MediaPacket(); - uint32_t packetSize = ((mWidth * mHeight * 3) / 2 ) / 2; - packet->ReAllocatePacket(packetSize); - - if (!mVPSLen || !mSPSLen || !mPPSLen) - { - memset(mVPS, 0, 256); - memset(mSPS, 0, 256); - memset(mPPS, 0, 256); - mVPSLen = 0; - mSPSLen = 0; - mPPSLen = 0; - - std::vector parameterSets; - ret = mReader->getDecoderConfiguration(combinedTrackId, sample, parameterSets); - if (ret) - { - LOG(ERROR) << "Failed to get VPS/SPS/PPS ! 
" << endl; - return ret; - } + if (opened_dash_node.get() == nullptr) { + OMAF_LOG(LOG_WARNING, "Can't find the dash node for coming segment!\n"); + return; + } - for (auto const& parameter : parameterSets) - { - if (parameter.decodeSpecInfoType == VCD::OMAF::HEVC_VPS) - { - mVPSLen = parameter.decodeSpecInfoData.size(); - for (uint32_t i = 0; i < parameter.decodeSpecInfoData.size(); i++) - { - mVPS[i] = parameter.decodeSpecInfoData[i]; - } - } +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + //trace + if (opened_dash_node.get() != nullptr) { + if (opened_dash_node->isExtractor()) { + const char *trackType = "extractor_track"; + uint64_t streamSize = segment->GetStreamSize(); + + char tileRes[128] = { 0 }; + snprintf(tileRes, 128, "%s", "none"); + int trackIndex = segment->GetTrackId(); + uint32_t nSegID = segment->GetSegID(); + tracepoint(bandwidth_tp_provider, packed_segment_size, trackIndex, trackType, tileRes, nSegID, streamSize); + } + else { + const char *trackType = "tile_track"; + uint64_t streamSize = segment->GetStreamSize(); + + char tileRes[128] = { 0 }; + snprintf(tileRes, 128, "%s", "none"); + int trackIndex = segment->GetTrackId(); + uint32_t nSegID = segment->GetSegID(); + tracepoint(bandwidth_tp_provider, packed_segment_size, trackIndex, trackType, tileRes, nSegID, streamSize); + } + } +#endif +#endif - if (parameter.decodeSpecInfoType == VCD::OMAF::HEVC_SPS) - { - mSPSLen = parameter.decodeSpecInfoData.size(); - for (uint32_t i = 0; i < parameter.decodeSpecInfoData.size(); i++) - { - mSPS[i] = parameter.decodeSpecInfoData[i]; - } + // 2. append to the dash opened list + { + std::lock_guard lock(segment_opened_mutex_); + bool new_timeline_point = true; + if (work_params_.mode_ == OmafDashMode::EXTRACTOR) { + // this is a dash node built from extractor + // build depends based on extractor + if (opened_dash_node->isExtractor()) { + for (auto &nodeset : segment_opened_list_) { + if (nodeset.timeline_point_ == opened_dash_node->getTimelinePoint()) { + new_timeline_point = false; + std::list::iterator it = nodeset.segment_nodes_.begin(); + + // put all depends node into depend + const auto &depends = initSegId_depends_map_[opened_dash_node->getInitSegId()]; + + while (it != nodeset.segment_nodes_.end()) { + // check whether this dash node is belong the target dash's depends + auto initSeg_id = (*it)->getInitSegId(); + auto is_in_depend = false; + for (auto id : depends) { + if (id == initSeg_id) { + is_in_depend = true; + break; + } + } // end for id + + if (is_in_depend) { + // remove from the list and push to depends + opened_dash_node->pushDepends(std::move(*it)); + // it will move to next when calling erase + it = nodeset.segment_nodes_.erase(it); + } else { + it++; } - - if (parameter.decodeSpecInfoType == VCD::OMAF::HEVC_PPS) - { - mPPSLen = parameter.decodeSpecInfoData.size(); - for (uint32_t i = 0; i < parameter.decodeSpecInfoData.size(); i++) - { - mPPS[i] = parameter.decodeSpecInfoData[i]; + } // end while + + nodeset.segment_nodes_.push_back(std::move(opened_dash_node)); + break; + } // end for if same timeslide + } // end for dash_opened_list loop + } else { + // this is a dash node built from the general segment + for (auto &nodeset : segment_opened_list_) { + if (nodeset.timeline_point_ == opened_dash_node->getTimelinePoint()) { + new_timeline_point = false; + // while loop all node in the dash nodes, + // the opend_dash_node maybe added to more than one dash node who built from extractor + // the detail structure depend on the media stream's extractor logic + 
// FIXME, if one segment belong to different extractor at the same time, the logix has bug now. + // it should update the logic of parse in the dash_node + // + bool bfind_extractor = false; + for (auto &node : nodeset.segment_nodes_) { + // this is a extractor + if (node->isExtractor()) { + const auto &depends = initSegId_depends_map_[node->getInitSegId()]; + for (auto id : depends) { + // this dash node is in depends of the node + if (id == opened_dash_node->getInitSegId()) { + bfind_extractor = true; + node->pushDepends(std::move(opened_dash_node)); + break; } + } // end for auto id } + } // end for auto node + if (!bfind_extractor) { + nodeset.segment_nodes_.push_back(std::move(opened_dash_node)); + } + break; } + } // end for nodeset } - - if (isExtractor) - { - ret = mReader->getExtractorTrackSampleData(combinedTrackId, sample, (char *)(packet->Payload()), packetSize ); + } else { + // not extractor mode + for (auto &nodeset : segment_opened_list_) { + if (nodeset.timeline_point_ == opened_dash_node->getTimelinePoint()) { + nodeset.segment_nodes_.push_back(std::move(opened_dash_node)); + new_timeline_point = false; + break; + } } - else - { - ret = mReader->getTrackSampleData(combinedTrackId, sample, (char *)(packet->Payload()), packetSize ); + } + + if (new_timeline_point) { + bool to_append = true; + if (!segment_opened_list_.empty()) { + auto &tail_nodeset = segment_opened_list_.back(); + // the nodeset queue's timeline should increase one by one + if (opened_dash_node->getTimelinePoint() <= tail_nodeset.timeline_point_) { + OMAF_LOG(LOG_WARNING, "Try to insert the timeline: %lld, which <= %lld\n", opened_dash_node->getTimelinePoint(), tail_nodeset.timeline_point_); + to_append = false; + } } + if (to_append) { + OmafSegmentNodeTimedSet nodeset; + nodeset.timeline_point_ = opened_dash_node->getTimelinePoint(); + nodeset.create_time_ = std::chrono::steady_clock::now(); + nodeset.segment_nodes_.push_back(std::move(opened_dash_node)); + segment_opened_list_.emplace_back(nodeset); + } + } + + // TODO, refine the logic, + // we may send the notify by checking all segment ready for extractor mode + segment_opened_cv_.notify_all(); + } // end of append to the dash opened list + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when set up track map, ex: %s\n", ex.what()); + } +} - RegionWisePacking *pRwpk = new RegionWisePacking; +void OmafReaderManager::threadRunner() noexcept { + try { + //OMAF_LOG(LOG_INFO, "Start the reader runner!\n"); + + while (breader_working_) { + // 1. find the ready segment/dash_node opend list + OmafSegmentNode::Ptr ready_dash_node = findReadySegmentNode(); + + // 1.1 no ready dash node, then wait + if (ready_dash_node.get() == nullptr) { + std::unique_lock lock(segment_opened_mutex_); + segment_opened_cv_.wait(lock); + continue; + } + + // 2. parse the ready segment/dash_node + const int64_t timeline_point = ready_dash_node->getTimelinePoint(); + //OMAF_LOG(LOG_INFO, "Get ready segment! 
timeline=%lld\n", timeline_point); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + tracepoint(mthq_tp_provider, T4_parse_start_time, timeline_point); +#endif +#endif + OMAF_STATUS ret = ready_dash_node->parse(); + + if (ready_dash_node->getMediaType() == MediaType_Video) + { + std::lock_guard lock(segment_samples_mutex_); + std::map::iterator it; + it = samples_num_per_seg_.find(ready_dash_node->getTimelinePoint()); + if (it == samples_num_per_seg_.end()) + { + samples_num_per_seg_.insert(std::make_pair(ready_dash_node->getTimelinePoint(), ready_dash_node->GetSamplesNum())); + } + } + //samples_num_per_seg_ = ready_dash_node->GetSamplesNum(); + + // 3. move the parsed segment/dash_node to parsed list + if (ret == ERROR_NONE) { + //OMAF_LOG(LOG_INFO, "Success to parsed dash segment! timeline=%lld\n", timeline_point); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + tracepoint(mthq_tp_provider, T5_parse_end_time, timeline_point); +#endif +#endif + std::unique_lock lock(segment_parsed_mutex_); + bool new_timeline_point = true; + for (auto &nodeset : segment_parsed_list_) { + if (nodeset.timeline_point_ == timeline_point) { + nodeset.segment_nodes_.push_back(std::move(ready_dash_node)); + new_timeline_point = false; + break; + } + } + if (new_timeline_point) { + OmafSegmentNodeTimedSet nodeset; + nodeset.timeline_point_ = timeline_point; + nodeset.create_time_ = std::chrono::steady_clock::now(); + nodeset.segment_nodes_.push_back(std::move(ready_dash_node)); + segment_parsed_list_.emplace_back(nodeset); + } + segment_parsed_cv_.notify_all(); + } else { + OMAF_LOG(LOG_ERROR, "Failed to parse %s\n", ready_dash_node->to_string().c_str()); + } + + // 4. clear dash set whose timeline point older than current ready segment/dash_node + // we use simple logic to main the dash node sets + // we will remove older dash nodes + //clearOlderSegmentSet(timeline_point_); + } + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception in reader runner, ex: %s\n", ex.what()); + } - ret = mReader->getPropertyRegionWisePacking(combinedTrackId, sample, pRwpk); + OMAF_LOG(LOG_INFO, "Exit from the reader runner!\n"); +} - packet->SetRwpk(pRwpk); +OmafSegmentNode::Ptr OmafReaderManager::findReadySegmentNode() noexcept { + try { + OmafSegmentNode::Ptr ready_dash_node; + std::unique_lock lock(segment_opened_mutex_); + for (auto &nodeset : segment_opened_list_) { + //OMAF_LOG(LOG_INFO, "To find the ready node set timeline=%lld\n", nodeset.timeline_point_); + + // 1.1.1 try to find the ready node + std::list::iterator it = nodeset.segment_nodes_.begin(); + while (it != nodeset.segment_nodes_.end()) { + auto &node = *it; + if (node->isReady()) { + if (work_params_.mode_ == OmafDashMode::EXTRACTOR) { + if (node->isExtractor()) { + ready_dash_node = std::move(node); + } + else if (node->getMediaType() == MediaType_Audio) { + ready_dash_node = std::move(node); + OMAF_LOG(LOG_INFO, "Found one ready audio track segment node!\n"); + } + } else { + ready_dash_node = std::move(node); + } - if (ret == OMAF_MEMORY_TOO_SMALL_BUFFER ) - { - LOG(ERROR) << "The frame size has exceeded the maximum packet size" << endl; - return ret; + if (ready_dash_node.get() != nullptr) { + break; + } } - else if (ret) - { - LOG(ERROR) << "Failed to get packet " << (sampleIdx->mGlobalSampleIndex + beginSampleId) << " for track " << trackID << " and error is " << ret << endl; - return ret; + it++; + } // end while + + // 1.1.2 find the ready node, exit and return + if (ready_dash_node.get() != nullptr) { + 
nodeset.segment_nodes_.erase(it); + OMAF_LOG(LOG_INFO, "Get ready segment node with timeline %ld\n", nodeset.timeline_point_); + break; + } + else { + OMAF_LOG(LOG_INFO, "No ready segment node for timeline %ld\n", nodeset.timeline_point_); + } + + // 1.2.1 no ready node found, then check whether timeout + bool btimeout = true; + for (auto node : nodeset.segment_nodes_) { + if (!node->checkTimeout(work_params_.segment_timeout_ms_)) { + btimeout = false; + break; + } + } + + // 1.2.2 some node not timeout, still wait + if (!btimeout) { + // do nothing, wait the data ready + break; + } + + // 1.3 all nodes timeout, then move to next timeline point's nodeset + continue; + } // end for nodeset loop + + return ready_dash_node; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when find the ready dash node, ex: %s\n", ex.what()); + return nullptr; + } +} + +void OmafReaderManager::clearOlderSegmentSet(int64_t timeline_point) noexcept { + try { + { + // 1. clear the opening list to save the network bandwith + std::lock_guard lock(segment_opening_mutex_); + std::list::iterator it = segment_opening_list_.begin(); + while ((it != segment_opening_list_.end()) && (it->timeline_point_ < timeline_point)) { + OMAF_LOG(LOG_INFO, "Removing older dash opening list, timeline=%lld\n", it->timeline_point_); + for (auto &node : it->segment_nodes_) { + node->stop(); } - packet->SetRealSize(packetSize); - mPacketLock.lock(); - mPacketQueues[trackID].push_back(packet); - mPacketLock.unlock(); + it = segment_opening_list_.erase(it); + } } - LOG(INFO) << "Segment " << trackInfo->samplePropertyArrays[beginSampleId - 1]->segmentId << " for track " << trackID << " has been read !" << endl; - sampleIdx->mCurrentReadSegment++; - sampleIdx->mGlobalSampleIndex += beginSampleId; - LOG(INFO) << "Total read " << sampleIdx->mGlobalSampleIndex << " samples for track " << trackID <<" now !" << endl; + { + // 2. clear the opened dash node list + std::lock_guard lock(segment_opened_mutex_); + std::list::iterator it = segment_opened_list_.begin(); + while ((it != segment_opened_list_.end()) && (it->timeline_point_ < timeline_point)) { + OMAF_LOG(LOG_INFO, "Removing older dash opened list, timeline=%lld\n", it->timeline_point_); + it = segment_opened_list_.erase(it); + } + } + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when clear the older dash set, whose timeline is older than %lld, ex: %s\n", timeline_point, ex.what()); + } +} - removeSegment(initSegID, sampleIdx->mCurrentReadSegment - 1); +// FIXME, dims and vps/sps/pps may not in the same sample +int OmafPacketParams::init(std::shared_ptr reader, uint32_t reader_trackId, uint32_t sampleId) noexcept { + try { + // 1. read width and heigh + OMAF_STATUS ret = reader->getDims(reader_trackId, sampleId, width_, height_); + if (ret) { + OMAF_LOG(LOG_ERROR, "Failed to get sample dims !\n"); + return ret; + } + if (!width_ || !height_) { + OMAF_LOG(LOG_ERROR, "Failed to get the dims!\n"); + return OMAF_ERROR_INVALID_DATA; + } - SegStatus st = mMapSegStatus[trackID]; - std::list::iterator itRef = st.depTrackIDs.begin(); - for ( ; itRef != st.depTrackIDs.end(); itRef++) - { - int refTrack = *itRef; + // 2. 
read vps/sps/pps params + std::vector parameterSets; + ret = reader->getDecoderConfiguration(reader_trackId, sampleId, parameterSets); + if (ret) { + OMAF_LOG(LOG_ERROR, "Failed to get VPS/SPS/PPS !\n"); + return ret; + } - TrackInformation *refTrackInfo = nullptr; - for ( auto &itRefInfo : readTrackInfos) - { - if (GetTrackId(itRefInfo->trackId) == refTrack) - { - refTrackInfo = itRefInfo; - break; - } + for (auto const ¶meter : parameterSets) { + if (parameter.codecSpecInfoType == VCD::MP4::HEVC_VPS) { + vps_.resize(parameter.codecSpecInfoBits.size); + for (size_t i = 0; i < parameter.codecSpecInfoBits.size; i++) { + vps_[i] = parameter.codecSpecInfoBits[i]; } + } - if(refTrackInfo) removeSegment(refTrackInfo->initSegId, sampleIdx->mCurrentReadSegment - 1); - - } + if (parameter.codecSpecInfoType == VCD::MP4::HEVC_SPS) { + sps_.resize(parameter.codecSpecInfoBits.size); + for (size_t i = 0; i < parameter.codecSpecInfoBits.size; i++) { + sps_[i] = parameter.codecSpecInfoBits[i]; + } + } - for(auto &it : readTrackInfos) - { - for(uint32_t i = 0; i < it->samplePropertyArrays.size(); i++) - { - SampleInformation* sampInfo = it->samplePropertyArrays[i]; - SAFE_DELETE(sampInfo); + if (parameter.codecSpecInfoType == VCD::MP4::HEVC_PPS) { + pps_.resize(parameter.codecSpecInfoBits.size); + for (uint32_t i = 0; i < parameter.codecSpecInfoBits.size; i++) { + pps_[i] = parameter.codecSpecInfoBits[i]; } - it->samplePropertyArrays.clear(); - SAFE_DELETE(it); + } } - readTrackInfos.clear(); - return ERROR_NONE; + binit_ = vps_.size() && sps_.size() && pps_.size(); + + params_.resize(vps_.size() + sps_.size() + pps_.size()); + memcpy_s(params_.data(), params_.size(), vps_.data(), vps_.size()); + memcpy_s(params_.data() + vps_.size(), sps_.size(), sps_.data(), sps_.size()); + memcpy_s(params_.data() + vps_.size() + sps_.size(), pps_.size(), pps_.data(), pps_.size()); + + return binit_ ? 
ERROR_NONE : ERROR_INVALID; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to init the packet params, ex: %s\n", ex.what()); + binit_ = false; + return ERROR_INVALID; + } } -void OmafReaderManager::Run() +int OmafAudioPacketParams::unPackUnsignedIntValue(uint8_t bitsNum, uint32_t *value) { - bool go_on = true; + int ret = ERROR_NONE; + if ((8 - ((uint8_t)(curr_bit_pos_) % 8)) > bitsNum) + { + uint8_t tempVal = params_[(uint8_t)(curr_bit_pos_)/8]; + tempVal <<= (curr_bit_pos_ % 8); + tempVal >>= (8 - bitsNum); + *value = (uint32_t)tempVal; + } + else if ((16 - ((uint8_t)(curr_bit_pos_) % 16)) > bitsNum) + { + uint16_t tempVal1 = (uint16_t)(params_[(uint8_t)(curr_bit_pos_)/8]); + uint16_t tempVal2 = (uint16_t)(params_[(uint8_t)(curr_bit_pos_)/8 + 1]); + uint16_t tempVal = (tempVal1 << 8) | tempVal2; + tempVal <<= curr_bit_pos_; + tempVal >>= (16 - bitsNum); + *value = (uint32_t)tempVal; + } + else + { + OMAF_LOG(LOG_ERROR, "Not supported bits number %d for bits reader !\n", bitsNum); + ret = OMAF_ERROR_INVALID_DATA; + } + return ret; +} - if(NULL == mSource) return; +int OmafAudioPacketParams::init(std::shared_ptr reader, uint32_t reader_trackId, uint32_t sampleId) noexcept { + try { + std::vector parameterSets; + int ret = reader->getDecoderConfiguration(reader_trackId, sampleId, parameterSets); + if (ret) { + OMAF_LOG(LOG_ERROR, "Failed to get audio specific info !\n"); + return ret; + } - bool bSegChange = false; + for (auto const ¶meter : parameterSets) { + if (parameter.codecSpecInfoType == VCD::MP4::AudioSpecificConfig) { + params_.resize(parameter.codecSpecInfoBits.size); + for (size_t i = 0; i < parameter.codecSpecInfoBits.size; i++) { + params_[i] = parameter.codecSpecInfoBits[i]; + } + } + } - mStatus = STATUS_RUNNING; + binit_ = params_.size(); - while(go_on && mStatus != STATUS_STOPPED){ - mLock.lock(); + ret = unPackUnsignedIntValue(5, &objType_); + if (ret) + return ret; - // exit the waiting if segment is parsed or wait time is more than 10 mins - int64_t waitTime = 0; - while (!mInitSegParsed && waitTime < 600000) - { - mLock.unlock(); - ::usleep(1000); - mLock.lock(); - waitTime++; - } - mLock.unlock(); + OMAF_LOG(LOG_INFO, "Parsed audio obj type %d\n", objType_); + curr_bit_pos_ += 5; - if( mStatus==STATUS_STOPPING ){ - mStatus = STATUS_STOPPED; - break; - } + ret = unPackUnsignedIntValue(4, &frequencyIdx_); + if (ret) + return ret; - if(mStatus==STATUS_SEEKING){ - continue; - } - DashMediaInfo info; + OMAF_LOG(LOG_INFO, "Parsed audio sample rate idx %d\n", frequencyIdx_); + curr_bit_pos_ += 4; - mSource->GetMediaInfo( &info ); - int type = info.streaming_type; + ret = unPackUnsignedIntValue(4, &channelCfg_); + if (ret) + return ret; - if(type == 1 && mEOS) break; + OMAF_LOG(LOG_INFO, "Parsed audio channel configuration %d\n", channelCfg_); + curr_bit_pos_ += 4; - /// begin to read packet for each stream - for( int i = 0; i < mSource->GetStreamCount(); i++ ){ - if( mStatus==STATUS_STOPPING ){ - mStatus = STATUS_STOPPED; - break; - } - OmafMediaStream* pStream = mSource->GetStream(i); - if(pStream->HasExtractor()){ - std::list extractors = pStream->GetEnabledExtractor(); - for(auto it=extractors.begin(); it!=extractors.end(); it++){ - OmafExtractor* pExt = (OmafExtractor*)(*it); - SegStatus *st = &(mMapSegStatus[pExt->GetTrackNumber()]); - - ///if static mode, check EOS - if(type == 1){ - uint64_t segmentDur = pStream->GetSegmentDuration(); - if (segmentDur == 0) - { - return; - } - float tmpSegNum = float(info.duration) / 1000 / segmentDur; - uint32_t 
totalSegNum = abs(tmpSegNum - uint32_t(tmpSegNum)) < 1e-6 ? uint32_t(tmpSegNum) : uint32_t(tmpSegNum) + 1; - if (st->sampleIndex.mCurrentReadSegment > totalSegNum) - { - mLock.lock(); - this->mEOS = true; - mLock.unlock(); - return; - } - } + return ERROR_NONE; + } catch(const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to init audio packet params, ex: %s\n", ex.what()); + binit_ = false; + return ERROR_INVALID; + } +} - // exit the waiting if segment downloaded or wait time is more than 10 mins - int64_t waitTime = 0; - mLock.lock(); - while (st->sampleIndex.mCurrentReadSegment > st->sampleIndex.mCurrentAddSegment && mStatus!=STATUS_STOPPING && waitTime < 600000) - { - LOG(INFO) << "New segment " << st->sampleIndex.mCurrentReadSegment << " hasn't come, then wait !" << endl; - mLock.unlock(); - ::usleep(1000); - mLock.lock(); - waitTime++; - } - mLock.unlock(); +void OmafAudioPacketParams::packOneBit(bool value) +{ + --curr_bit_pos_; + if (curr_bit_pos_ == -1) + { + curr_bit_pos_ = 7; + params_.push_back(0); + } + params_[params_.size() - 1] |= (uint8_t(value) << curr_bit_pos_); +} - if( mStatus==STATUS_STOPPING ){ - mStatus = STATUS_STOPPED; - break; - } +void OmafAudioPacketParams::packUnsignedIntValue(uint8_t bitsNum, uint32_t value) +{ + for (int32_t num = (bitsNum - 1); num >= 0; --num) + { + packOneBit(((value >> num) & 1)); + } +} - if((uint32_t)(st->segStatus[st->sampleIndex.mCurrentReadSegment]) == (st->depTrackIDs.size() + 1)){ - uint16_t trackID = pExt->GetTrackNumber(); - uint16_t initSegID = 0; - for (auto& idPair : mMapInitTrk) - { - if (idPair.second == trackID) - { - initSegID = idPair.first; - break; - } - } - - - std::list::iterator itRef = st->depTrackIDs.begin(); - for ( ; itRef != st->depTrackIDs.end(); itRef++) - { - int refTrack = *itRef; - uint32_t initSegIndex = 0; - - for (auto& idPair : mMapInitTrk) - { - if (idPair.second == refTrack) - { - initSegIndex = idPair.first; - break; - } - } - - ParseSegment(st->sampleIndex.mCurrentReadSegment, initSegIndex); - - } - - ParseSegment(st->sampleIndex.mCurrentReadSegment, initSegID); - std::vector readTrackInfos = mSegTrackInfos[st->sampleIndex.mCurrentReadSegment]; - this->ReadNextSegment(trackID, initSegID, true, readTrackInfos, bSegChange); - mSegTrackInfos.erase(st->sampleIndex.mCurrentReadSegment - 1); - - RemoveReadSegmentFromMap(); - }else{ - ::usleep(100); - } - } - }else{ - std::map mapAS = pStream->GetMediaAdaptationSet(); - for(auto as_it=mapAS.begin(); as_it!=mapAS.end(); as_it++){ - OmafAdaptationSet* pAS = (OmafAdaptationSet*)(as_it->second); - SegStatus *st = &(mMapSegStatus[pAS->GetTrackNumber()]); - if(type == 1){ - uint64_t segmentDur = pStream->GetSegmentDuration(); - if (segmentDur == 0) - { - return; - } - float tmpSegNum = float(info.duration) / 1000 / segmentDur; - uint32_t totalSegNum = abs(tmpSegNum - uint32_t(tmpSegNum)) < 1e-6 ? 
uint32_t(tmpSegNum) : uint32_t(tmpSegNum) + 1; - if (st->sampleIndex.mCurrentReadSegment > totalSegNum) - { - mLock.lock(); - this->mEOS = true; - mLock.unlock(); - return; - } - } - uint16_t trackID = pAS->GetTrackNumber(); - uint16_t initSegID = 0; - for (auto& idPair : mMapInitTrk) - { - if (idPair.second == trackID) - { - initSegID = idPair.first; - break; - } - } +void OmafAudioPacketParams::writeADTSHdr(uint32_t frameSize) +{ + params_.clear(); + curr_bit_pos_ = 0; + + packUnsignedIntValue(12, 0xfff); + packUnsignedIntValue(1, 0); + packUnsignedIntValue(2, 0); + packUnsignedIntValue(1, 1); + packUnsignedIntValue(2, objType_); + packUnsignedIntValue(4, frequencyIdx_); + packUnsignedIntValue(1, 0); + packUnsignedIntValue(3, channelCfg_); + packUnsignedIntValue(1, 0); + packUnsignedIntValue(1, 0); + + packUnsignedIntValue(1, 0); + packUnsignedIntValue(1, 0); + packUnsignedIntValue(13, (frameSize + 7)); //ADTS Header size is 7 bytes + packUnsignedIntValue(11, 0x7ff); + packUnsignedIntValue(2, 0); + OMAF_LOG(LOG_INFO, "ADTS header size %ld bytes\n", params_.size()); +} - // exit the waiting if segment downloaded or wait time is more than 10 mins - int64_t waitTime = 0; - mLock.lock(); - while (st->sampleIndex.mCurrentReadSegment > st->sampleIndex.mCurrentAddSegment && mStatus!=STATUS_STOPPING && waitTime < 600000) - { - LOG(INFO) << "New segment " << st->sampleIndex.mCurrentReadSegment << " hasn't come, then wait !" << endl; - mLock.unlock(); - ::usleep(1000); - mLock.lock(); - waitTime++; - } - mLock.unlock(); +int OmafSegmentNode::parse() noexcept { + try { + clock_t lBefore = clock(); + clock_t lBefore2 = lBefore; + double dResult; - if( mStatus==STATUS_STOPPING ){ - mStatus = STATUS_STOPPED; - break; - } + auto reader = reader_.lock(); + if (reader.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "The omaf reader is empty!\n"); + return ERROR_NULL_PTR; + } - std::vector readTrackInfos = mSegTrackInfos[st->sampleIndex.mCurrentReadSegment]; - this->ReadNextSegment(trackID, initSegID, false, readTrackInfos, bSegChange); - } - } - } + OMAF_STATUS ret = ERROR_NONE; + // 1.1 calling depends to parse the segment + for (auto &node : depends_) { + ret = node->parseSegmentStream(reader); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to parse the dependent node %s. Error code=%d\n", node->to_string().c_str(), ret); + return ret; + } + } - for(int i=0; ito_string().c_str(), ret); + return ret; } + dResult = (double)(clock() - lBefore) * 1000 / CLOCKS_PER_SEC; + OMAF_LOG(LOG_INFO, "OmafSegmentNode parsing segment dependency and self time is %f ms\n", dResult); + + // 2 cache packets from the reader + lBefore = clock(); + ret = cachePackets(reader); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to read packet from %s. Error code=%d\n", this->to_string().c_str(), ret); + return ERROR_INVALID; + } + dResult = (double)(clock() - lBefore) * 1000 / CLOCKS_PER_SEC; + OMAF_LOG(LOG_INFO, "OmafSegmentNode parsing cachePackets time is %f ms\n", dResult); + + // 3.1 remove self segment from reader + lBefore = clock(); + ret = removeSegmentStream(reader); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to remove segment from reader %s. Error code=%d\n", this->to_string().c_str(), ret); + return ERROR_INVALID; + } + + // 3.2 remove segment of depends from reader + for (auto &node : depends_) { + ret = node->removeSegmentStream(reader); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to remove dependent segment form reader %s. 
Error code=%d\n", node->to_string().c_str(), ret); + return ret; + } + } + dResult = (double)(clock() - lBefore) * 1000 / CLOCKS_PER_SEC; + OMAF_LOG(LOG_INFO, "OmafSegmentNode parsing remove segment dependency and self is %f ms\n", dResult); + dResult = (double)(clock() - lBefore2) * 1000 / CLOCKS_PER_SEC; + OMAF_LOG(LOG_INFO, "OmafSegmentNode parsing time in total is %f ms\n", dResult); + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when parse the segment! ex: %s\n", ex.what()); + return ERROR_INVALID; + } } -// Keep more than 1 element in m_readSegMap for segment count update if viewport changed -void OmafReaderManager::RemoveReadSegmentFromMap() -{ - if(m_readSegMap.size() < 10) return; +int OmafSegmentNode::start(void) noexcept { + try { + OMAF_STATUS ret = ERROR_NONE; + if (segment_.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Try to open the empty segment!\n"); + return ERROR_INVALID; + } - for(auto &it:m_readSegMap) - { - if(m_readSegMap.size() < 10) - break; - std::map initSegNormalSeg = it.second; - for (auto& itRmSeg : initSegNormalSeg) - { - OmafSegment *rmSeg = itRmSeg.second; - delete rmSeg; - rmSeg = NULL; - } - m_readSegMap.erase(it.first); + auto reader_mgr = omaf_reader_mgr_.lock(); + if (reader_mgr.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "The reader manager is empty!\n"); + return ERROR_NULL_PTR; + } + + start_time_ = std::chrono::steady_clock::now(); + ret = segment_->Open(reader_mgr->dash_client_); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to open the segment!\n"); + return ret; } + + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when open the segment, ex: %s\n", ex.what()); + return ERROR_INVALID; + } } -void OmafReaderManager::releaseAllSegments( ) -{ - ScopeLock managerLock(mLock); +int OmafSegmentNode::stop(void) noexcept { + try { + OMAF_STATUS ret = ERROR_NONE; + if (segment_.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "Try to stop the empty segment!\n"); + return ERROR_INVALID; + } + ret = this->segment_->Stop(); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_WARNING, "Failed to stop the segment!\n"); + } + + for (auto &node : depends_) { + node->stop(); + } + return ret; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when stop the segment, ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +int OmafSegmentNode::parseSegmentStream(std::shared_ptr reader) noexcept { + try { + return reader->parseSegment(segment_.get(), segment_->GetInitSegID(), segment_->GetSegID()); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when parse the segment! ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} - for(auto it=mMapSegStatus.begin(); it!=mMapSegStatus.end(); it++){ +int OmafSegmentNode::removeSegmentStream(std::shared_ptr reader) noexcept { + try { + return reader->invalidateSegment(segment_->GetInitSegID(), segment_->GetSegID()); - SegStatus *s = &(it->second); - s->sampleIndex.mCurrentAddSegment = 0; - s->sampleIndex.mCurrentReadSegment = 0; - s->sampleIndex.mGlobalSampleIndex = 0; - s->sampleIndex.mGlobalStartSegment = 0; - s->sampleIndex.mSegmentSampleIndex = 0; - s->listActiveSeg.clear(); - } + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when remove the segment from reader! 
ex: %s\n", ex.what()); + return ERROR_INVALID; + } } -void OmafReaderManager::setNextSampleId(int trackID, uint32_t id, bool& segmentChanged) -{ - size_t indexInSegment = 0; +// int OmafSegmentNode::getPacket(std::unique_ptr &pPacket, bool needParams) { +int OmafSegmentNode::getPacket(MediaPacket *&pPacket, bool requireParams) noexcept { + try { + if (media_packets_.size() <= 0) { + OMAF_LOG(LOG_INFO, "There is no packets\n"); + return ERROR_NULL_PACKET; + } - TrackInformation *trackInfo = nullptr; - for (auto &itTrack : mTrackInfos) + pPacket = media_packets_.front(); + media_packets_.pop(); + if (pPacket->GetMediaType() == MediaType_Video) { - if (GetTrackId(itTrack->trackId) == trackID) - { - trackInfo = itTrack; - break; + if (requireParams) { + auto packet_params = (bExtractor_ == true) ? getPacketParamsForExtractors() : getPacketParams(); + if (packet_params.get() == nullptr || !packet_params->binit_) { + OMAF_LOG(LOG_ERROR, "Invalid VPS/SPS/PPS in getting packet !\n"); + return OMAF_ERROR_INVALID_DATA; } + pPacket->InsertParams(packet_params->params_); + pPacket->SetVPSLen(packet_params->vps_.size()); + pPacket->SetSPSLen(packet_params->sps_.size()); + pPacket->SetPPSLen(packet_params->pps_.size()); + pPacket->SetVideoHeaderSize(packet_params->params_.size()); + } } - - for (size_t index = 0; index < trackInfo->samplePropertyArrays.size(); index++) + else if (pPacket->GetMediaType() == MediaType_Audio) { - if (trackInfo->samplePropertyArrays[index]->id == id) - { - mMapSegStatus[trackID].sampleIndex.mGlobalSampleIndex = static_cast(index); - mMapSegStatus[trackID].sampleIndex.mSegmentSampleIndex = static_cast(indexInSegment); - break; + if (requireParams) { + pPacket->InsertADTSHdr(); + } + } + + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when read the frame! ex: %s\n", ex.what()); + return ERROR_INVALID; + } +} + +int OmafSegmentNode::cachePackets(std::shared_ptr reader) noexcept { + try { + OMAF_STATUS ret = ERROR_NONE; + std::shared_ptr track_info = findTrackInformation(reader); + if (track_info.get() == nullptr) { + OMAF_LOG(LOG_FATAL, "Failed to find the sepcial track information.%d\n", this->to_string().c_str()); + return ERROR_INVALID; + } + + size_t sample_begin = 0; + size_t sample_end = 0; + if (!findSampleIndexRange(track_info, sample_begin, sample_end)) { + OMAF_LOG(LOG_ERROR, "Failed to find the sample range for segment. %s\n", this->to_string().c_str()); + return ERROR_INVALID; + } + samples_num_ = sample_end - sample_begin; + OMAF_LOG(LOG_INFO, "segment %s has samples num %ld\n", this->to_string().c_str(), samples_num_); +#if 0 + if (sample_begin < 1) { + LOG(FATAL) << "The begin sample id is less than 1, whose value =" << sample_begin << "." << this->to_string() + ); + return ERROR_INVALID; + } +#endif + if (segment_->GetMediaType() == MediaType_Video) { + auto packet_params = (bExtractor_ == true) ? 
getPacketParamsForExtractors() : getPacketParams(); + for (size_t sample = sample_begin; sample < sample_end; sample++) { + uint32_t reader_track_id = buildReaderTrackId(segment_->GetTrackId(), segment_->GetInitSegID()); + + if (packet_params.get() == nullptr) { + packet_params = std::make_shared(); + } + if (!packet_params->binit_) { + ret = packet_params->init(reader, reader_track_id, sample); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to read the packet params include width/height/vps/sps/pps!\n"); + return ret; + } + if (bExtractor_) + { + this->setPacketParamsForExtractors(packet_params); + } + else + { + this->setPacketParams(packet_params); + } + } + + // cache packets + // std::shared_ptr packet = make_unique_vcd; + MediaPacket *packet = new MediaPacket(); + if (packet == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to create the packet!\n"); + return ERROR_INVALID; } - if (trackInfo->samplePropertyArrays[index]->segmentId == mMapSegStatus[trackID].sampleIndex.mCurrentReadSegment) + uint32_t packet_size = ((packet_params->width_ * packet_params->height_ * 3) >> 1) >> 1; + // FIXME, we need refine the packet buffer. we may include the vps/pps/sps here + packet->ReAllocatePacket(packet_size); + + if (mode_ == OmafDashMode::EXTRACTOR) { + ret = reader->getExtractorTrackSampleData(reader_track_id, sample, static_cast(packet->Payload()), + packet_size); + } else { + ret = reader->getTrackSampleData(reader_track_id, sample, static_cast(packet->Payload()), packet_size); + } + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to read sample data from reader, code= %d\n", ret); + SAFE_DELETE(packet); + return ret; + } + + std::unique_ptr pRwpk = make_unique_vcd(); + ret = reader->getPropertyRegionWisePacking(reader_track_id, sample, pRwpk.get()); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to read region wise packing data from reader, code= %d\n", ret); + SAFE_DELETE(packet); + return ret; + } + packet->SetRwpk(std::move(pRwpk)); + packet->SetPTS(track_info->sampleProperties.size * (segment_->GetSegID() - 1) + sample_begin + sample); + if ((sample + 1) == sample_end) { - indexInSegment++; + packet->SetSegmentEnded(true); + } + + // for later binding + packet->SetQualityRanking(segment_->GetQualityRanking()); + + if (mode_ == OmafDashMode::EXTRACTOR) { + packet->SetQualityNum(MAX_QUALITY_NUM); + vector boundLeft(1); // num of quality is limited to 2. 
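+ // boundLeft/boundTop start with a single 0 entry for the high-quality region at (0, 0);
+ // the projection-specific loops below append the packed-region offset where the
+ // low-quality area begins, which is later used to split the merged picture into
+ // per-quality SourceResolution entries.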
+ vector boundTop(1); + const RegionWisePacking &rwpk = packet->GetRwpk(); + packet->SetVideoWidth(rwpk.packedPicWidth); + packet->SetVideoHeight(rwpk.packedPicHeight); + if (projection_fmt_ == ProjectionFormat::PF_ERP) + { + for (int j = rwpk.numRegions - 1; j >= 0; j--) { + if (rwpk.rectRegionPacking[j].projRegLeft == 0 && rwpk.rectRegionPacking[j].projRegTop == 0 && + (rwpk.rectRegionPacking[j].packedRegLeft != 0) && + (rwpk.rectRegionPacking[j].packedRegTop == 0)) { + boundLeft.push_back(rwpk.rectRegionPacking[j].packedRegLeft); + boundTop.push_back(rwpk.rectRegionPacking[j].packedRegTop); + break; + } + } + } + else if (projection_fmt_ == ProjectionFormat::PF_CUBEMAP) + { + uint32_t lowTileW = 0; + uint32_t lowTileH = 0; + int j = 0; + for (j = rwpk.numRegions - 1; j >= 0; j--) { + if (rwpk.rectRegionPacking[j].projRegLeft == 0 && rwpk.rectRegionPacking[j].projRegTop == 0) + { + lowTileW = rwpk.rectRegionPacking[j].projRegWidth; + lowTileH = rwpk.rectRegionPacking[j].projRegHeight; + break; + } + } + + int lowResStartIdx = 0; + for ( ; j >= 0; j--) { + if ((rwpk.rectRegionPacking[j].projRegWidth == lowTileW) && + (rwpk.rectRegionPacking[j].projRegHeight == lowTileH) && + (rwpk.rectRegionPacking[j].packedRegLeft != 0) && + (rwpk.rectRegionPacking[j].packedRegTop == 0)) + { + lowResStartIdx = j; + } + } + boundLeft.push_back(rwpk.rectRegionPacking[lowResStartIdx].packedRegLeft); + boundTop.push_back(rwpk.rectRegionPacking[lowResStartIdx].packedRegTop); + } + + for (int idx = 0; idx < packet->GetQualityNum(); idx++) { + SourceResolution srcRes; + srcRes.qualityRanking = static_cast(idx + 1); + srcRes.top = boundTop[idx]; + srcRes.left = boundLeft[idx]; + srcRes.height = rwpk.packedPicHeight; + if (idx == 0) { + srcRes.width = boundLeft[idx + 1] - boundLeft[idx]; + } else { + srcRes.width = rwpk.packedPicWidth - boundLeft[idx]; + } + packet->SetSourceResolution(idx, srcRes); + } + } else { + packet->SetSRDInfo(segment_->GetSRDInfo()); } + + packet->SetRealSize(packet_size); + packet->SetSegID(track_info->sampleProperties[sample].segmentId); + + media_packets_.push(packet); + OMAF_LOG(LOG_INFO, "Push packet with PTS %ld for track %d\n", packet->GetPTS(), segment_->GetTrackId()); + } } -} + else if (segment_->GetMediaType() == MediaType_Audio) { + auto packet_params = getPacketParamsForAudio(); + for (size_t sample = sample_begin; sample < sample_end; sample++) { + uint32_t reader_track_id = buildReaderTrackId(segment_->GetTrackId(), segment_->GetInitSegID()); -uint32_t OmafReaderManager::removeSegment(uint32_t initSegmentId, uint32_t segmentId) -{ - LOG(INFO) << "removeSegment " << segmentId << " for track " << mMapInitTrk[initSegmentId] << endl; + if (packet_params.get() == nullptr) { + packet_params = std::make_shared(); + } + if (!packet_params->binit_) { + ret = packet_params->init(reader, reader_track_id, sample); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to read the packet params for audio track!\n"); + return ret; + } - ScopeLock managerLock(mLock); + this->setPacketParamsForAudio(packet_params); + } - int32_t result = mReader->invalidateSegment(initSegmentId, segmentId); + MediaPacket *packet = new MediaPacket(); + if (packet == nullptr) { + OMAF_LOG(LOG_ERROR, "Failed to create the packet!\n"); + return ERROR_INVALID; + } - if (result != 0){ - LOG(ERROR) << "removeSegment Failed " << segmentId << endl; - return ERROR_INVALID; - } + uint32_t chlNum = segment_->GetAudioChlNum(); + uint32_t packet_size = 1024 * chlNum; - if 
(mMapSegStatus[mMapInitTrk[initSegmentId]].listActiveSeg.size() > 0) - { - std::list::iterator it = mMapSegStatus[mMapInitTrk[initSegmentId]].listActiveSeg.begin(); + packet->ReAllocatePacket(packet_size); + + ret = reader->getTrackSampleData(reader_track_id, sample, static_cast(packet->Payload()), packet_size); + + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Failed to read sample data from reader for audio track, code= %d\n", ret); + SAFE_DELETE(packet); + return ret; + } - for ( ; it != mMapSegStatus[mMapInitTrk[initSegmentId]].listActiveSeg.end();) + packet->SetPTS(track_info->sampleProperties.size * (segment_->GetSegID() - 1) + sample_begin + sample); + if ((sample + 1) == sample_end) { - if ((uint32_t)(*it) == segmentId) - { - it = mMapSegStatus[mMapInitTrk[initSegmentId]].listActiveSeg.erase(it); - } - else - { - it++; - } + packet->SetSegmentEnded(true); } + + packet->SetMediaType(MediaType_Audio); + + packet->SetRealSize(packet_size); + + packet_params->writeADTSHdr(packet_size); + packet->SetADTSHdr(packet_params->params_); + + packet->SetSegID(track_info->sampleProperties[sample].segmentId); + + media_packets_.push(packet); + OMAF_LOG(LOG_INFO, "Push packet with PTS %ld for audio track %d\n", packet->GetPTS(), segment_->GetTrackId()); + OMAF_LOG(LOG_INFO, "Add packet size %d\n", packet_size); + } } - LOG(INFO) << "~~Now active list has segments " << mMapSegStatus[mMapInitTrk[initSegmentId]].listActiveSeg.size() << endl; + return ERROR_NONE; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when read packets! ex: %s\n", ex.what()); + return ERROR_INVALID; + } } -void OmafReaderManager::releasePacketQueue() -{ - ScopeLock packetLock(mPacketLock); - for(auto it=mPacketQueues.begin(); it!=mPacketQueues.end(); it++){ - PacketQueue queue = (*it).second; - for(auto qu_it=queue.begin(); qu_it!=queue.end(); qu_it++){ - MediaPacket* pkt = (MediaPacket*)(*qu_it); - delete pkt; - pkt = NULL; +std::shared_ptr OmafSegmentNode::findTrackInformation(std::shared_ptr reader) noexcept { + try { + std::vector track_infos; + OMAF_STATUS ret = reader->getTrackInformations(track_infos); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to get the trackinformation list from reader, code=%d\n", ret); + return nullptr; + } + + // get the required track information and release the old data + std::shared_ptr track_info; + for (auto &track : track_infos) { + if (track != nullptr) { + if (buildDashTrackId(track->trackId) == segment_->GetTrackId()) { + track_info = std::make_shared(); + *(track_info.get()) = *track; + } + delete track; + } + + track = nullptr; + } + track_infos.clear(); + + return std::move(track_info); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when find the track information! ex: %s\n", ex.what()); + return nullptr; + } +} + +bool OmafSegmentNode::findSampleIndexRange(std::shared_ptr track_info, size_t &begin, + size_t &end) noexcept { + try { + if (track_info.get() == nullptr) { + return false; + } + bool found = false; + for (size_t index = 0; index < track_info->sampleProperties.size; index++) { + if (segment_->GetSegID() == track_info->sampleProperties[index].segmentId) { + end++; + if (!found) { + found = true; + begin = track_info->sampleProperties[index].sampleId; + } + } + } + return found; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when find the start index! 
ex: %s\n", ex.what()); + return false; + } +} + +bool OmafSegmentNode::isReady() const noexcept { + try { + if (segment_.get() == nullptr) { + OMAF_LOG(LOG_ERROR, "The segment is empty!\n"); + return false; + } + + if (segment_->GetState() != OmafSegment::State::OPEN_SUCCES) { + OMAF_LOG(LOG_WARNING, "The segment is not in open success. state=%d\n", static_cast(segment_->GetState())); + return false; + } + + if (bExtractor_) { + if (depends_.size() != depends_size_) { + return false; + } + for (auto &node : depends_) { + if (!node->isReady()) { + return false; } - queue.clear(); + } } + + return true; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when check segement state. ex: %s\n", ex.what()); + return false; + } } VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafReaderManager.h b/src/OmafDashAccess/OmafReaderManager.h index b37c51f4..f86b3aac 100644 --- a/src/OmafDashAccess/OmafReaderManager.h +++ b/src/OmafDashAccess/OmafReaderManager.h @@ -33,166 +33,195 @@ //! Created on May 28, 2019, 1:41 PM //! -#ifndef OMAFEXTRATORREADER_H -#define OMAFEXTRATORREADER_H +#ifndef OMAFMP4READERMGR_H +#define OMAFMP4READERMGR_H -#include "general.h" -#include "OmafReader.h" #include "MediaPacket.h" +#include "general.h" + #include "OmafMediaSource.h" -#include "OmafDashSource.h" +#include "OmafReader.h" + +#include +#include +#include +#include +#include +#include +#include VCD_OMAF_BEGIN -typedef std::list PacketQueue; - -struct SampleIndex -{ - SampleIndex() - : mCurrentAddSegment(0) - , mCurrentReadSegment(1) - , mSegmentSampleIndex(0) - , mGlobalStartSegment(0) - , mGlobalSampleIndex(0) - {} - - uint32_t mCurrentAddSegment; - uint32_t mCurrentReadSegment; - uint32_t mSegmentSampleIndex; - uint32_t mGlobalStartSegment; - uint32_t mGlobalSampleIndex; -}; +using PacketQueue = std::list; -typedef struct SegStatus{ - SampleIndex sampleIndex; - std::list depTrackIDs; - std::map segStatus; // listActiveSeg; -}SegStatus; - -class OmafReaderManager : public Threadable{ -public: - OmafReaderManager(); - virtual ~OmafReaderManager(); -public: - //! \brief initialize the reader with MediaSource - //! - int Initialize( OmafMediaSource* pSource ); - - //! \brief close the reader - //! - int Close(); - - //! \brief add init Segment for reading after it is downloaded - //! - int AddInitSegment( OmafSegment* pInitSeg, uint32_t& nInitSegID ); - - //! \brief add Segment for reading after it is downloaded - //! - int AddSegment( OmafSegment* pSeg, uint32_t nInitSegID, uint32_t& nSegID); - - int ParseSegment(uint32_t nSegID, uint32_t nInitSegID); - - //! \brief Get Next packet from packet queue. each track has a packet queue - //! - int GetNextFrame( int trackID, MediaPacket*& pPacket, bool needParams ); - - //! \brief Get initial segments parse status. - //! - bool isAllInitSegParsed() - { - bool isParsed = false; - mLock.lock(); - isParsed = mInitSegParsed; - mLock.unlock(); - return isParsed; - }; - -public: - //! \brief call when seeking - //! - int Seek( ); - - void RemoveTrackFromPacketQueue(list& trackIDs); - -public: - //! \brief the thread routine to read packet for each active track - //! - virtual void Run(); - -private: - //! \brief read packet for trackID - //! - int ReadNextSegment( - int trackID, - uint16_t initSegID, - bool isExtractor, - std::vector readTrackInfos, - bool& segmentChanged ); - - //! \brief Setup Track information for each stream and relative adaptation set - //! - void UpdateSourceTrackID(); - - //! \brief Update SegmentStatus based on stream. 
if there is extractor in the stream, need - //! to considering the segments for each referenced segment by the extractor. Extractor - //! can work only all referenced segment are ready. - void UpdateSegmentStatus(uint32_t nInitSegID, uint32_t nSegID, int64_t segCnt); - - //! \brief setup the structure to track the IDs and status of all segment - //! - void SetupStatusMap(); - - //! \brief release all use Segment - //! - void releaseAllSegments( ); - - //! \brief remove segment for reader based on initSegmentID & SegmentID - //! - uint32_t removeSegment(uint32_t initSegmentId, uint32_t segmentId); - - //! \brief release all packets in the packet queues - //! - void releasePacketQueue(); - - //! \brief release all use Segment - //! - void setNextSampleId(int trackID, uint32_t id, bool& segmentChanged); - - void RemoveReadSegmentFromMap(); - -private: - OmafReader* mReader; // mPacketQueues; // - std::vector mTrackInfos; //> mSegTrackInfos; // mMapSegCnt; // mMapSegStatus; // mMapInitTrk; //> m_readSegMap; //> -}; +enum class OmafDashMode { EXTRACTOR = 0, LATER_BINDING = 1 }; -typedef Singleton READERMANAGER; +class OmafSegmentNode; +class OmafPacketParams; +class OmafAudioPacketParams; -VCD_OMAF_END; +struct _omafSegmentNodeTimedSet { + int64_t timeline_point_ = -1; + std::chrono::steady_clock::time_point create_time_; + std::list> segment_nodes_; +}; -#endif /* OMAFEXTRATORREADER_H */ +using OmafSegmentNodeTimedSet = struct _omafSegmentNodeTimedSet; + +class OmafReaderManager : public VCD::NonCopyable, public enable_shared_from_this { + friend OmafSegmentNode; + + public: + struct _params { + OmafDashMode mode_; + DashStreamType stream_type_ = DASH_STREAM_DYNMIC; + size_t duration_ = 0; + int32_t segment_timeout_ms_ = 3000; // ms + ProjectionFormat proj_fmt_ = ProjectionFormat::PF_ERP; + }; + + using OmafReaderParams = struct _params; + + public: + using Ptr = std::shared_ptr; + + public: + OmafReaderManager(std::shared_ptr client, OmafReaderParams params) + : dash_client_(client), work_params_(params) {} + virtual ~OmafReaderManager() { Close(); } + + public: + //! \brief initialize the reader with MediaSource + //! + OMAF_STATUS Initialize(OmafMediaSource *pSource) noexcept; + + //! \brief close the reader + //! + OMAF_STATUS Close() noexcept; + + //! \brief add init Segment for reading after it is downloaded + //! + OMAF_STATUS OpenInitSegment(std::shared_ptr pInitSeg) noexcept; + OMAF_STATUS OpenLocalInitSegment(std::shared_ptr pInitSeg) noexcept; + + //! \brief add Segment for reading after it is downloaded + //! + OMAF_STATUS OpenSegment(std::shared_ptr pSeg, bool isExtractor = false) noexcept; + OMAF_STATUS OpenLocalSegment(std::shared_ptr pSeg, bool isExtractor = false) noexcept; + + //! \brief Get Next packet from packet queue. each track has a packet queue + //! + OMAF_STATUS GetNextPacket(uint32_t trackID, MediaPacket *&pPacket, bool requireParams) noexcept; + + //! \brief Get Next packet with assigned track index and PTS from packet queue. + OMAF_STATUS GetNextPacketWithPTS(uint32_t trackID, uint64_t pts, MediaPacket *&pPacket, bool requireParams) noexcept; + + //! \brief Get mPacketQueue[trackID] size + //! + OMAF_STATUS GetPacketQueueSize(uint32_t trackID, size_t &size) noexcept; + + //! \brief Get initial segments parse status. + //! 
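+ //! \return true once every initialization segment has been parsed (i.e. bInitSeg_all_ready_ has been set).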
+ inline bool IsInitSegmentsParsed() { return bInitSeg_all_ready_.load(); }; + + uint64_t GetOldestPacketPTSForTrack(int trackId); + void RemoveOutdatedPacketForTrack(int trackId, uint64_t currPTS); + size_t GetSamplesNumPerSegmentForTimeLine(uint64_t currTimeLine) + { + size_t samples_num = 0; + { + std::lock_guard lock(segment_samples_mutex_); + std::map::iterator it; + it = samples_num_per_seg_.find(currTimeLine); + if (it != samples_num_per_seg_.end()) + { + samples_num = it->second; + } + } + + return samples_num; + } + + private: + void threadRunner() noexcept; + std::shared_ptr findReadySegmentNode() noexcept; + void clearOlderSegmentSet(int64_t timeline_point) noexcept; + bool checkEOS(int64_t segment_num) noexcept; + bool isEmpty(std::mutex &mutex, const std::list &nodes) noexcept; + + private: + inline int initSegParsedCount(void) noexcept { return initSeg_ready_count_.load(); } + void buildInitSegmentInfo(void) noexcept; + void setupTrackIdMap(void) noexcept; + + private: + void initSegmentStateChange(std::shared_ptr, OmafSegment::State) noexcept; + void normalSegmentStateChange(std::shared_ptr, OmafSegment::State) noexcept; + + std::shared_ptr getPacketParams(uint32_t qualityRanking) noexcept { + return omaf_packet_params_[qualityRanking]; + } + void setPacketParams(uint32_t qualityRanking, std::shared_ptr params) { + omaf_packet_params_[qualityRanking] = std::move(params); + } + + std::shared_ptr getPacketParamsForExtractors(uint32_t extractorTrackIdx) noexcept { + return packet_params_for_extractors_[extractorTrackIdx]; + } + void setPacketParamsForExtractors(uint32_t extractorTrackIdx, std::shared_ptr params) { + packet_params_for_extractors_[extractorTrackIdx] = std::move(params); + } + + std::shared_ptr getPacketParamsForAudio(uint32_t audioTrackIdx) noexcept { + return packet_params_for_audio_[audioTrackIdx]; + } + void setPacketParamsForAudio(uint32_t audioTrackIdx, std::shared_ptr params) { + packet_params_for_audio_[audioTrackIdx] = std::move(params); + } + + private: + std::shared_ptr dash_client_; + + OmafReaderParams work_params_; + int64_t timeline_point_ = -1; + // omaf reader + std::thread segment_reader_worker_; + bool breader_working_ = false; + + std::mutex segment_samples_mutex_; + std::map samples_num_per_seg_; + //size_t samples_num_per_seg_ = 0; + std::shared_ptr reader_; + + std::mutex segment_opening_mutex_; + std::list segment_opening_list_; + std::mutex segment_opened_mutex_; + std::condition_variable segment_opened_cv_; + std::list segment_opened_list_; + std::mutex segment_parsed_mutex_; + std::condition_variable segment_parsed_cv_; + std::list segment_parsed_list_; + + OmafMediaSource *media_source_ = nullptr; + std::map> omaf_packet_params_; + + std::map> packet_params_for_extractors_; + + std::map> packet_params_for_audio_; + + std::mutex initSeg_mutex_; + + // initSeg_trackIds_map_; + // trackIds_initSeg_map_; + // map < initSeg_id, depends_initSeg_ids> + std::map> initSegId_depends_map_; + + std::atomic_int initSeg_ready_count_{0}; + std::atomic_bool bInitSeg_all_ready_{false}; +}; +// using READERMANAGER = Singleton; +VCD_OMAF_END +#endif // OMAFMP4READERMGR_H diff --git a/src/OmafDashAccess/OmafSegment.cpp b/src/OmafDashAccess/OmafSegment.cpp index 7b6b99e8..9e4bdf5e 100644 --- a/src/OmafDashAccess/OmafSegment.cpp +++ b/src/OmafDashAccess/OmafSegment.cpp @@ -33,200 +33,152 @@ * Created on May 24, 2019, 11:07 AM */ -#include - -#include "OmafSegment.h" #include "DownloadManager.h" -#include "OmafReaderManager.h" - -VCD_OMAF_BEGIN - 
-OmafSegment::OmafSegment() -{ - pthread_mutex_init(&mMutex, NULL); - pthread_cond_init(&mCond, NULL); - mSeg = NULL; - mStoreFile = true; - mCacheFile = ""; - mStatus = SegUnknown; - mSegSize = 0; - mInitSegment = false; - mData = NULL; - mReEnabled = false; - mSegCnt = 0; - mInitSegID = 0; - mSegID = 0; -} +#include "OmafSegment.h" -OmafSegment::~OmafSegment() -{ - pthread_mutex_destroy( &mMutex ); - pthread_cond_destroy( &mCond ); +#include +#include - //SAFE_DELETE(mSeg); - mSeg->StopDownloadSegment((OmafDownloaderObserver*) this); +VCD_OMAF_BEGIN - DOWNLOADMANAGER::GetInstance()->DeleteCacheFile(mCacheFile); +std::atomic_uint32_t OmafSegment::INITSEG_ID(0); + +OmafSegment::OmafSegment(DashSegmentSourceParams ds_params, int segCnt, bool bInitSegment) + : ds_params_(ds_params), seg_count_(segCnt), bInit_segment_(bInitSegment) { + if (bInit_segment_) { + initSeg_id_ = INITSEG_ID.fetch_add(1); + seg_id_ = initSeg_id_; + } + mQualityRanking = INVALID_QUALITY_RANKING; + mMediaType = MediaType_Video; + mChlsNum = 0; + mSampleRate = 0; } -OmafSegment::OmafSegment(SegmentElement* pSeg, int segCnt, bool bInitSegment, bool reEnabled):OmafSegment() -{ - pthread_mutex_init(&mMutex, NULL); - pthread_cond_init(&mCond, NULL); - mSeg = pSeg; - mStoreFile = false; - mCacheFile = ""; - mStatus = SegUnknown; - mSegSize = 0; - mInitSegment = bInitSegment; - mReEnabled = reEnabled; - mSegCnt = segCnt; - mInitSegID = 0; - mSegID = 0; +OmafSegment::~OmafSegment() { + if (buse_stored_file_ && !cache_file_.empty()) { + DOWNLOADMANAGER::GetInstance()->DeleteCacheFile(cache_file_); + } } -int OmafSegment::StartDownload() -{ - if(NULL == mSeg) return ERROR_NULL_PTR; - - mStoreFile = DOWNLOADMANAGER::GetInstance()->UseCache(); - - mSegSize = 0; - - mStatus = SegReady; - - mSeg->StartDownloadSegment((OmafDownloaderObserver*) this); +int OmafSegment::Open(std::shared_ptr dash_client) noexcept { + try { + if (dash_client.get() == nullptr) { + return ERROR_NULL_PTR; + } + dash_client_ = std::move(dash_client); + + state_ = State::CREATE; + + // mSegElement->StartDownloadSegment((OmafDownloaderObserver *)this); + dash_client_->open( + ds_params_, [this](std::unique_ptr sb) { this->dash_stream_.push_back(std::move(sb)); }, + [this](OmafDashSegmentClient::State s) { + switch (s) { + case OmafDashSegmentClient::State::SUCCESS: + this->state_ = State::OPEN_SUCCES; + break; + case OmafDashSegmentClient::State::STOPPED: + this->state_ = State::OPEN_STOPPED; + break; + case OmafDashSegmentClient::State::TIMEOUT: + this->state_ = State::OPEN_TIMEOUT; + break; + case OmafDashSegmentClient::State::FAILURE: + this->state_ = State::OPEN_FAILED; + break; + default: + break; + } + if (this->state_change_cb_) { + this->state_change_cb_(this->shared_from_this(), this->state_); + } + }); return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when start downloading the file: %s, ex: %s\n", ds_params_.dash_url_.c_str(), ex.what()); + return ERROR_INVALID; + } } -int OmafSegment::WaitComplete() -{ - if( mStatus == SegDownloaded ) return ERROR_NONE; - - int64_t waitTime = 0; - - //printf("wait download =======\n"); - /*pthread_mutex_lock(&m_mutex); - pthread_cond_wait(&m_cond, &m_mutex); - - pthread_mutex_unlock(&m_mutex);*/ - // exit the waiting if segment downloaded or wait time is more than 10 mins - while(mStatus != SegDownloaded && waitTime < 60000){ - ::usleep(10000); - waitTime++; +int OmafSegment::Stop() noexcept { + try { + if (dash_client_.get() == nullptr) { + return ERROR_NULL_PTR; } - + 
dash_client_->remove(ds_params_); return ERROR_NONE; + } catch (const std::exception& ex) { + OMAF_LOG(LOG_ERROR, "Exception when start downloading the file: %s, ex: %s\n", ds_params_.dash_url_.c_str(), ex.what()); + return ERROR_INVALID; + } } -int OmafSegment::Open( ) -{ - return StartDownload(); -} +#if 0 +int OmafSegment::Read(uint8_t *data, size_t len) { + // if (NULL == mSegElement) return ERROR_NULL_PTR; -int OmafSegment::Open( SegmentElement* pSeg ) -{ - mSeg = pSeg; + // if (mStatus != SegDownloaded) WaitComplete(); - return Open(); + // return mSegElement->Read(data, len); } -int OmafSegment::Read(uint8_t *data, size_t len) -{ - if(NULL == mSeg) return ERROR_NULL_PTR; +int OmafSegment::Peek(uint8_t *data, size_t len) { + // if (NULL == mSegElement) return ERROR_NULL_PTR; - if(mStatus != SegDownloaded) WaitComplete(); + // if (mStatus != SegDownloaded) WaitComplete(); - return mSeg->Read(data, len); + // return mSegElement->Peek(data, len); } -int OmafSegment::Peek(uint8_t *data, size_t len) -{ - if(NULL == mSeg) return ERROR_NULL_PTR; +int OmafSegment::Peek(uint8_t *data, size_t len, size_t offset) { + // if (NULL == mSegElement) return ERROR_NULL_PTR; - if(mStatus != SegDownloaded) WaitComplete(); + // if (mStatus != SegReady) WaitComplete(); - return mSeg->Peek(data, len); + // return mSegElement->Peek(data, len, offset); } -int OmafSegment::Peek(uint8_t *data, size_t len, size_t offset) -{ - if(NULL == mSeg) return ERROR_NULL_PTR; - - if(mStatus != SegReady) WaitComplete(); - return mSeg->Peek(data, len, offset); -} - -int OmafSegment::Close() -{ - if(NULL == mSeg) return ERROR_NULL_PTR; +int OmafSegment::Close() { + // if (NULL == mSegElement) return ERROR_NULL_PTR; - if(mStatus != SegDownloading) mSeg->StopDownloadSegment((OmafDownloaderObserver*) this); + // if (mStatus != SegDownloading) + // mSegElement->StopDownloadSegment((OmafDownloaderObserver *)this); - //SAFE_DELETE( mSeg ); + // SAFE_DELETE( mSegElement ); - return ERROR_NONE; + return ERROR_NONE; } - -int OmafSegment::SaveToFile() -{ - mCacheFile = DOWNLOADMANAGER::GetInstance()->GetCacheFolder() + "/" + DOWNLOADMANAGER::GetInstance()->AssignCacheFileName(); - mFileStream.open(mCacheFile, ios::out|ios::binary); - - mData = (uint8_t*)malloc(mSegSize); - Read( mData, mSegSize ); - - mFileStream.write( (char *)mData, mSegSize); - - mFileStream.close(); - - if (mData) - { - free(mData); - mData = NULL; +#endif +int OmafSegment::CacheToFile() noexcept { + try { + std::string fileName = + ds_params_.dash_url_.substr(ds_params_.dash_url_.find_last_of('/') + 1, + ds_params_.dash_url_.length() - ds_params_.dash_url_.find_last_of('/') - 1); + cache_file_ = DOWNLOADMANAGER::GetInstance()->GetCacheFolder() + "/" + + DOWNLOADMANAGER::GetInstance()->AssignCacheFileName() + fileName; + if (!dash_stream_.cacheToFile(cache_file_)) { + OMAF_LOG(LOG_ERROR, "Failed to cache the dash to file: %s\n", cache_file_.c_str()); + return OMAF_ERROR_FILE_WRITE; } - LOG(INFO)<<"close saved cache "< +#include #include VCD_OMAF_BEGIN -typedef enum{ - SegUnknown = 0, - SegReady, - SegDownloading, - SegDownloaded, - SegAborted, - SegEOS, -}SEGSTATUS; - -class OmafSegment : public OmafDownloaderObserver { -public: - //! - //! \brief construct - //! - OmafSegment(); - - //! - //! \brief construct with SegmentElement - //! - OmafSegment( SegmentElement* pSeg, int segCnt, bool bInitSegment = false, bool reEnabled = false ); - - //! - //! \brief de-construct - //! - virtual ~OmafSegment(); - -public: - - //! - //! 
\brief basic Get/Set methods for the properties. - //! - std::string GetSegmentCacheFile() { return mCacheFile; }; - - void SetSegmentCacheFile(std::string cacheFileName) - { - mCacheFile = cacheFileName; - }; - - SEGSTATUS GetSegStatus() { return mStatus; }; - void SetSegStatus(SEGSTATUS status) { mStatus = status; }; - - void SetSegment( SegmentElement* pSeg ) { mSeg = pSeg; }; - SegmentElement* GetSegment() { return mSeg; }; - - bool bInitSegment() { return mInitSegment; }; - void SetInitSegment(bool bInit) { mInitSegment = bInit; }; - - //! - //! \brief Informs the OmafDownloaderObserver object that the download rate has changed. - //! @param bytesDownloaded the number of downloaded bytes - //! - virtual void DownloadDataNotify (uint64_t bytesDownloaded); - - //! - //! \brief Informs the OmafDownloaderObserver object that the download state has changed. - //! @param state the download state - //! - virtual void DownloadStatusNotify (DownloaderStatus state); - - //! - //! \brief Basic operation to read / write the segment data. - //! - int Open( ); - int Open( SegmentElement* pSeg ); - int Read(uint8_t *data, size_t len); - int Peek(uint8_t *data, size_t len); - int Peek(uint8_t *data, size_t len, size_t offset); - int Close(); - - void SetSegID( uint32_t id ) { mSegID = id; }; - uint32_t GetSegID() { return mSegID; }; - void SetInitSegID( uint32_t id ) { mInitSegID = id; }; - uint32_t GetInitSegID() { return mInitSegID; }; - void SetSegStored() { mStoreFile = true; }; - - bool IsReEnabled(){return mReEnabled;}; - int GetSegCount(){return mSegCnt;}; - -private: - //! - //! \brief save the memory data to file. - //! - int SaveToFile(); - - //! - //! \brief start downloading process. - //! - int StartDownload(); - - //! - //! \brief waiting for all data downloaded. - //! - int WaitComplete(); - -private: - SegmentElement* mSeg; // { + public: + using Ptr = std::shared_ptr; + + // + // @enum State + // @brief segment state + // + enum class State { + CREATE = 0, + OPEN = 1, + OPEN_SUCCES = 2, + OPEN_STOPPED = 3, + + OPEN_TIMEOUT = 4, + OPEN_FAILED = 5, + PARSE_OK = 6, + PARSE_FAILED = 7, + DESTROYED = 8 + }; + + // + // @method + // @brief segment state change callback + // @brief segment state + // + using OnStateChange = std::function; + + public: + // + // @brief constructor with dash source + // + OmafSegment(DashSegmentSourceParams ds_params, int segCnt, bool bInitSegment = false); + + //! + //! \brief de-construct + //! 
+ virtual ~OmafSegment(); + + public: + offset_t ReadStream(char* buffer, offset_t size) override { + if (!buse_stored_file_) { + return dash_stream_.ReadStream(buffer, size); + } else { + if (!mFileStream.is_open()) { + mFileStream.open(this->GetSegmentCacheFile().c_str(), ios_base::binary | ios_base::in); + } + mFileStream.read(buffer, size); + std::streamsize readCnt = mFileStream.gcount(); + return (offset_t)readCnt; + } + }; + + bool SeekAbsoluteOffset(offset_t offset) override { + if (!buse_stored_file_) { + return dash_stream_.SeekAbsoluteOffset(offset); + } else { + if (!mFileStream.is_open()) { + mFileStream.open(this->GetSegmentCacheFile().c_str(), ios_base::binary | ios_base::in); + } + if (mFileStream.tellg() == -1) { + mFileStream.clear(); + mFileStream.seekg(0, ios_base::beg); + } + + mFileStream.seekg(offset); + return true; + } + } + + offset_t TellOffset() override { + if (!buse_stored_file_) { + return dash_stream_.TellOffset(); + } else { + if (!mFileStream.is_open()) { + mFileStream.open(this->GetSegmentCacheFile().c_str(), ios_base::binary | ios_base::in); + } + return mFileStream.tellg(); + } + }; + + offset_t GetStreamSize() override { + if (!buse_stored_file_) { + return dash_stream_.GetStreamSize(); + } else { + if (!mFileStream.is_open()) { + mFileStream.open(this->GetSegmentCacheFile().c_str(), ios_base::binary | ios_base::in); + } + mFileStream.seekg(0, ios_base::end); + int64_t size = mFileStream.tellg(); + mFileStream.seekg(0, ios_base::beg); + return size; + } + }; + + public: + // + // @brief register state change callback + // + // @param[in] cb + // @brief state change callback + // + // @return void + // @brief + inline void RegisterStateChange(OnStateChange cb) noexcept { state_change_cb_ = cb; } + + // + // @brief get dash segment cache file path + // + // @return std::string + // @brief segment cache file path + inline std::string GetSegmentCacheFile() const noexcept { return cache_file_; }; + + // + // @brief set segment cache file path + // + // @param[in] cacheFileName + // @brief cache file path + // + // @return void + // @brief + inline void SetSegmentCacheFile(std::string cacheFileName) noexcept { cache_file_ = cacheFileName; }; + + // + // @brief get segment state + // + // @return State + // @brief segment state + inline State GetState() const noexcept { return state_; } + inline void SetState(State s) noexcept { state_ = s; } + + // + // @brief check is init segment + // + // @return bool + // @brief yes or no + inline bool IsInitSegment() const noexcept { return bInit_segment_; } + + // + // @brief set init segement or not + // + // @param[in] bInit + // @brief yes or no + // + // @return void + // @brief + inline void SetInitSegment(bool bInit) noexcept { bInit_segment_ = bInit; }; + + // + // @brief open this segment, which will trigger the dash download + // + // @param[in] ds + // @brief dash source params + // + // @param[in] cb + // @brief dash open state change callback + // + // @return int + // @brief calling success or not + int Open(std::shared_ptr dash_client) noexcept; + int Stop() noexcept; + // int Read(uint8_t* data, size_t len); + // int Peek(uint8_t* data, size_t len); + // int Peek(uint8_t* data, size_t len, size_t offset); + + // int Close(); + + void SetSegID(uint32_t id) noexcept { seg_id_ = id; }; + uint32_t GetSegID() noexcept { return seg_id_; }; + void SetInitSegID(uint32_t id) noexcept { initSeg_id_ = id; }; + uint32_t GetInitSegID() const noexcept { return initSeg_id_; }; + void SetTrackId(uint32_t id) noexcept { 
track_id_ = id; }; + uint32_t GetTrackId() const noexcept { return track_id_; }; + void SetSegStored() noexcept { buse_stored_file_ = true; }; + int GetSegCount() const noexcept { return seg_count_; }; + void SetSegSize(uint64_t segSize) noexcept { seg_size_ = segSize; }; + uint64_t GetSegSize() const noexcept { return seg_size_; }; + + int64_t GetTimelinePoint(void) { return ds_params_.timeline_point_; } + void SetQualityRanking(QualityRank qualityRanking) { mQualityRanking = qualityRanking; }; + QualityRank GetQualityRanking() { return mQualityRanking; }; + + void SetSRDInfo(const SRDInfo& srdInfo) { mSRDInfo = srdInfo; } + + SRDInfo GetSRDInfo() { return mSRDInfo; }; + + std::string to_string() const noexcept; + + void SetMediaType(MediaType type) { mMediaType = type; }; + + MediaType GetMediaType() { return mMediaType; }; + + void SetAudioChlNum(uint32_t chlNum) { mChlsNum = chlNum; }; + + uint32_t GetAudioChlNum() { return mChlsNum; }; + + void SetAudioSampleRate(uint32_t sampleRate) { mSampleRate = sampleRate; }; + + uint32_t GetAudioSampleRate() { return mSampleRate; }; + + private: + //! + //! \brief save the memory data to file. + //! + int CacheToFile() noexcept; + + private: + std::shared_ptr dash_client_; + DashSegmentSourceParams ds_params_; + + StreamBlocks dash_stream_; + + // SegmentElement* mSegElement; // +#include +#include +#include +#include "general.h" +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ +#include "../trace/MtHQ_tp.h" +#endif +#endif + +VCD_OMAF_BEGIN + +OmafTileTracksSelector::~OmafTileTracksSelector() +{ + if (m_currentTracks.size()) + m_currentTracks.clear(); +} + +bool IsSelectionChanged(TracksMap selection1, TracksMap selection2) +{ + bool isChanged = false; + + if (selection1.size() && selection2.size()) + { + if (selection1.size() != selection2.size()) + { + isChanged = true; + } + else + { + std::map::iterator it1; + for (it1 = selection1.begin(); it1 != selection1.end(); it1++) + { + OmafAdaptationSet *as1 = it1->second; + std::map::iterator it2; + for (it2 = selection2.begin(); it2 != selection2.end(); it2++) + { + OmafAdaptationSet *as2 = it2->second; + if (as1 == as2) + { + break; + } + } + if (it2 == selection2.end()) + { + isChanged = true; + break; + } + } + } + } + + return isChanged; +} + +int OmafTileTracksSelector::SelectTracks(OmafMediaStream* pStream) +{ + DashStreamInfo *streamInfo = pStream->GetStreamInfo(); + int ret = ERROR_NONE; + if (streamInfo->stream_type == MediaType_Video) + { + TracksMap selectedTracks; + uint32_t rowSize = pStream->GetRowSize(); + uint32_t colSize = pStream->GetColSize(); + if (mUsePrediction && mPoseHistory.size() >= POSE_SIZE) // using prediction + { + std::vector> predictedTracksArray = GetTileTracksByPosePrediction(pStream); + if (predictedTracksArray.empty()) // Prediction error occurs + { + selectedTracks = GetTileTracksByPose(pStream); + } + else + { + // fetch union set of predictedTracksArray ordered by ViewportPriority + std::sort(predictedTracksArray.begin(), predictedTracksArray.end(), \ + [&](std::pair track1, std::pair track2) { return track1.first < track2.first;}); + for (uint32_t i = 0; i < predictedTracksArray.size(); i++) + { + TracksMap oneTracks = predictedTracksArray[i].second; + TracksMap::iterator iter = oneTracks.begin(); + for ( ; iter != oneTracks.end(); iter++) + { + // ignore when key is identical and have tracks selection limitation. 
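+ // Stop taking tiles from this prediction once the union already covers more than
+ // half of the tile grid (rowSize * colSize / 2) and is no smaller than this
+ // prediction's own selection; duplicate track IDs are ignored by std::map::insert.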
+ if (selectedTracks.size() <= rowSize * colSize / 2 || selectedTracks.size() < oneTracks.size()) + { + selectedTracks.insert(*iter); + } + else break; + } + } + } + // clear predictedTracksArray + if (predictedTracksArray.size()) + { + for (uint32_t i = 0; i < predictedTracksArray.size(); i++) + { + predictedTracksArray[i].second.clear(); + } + predictedTracksArray.clear(); + } + } + else // not using prediction + { + selectedTracks = GetTileTracksByPose(pStream); + } + + if (selectedTracks.empty() && m_currentTracks.empty()) + return ERROR_INVALID; + + bool isPoseChanged = IsSelectionChanged(m_currentTracks, selectedTracks); + + if (m_currentTracks.empty() || isPoseChanged) + { + if (m_currentTracks.size()) + { + m_currentTracks.clear(); + } + m_currentTracks = selectedTracks; + } + selectedTracks.clear(); + + ret = pStream->UpdateEnabledTileTracks(m_currentTracks); + } + else if (streamInfo->stream_type == MediaType_Audio) + { + ret = pStream->EnableAllAudioTracks(); + } + + return ret; +} + +bool IsPoseChanged(HeadPose* pose1, HeadPose* pose2) +{ + // return false if two pose is same + if(abs(pose1->yaw - pose2->yaw)<1e-3 && abs(pose1->pitch - pose2->pitch)<1e-3) + { + OMAF_LOG(LOG_INFO,"pose has not changed!\n"); + return false; + } + return true; +} + +TracksMap OmafTileTracksSelector::GetTileTracksByPose(OmafMediaStream* pStream) +{ + TracksMap selectedTracks; + int64_t historySize = 0; + HeadPose* previousPose = NULL; + { + std::lock_guard lock(mMutex); + if(mPoseHistory.size() == 0) + { + return selectedTracks; + } + + previousPose = mPose; + + mPose = mPoseHistory.front(); + mPoseHistory.pop_front(); + + if(!mPose) + { + return selectedTracks; + } + + historySize = mPoseHistory.size(); + } + + // won't get viewport if pose hasn't changed + if( previousPose && mPose && !IsPoseChanged( previousPose, mPose ) && historySize > 1 && !mUsePrediction) + { + OMAF_LOG(LOG_INFO,"pose hasn't changed!\n"); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + //trace + tracepoint(mthq_tp_provider, T2_detect_pose_change, 0); +#endif +#endif + return selectedTracks; + } + + // to select tile tracks; + OMAF_LOG(LOG_INFO, "Start to select tile tracks!\n"); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T1_select_tracks, "tiletracks"); +#endif +#endif + + selectedTracks = SelectTileTracks(pStream, mPose); + if (selectedTracks.size() && previousPose) + { + OMAF_LOG(LOG_INFO,"pose has changed from yaw %f, pitch %f\n", previousPose->yaw, previousPose->pitch); + OMAF_LOG(LOG_INFO,"to yaw %f, pitch %f\n", mPose->yaw, mPose->pitch); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T2_detect_pose_change, 1); +#endif +#endif + } + + if (previousPose != mPose) + SAFE_DELETE(previousPose); + + return selectedTracks; +} + +TracksMap OmafTileTracksSelector::SelectTileTracks( + OmafMediaStream* pStream, + HeadPose* pose) +{ + TracksMap selectedTracks; + + // to select tile tracks + int ret = I360SCVP_setViewPortEx(m360ViewPortHandle, pose); + if (ret) + return selectedTracks; + + ret = I360SCVP_process(mParamViewport, m360ViewPortHandle); + if (ret) + return selectedTracks; + TileDef *tilesInViewport = new TileDef[1024]; + if (!tilesInViewport) + return selectedTracks; + + Param_ViewportOutput paramViewportOutput; + int32_t selectedTilesNum = I360SCVP_getTilesInViewport( + tilesInViewport, ¶mViewportOutput, m360ViewPortHandle); + + // in planar projection format + if (abs(pose->zoomFactor) < 1e-3 && mProjFmt == 
ProjectionFormat::PF_PLANAR) + { + return selectedTracks; + } + + if (selectedTilesNum <= 0 || selectedTilesNum > 1024) + { + OMAF_LOG(LOG_ERROR, "Failed to get tiles information in viewport !\n"); + DELETE_ARRAY(tilesInViewport); + return selectedTracks; + } + + std::map asMap = pStream->GetMediaAdaptationSet(); + std::map::iterator itAS; + + // insert all tile tracks in viewport into selected tile tracks map + uint32_t sqrtedSize = (uint32_t)sqrt(selectedTilesNum); + while(sqrtedSize && selectedTilesNum%sqrtedSize) { sqrtedSize--; } + bool needAddtionalTile = false; + if ((selectedTilesNum > 4) && (sqrtedSize == 1)) // selectedTilesNum is prime number + { + OMAF_LOG(LOG_INFO,"need additional tile is true! original selected tile num of high quality is %d\n", selectedTilesNum); + needAddtionalTile = true; + } + if (mProjFmt == ProjectionFormat::PF_ERP) + { + for (int32_t index = 0; index < selectedTilesNum; index++) + { + int32_t left = tilesInViewport[index].x; + int32_t top = tilesInViewport[index].y; + + for (itAS = asMap.begin(); itAS != asMap.end(); itAS++) + { + OmafAdaptationSet *adaptationSet = itAS->second; + OmafSrd *srd = adaptationSet->GetSRD(); + int32_t tileLeft = srd->get_X(); + int32_t tileTop = srd->get_Y(); + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + + if ((qualityRanking == HIGHEST_QUALITY_RANKING) && (tileLeft == left) && (tileTop == top)) + { + int trackID = adaptationSet->GetID(); + selectedTracks.insert(make_pair(trackID, adaptationSet)); + break; + } + } + } + } + else if (mProjFmt == ProjectionFormat::PF_CUBEMAP) + { + for (int32_t index = 0; index < selectedTilesNum; index++) + { + int32_t left = tilesInViewport[index].x; + int32_t top = tilesInViewport[index].y; + int32_t faceId = tilesInViewport[index].faceId; + for (itAS = asMap.begin(); itAS != asMap.end(); itAS++) + { + OmafAdaptationSet *adaptationSet = itAS->second; + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + + if (qualityRanking == HIGHEST_QUALITY_RANKING) + { + TileDef *tileInfo = adaptationSet->GetTileInfo(); + if (!tileInfo) + { + OMAF_LOG(LOG_ERROR, "NULL tile information for Cubemap !\n"); + DELETE_ARRAY(tilesInViewport); + return selectedTracks; + } + int32_t tileLeft = tileInfo->x; + int32_t tileTop = tileInfo->y; + int32_t tileFaceId = tileInfo->faceId; + if ((tileLeft == left) && (tileTop == top) && (tileFaceId == faceId)) + { + int trackID = adaptationSet->GetID(); + selectedTracks.insert(make_pair(trackID, adaptationSet)); + break; + } + } + } + } + } + else if (mProjFmt == ProjectionFormat::PF_PLANAR) + { + uint32_t corresQualityRanking = 0; + for (int32_t index = 0; index < selectedTilesNum; index++) + { + int32_t left = tilesInViewport[index].x; + int32_t top = tilesInViewport[index].y; + int32_t strId = tilesInViewport[index].streamId; + map::iterator itStrQua; + itStrQua = mTwoDStreamQualityMap.find(strId); + if (itStrQua == mTwoDStreamQualityMap.end()) + { + OMAF_LOG(LOG_ERROR, "Can't find corresponding quality ranking for stream index %d !\n", strId); + DELETE_ARRAY(tilesInViewport); + return selectedTracks; + } + + corresQualityRanking = itStrQua->second; + OMAF_LOG(LOG_INFO, "Selected tile from stream %d with quality ranking %d\n", strId, corresQualityRanking); + + for (itAS = asMap.begin(); itAS != asMap.end(); itAS++) + { + OmafAdaptationSet *adaptationSet = itAS->second; + OmafSrd *srd = adaptationSet->GetSRD(); + int32_t tileLeft = srd->get_X(); + int32_t tileTop = srd->get_Y(); + uint32_t qualityRanking = 
adaptationSet->GetRepresentationQualityRanking(); + + if ((qualityRanking == (uint32_t)(corresQualityRanking)) && (tileLeft == left) && (tileTop == top)) + { + int trackID = adaptationSet->GetID(); + OMAF_LOG(LOG_INFO, "Selected track %d\n", trackID); + selectedTracks.insert(make_pair(trackID, adaptationSet)); + break; + } + } + } + + if (needAddtionalTile) + { + for (itAS = asMap.begin(); itAS != asMap.end(); itAS++) + { + OmafAdaptationSet *adaptationSet = itAS->second; + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + int trackID = adaptationSet->GetID(); + if (selectedTracks.find(trackID) == selectedTracks.end() && qualityRanking == corresQualityRanking) + { + selectedTracks.insert(make_pair(trackID, adaptationSet)); + break; + } + } + } + } + + if (needAddtionalTile && (mProjFmt != ProjectionFormat::PF_PLANAR)) + { + for (itAS = asMap.begin(); itAS != asMap.end(); itAS++) + { + OmafAdaptationSet *adaptationSet = itAS->second; + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + int trackID = adaptationSet->GetID(); + if (selectedTracks.find(trackID) == selectedTracks.end() && qualityRanking == HIGHEST_QUALITY_RANKING) + { + selectedTracks.insert(make_pair(trackID, adaptationSet)); + break; + } + } + } + // insert all tile tracks from low qulity video into selected tile tracks map when projection type is not PF_PLANAR + if (mProjFmt != ProjectionFormat::PF_PLANAR) + { + for (itAS = asMap.begin(); itAS != asMap.end(); itAS++) + { + OmafAdaptationSet *adaptationSet = itAS->second; + uint32_t qualityRanking = adaptationSet->GetRepresentationQualityRanking(); + if (qualityRanking > HIGHEST_QUALITY_RANKING) + { + int trackID = adaptationSet->GetID(); + selectedTracks.insert(make_pair(trackID, adaptationSet)); + } + } + } + + DELETE_ARRAY(tilesInViewport); + + return selectedTracks; +} + +std::vector> OmafTileTracksSelector::GetTileTracksByPosePrediction( + OmafMediaStream *pStream) +{ + std::vector> predictedTracks; + int64_t historySize = 0; + HeadPose* previousPose = NULL; + { + std::lock_guard lock(mMutex); + if(mPoseHistory.size() == 0) + { + return predictedTracks; + } + + previousPose = mPose; + + mPose = mPoseHistory.front(); + mPoseHistory.pop_front(); + + if(!mPose) + { + return predictedTracks; + } + + historySize = mPoseHistory.size(); + + } + // won't get viewport if pose hasn't changed + if( previousPose && mPose && !IsPoseChanged( previousPose, mPose ) && historySize > 1) + { + OMAF_LOG(LOG_INFO,"pose hasn't changed!\n"); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + //trace + tracepoint(mthq_tp_provider, T2_detect_pose_change, 0); +#endif +#endif + SAFE_DELETE(previousPose); + return predictedTracks; + } + // if viewport changed, then predict viewport using pose history. + if (mPredictPluginMap.size() == 0) + { + OMAF_LOG(LOG_ERROR,"predict plugin map is empty!\n"); + return predictedTracks; + } + // 1. figure out the pts of predicted angle + uint32_t current_segment_num = pStream->GetSegmentNumber(); + + DashStreamInfo *stream_info = pStream->GetStreamInfo(); + if (stream_info == nullptr) return predictedTracks; + + int32_t stream_frame_rate = stream_info->framerate_num / stream_info->framerate_den; + uint64_t first_predict_pts = current_segment_num > 0 ? (current_segment_num - 1) * stream_frame_rate : 0; + // 2. 
predict process + ViewportPredictPlugin *plugin = mPredictPluginMap.at(mPredictPluginName); + std::map predict_angles; + OMAF_LOG(LOG_INFO, "first_predict_pts %ld\n", first_predict_pts); + plugin->Predict(first_predict_pts, predict_angles); + if (predict_angles.empty()) + { + OMAF_LOG(LOG_INFO,"predictPose_func return an invalid value!\n"); + return predictedTracks; + } + // candicate nums to select tile tracks + uint32_t poseCandicateNum = predict_angles.size(); + HeadPose *predictPose = new HeadPose[poseCandicateNum]; + if (!predictPose) + return predictedTracks; + for (uint32_t i = 0; i < poseCandicateNum; i++) + { + predictPose[i].yaw = predict_angles[ptsInterval[i] + first_predict_pts]->yaw; + predictPose[i].pitch = predict_angles[ptsInterval[i] + first_predict_pts]->pitch; + OMAF_LOG(LOG_INFO, "Start to select tile tracks!\n"); +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + // trace + tracepoint(mthq_tp_provider, T1_select_tracks, "tiletracks"); +#endif +#endif + TracksMap selectedTracks = SelectTileTracks(pStream, &predictPose[i]); + if (selectedTracks.size() && previousPose) + { + predictedTracks.push_back(make_pair(predict_angles[ptsInterval[i] + first_predict_pts]->priority, selectedTracks)); + OMAF_LOG(LOG_INFO,"pose has changed from yaw %f, pitch %f\n", previousPose->yaw, previousPose->pitch); + OMAF_LOG(LOG_INFO,"to yaw %f, pitch %f\n", mPose->yaw, mPose->pitch); + +#ifndef _ANDROID_NDK_OPTION_ +#ifdef _USE_TRACE_ + //trace + tracepoint(mthq_tp_provider, T2_detect_pose_change, 1); +#endif +#endif + } + } + SAFE_DELETE(previousPose); + SAFE_DELARRAY(predictPose); + predict_angles.clear(); + return predictedTracks; +} + +VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafTileTracksSelector.h b/src/OmafDashAccess/OmafTileTracksSelector.h new file mode 100644 index 00000000..03d313b1 --- /dev/null +++ b/src/OmafDashAccess/OmafTileTracksSelector.h @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + * + */ + +//! +//! \file: OmafTileTracksSelector.h +//! \brief: Tile tracks selector class definition +//! \detail: Define the data and the operation of tile tracks selector based on +//! viewport +//! +//! Created on May 28, 2019, 1:19 PM +//! 
+ +#ifndef OMAFTILETRACKSSELECTOR_H +#define OMAFTILETRACKSSELECTOR_H + +#include "OmafTracksSelector.h" + +using namespace VCD::OMAF; + +VCD_OMAF_BEGIN + +typedef std::map TracksMap; + +class OmafTileTracksSelector : public OmafTracksSelector +{ +public: + //! + //! \brief construct + //! + OmafTileTracksSelector(int size = POSE_SIZE) : OmafTracksSelector(size) + { + }; + + //! + //! \brief de-construct + //! + virtual ~OmafTileTracksSelector(); + +public: + + //! + //! \brief Select tile tracks for the stream based on the latest pose. each time + //! the selector will select tile tracks based on the latest pose. the + //! information stored in mPoseHistory can be used for prediction for + //! further movement + //! + virtual int SelectTracks(OmafMediaStream* pStream); + + //! + //! \brief update Viewport; each time pose update will be recorded, but only + //! the latest will be used when SelectTracks is called. + //! + int UpdateViewport(HeadPose* pose); + + //! + //! \brief Set Init viewport + //! + int SetInitialViewport( + std::vector& pView, + HeadSetInfo* headSetInfo, + OmafMediaStream* pStream); + + //! + //! \brief Load viewport prediction plugin + //! + int EnablePosePrediction(std::string predictPluginName, std::string libPath); + + //! + //! \brief Get the priority of the segment + //! + //virtual int GetSegmentPriority(OmafSegment *segment); + +private: + + TracksMap GetTileTracksByPose(OmafMediaStream* pStream); + + std::vector> GetTileTracksByPosePrediction(OmafMediaStream* pStream); + + //TracksMap GetCoveredTileTracks(OmafMediaStream* pStream, CCDef* outCC); + + TracksMap SelectTileTracks(OmafMediaStream* pStream, HeadPose* pose); + +private: + TracksMap m_currentTracks; +}; + +VCD_OMAF_END; + +#endif /* OMAFTILETRACKSSELECTOR_H */ diff --git a/src/OmafDashAccess/OmafTilesStitch.cpp b/src/OmafDashAccess/OmafTilesStitch.cpp new file mode 100644 index 00000000..fa9efa32 --- /dev/null +++ b/src/OmafDashAccess/OmafTilesStitch.cpp @@ -0,0 +1,1921 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ + * + */ + +#include "OmafTilesStitch.h" +#include "math.h" + +#include "common.h" +VCD_OMAF_BEGIN + +OmafTilesStitch::OmafTilesStitch() { + m_360scvpParam = nullptr; + m_360scvpHandle = nullptr; + m_fullResVideoHeader = nullptr; + m_fullResVPSSize = 0; + m_fullResSPSSize = 0; + m_fullResPPSSize = 0; + m_fullWidth = 0; + m_fullHeight = 0; + m_mainMergedWidth = 0; + m_mainMergedHeight = 0; + m_mainMergedTileRows = 0; + m_mainMergedTileCols = 0; + m_needHeaders = false; + m_isInitialized = false; + m_projFmt = PF_UNKNOWN; + m_tmpRegionrwpk = nullptr; + m_maxStitchWidth = 0; + m_maxStitchHeight = 0; +} + +OmafTilesStitch::~OmafTilesStitch() { + if (m_selectedTiles.size()) { + std::list allPackets; + std::map>::iterator it; + for (it = m_selectedTiles.begin(); it != m_selectedTiles.end();) { + std::map packets = it->second; + if (packets.size()) { + std::map::iterator it1; + for (it1 = packets.begin(); it1 != packets.end();) { + MediaPacket *onePacket = it1->second; + std::list::iterator pktIter; + pktIter = std::find(allPackets.begin(), allPackets.end(), onePacket); + if (pktIter == allPackets.end()) + { + allPackets.push_back(onePacket); + SAFE_DELETE(onePacket); + } + packets.erase(it1++); + } + packets.clear(); + } + allPackets.clear(); + m_selectedTiles.erase(it++); + } + + m_selectedTiles.clear(); + } + + if (m_initTilesMergeArr.size()) { + std::map>::iterator it; + for (it = m_initTilesMergeArr.begin(); it != m_initTilesMergeArr.end();) { + + vector layOut = it->second; + + for (uint32_t i = 0; i < layOut.size(); i++) { + if (layOut[i]) { + DELETE_ARRAY(layOut[i]->tilesLayout.tileRowHeight); + DELETE_ARRAY(layOut[i]->tilesLayout.tileColWidth); + } + SAFE_DELETE(layOut[i]); + } + layOut.clear(); + m_initTilesMergeArr.erase(it++); + } + m_initTilesMergeArr.clear(); + } + + if (m_updatedTilesMergeArr.size()) { + std::map>::iterator it; + for (it = m_updatedTilesMergeArr.begin(); it != m_updatedTilesMergeArr.end();) { + + vector layOut = it->second; + + for (uint32_t i = 0; i < layOut.size(); i++) { + if (layOut[i]) { + DELETE_ARRAY(layOut[i]->tilesLayout.tileRowHeight); + DELETE_ARRAY(layOut[i]->tilesLayout.tileColWidth); + } + SAFE_DELETE(layOut[i]); + } + layOut.clear(); + m_updatedTilesMergeArr.erase(it++); + } + + m_updatedTilesMergeArr.clear(); + } + + SAFE_DELETE(m_360scvpParam); + + if (m_360scvpHandle) { + I360SCVP_unInit(m_360scvpHandle); + m_360scvpHandle = nullptr; + } + + m_allQualities.clear(); + if (m_fullResVideoHeader) { + delete[] m_fullResVideoHeader; + m_fullResVideoHeader = nullptr; + } + + if (m_mergedVideoHeaders.size()) { + std::map>>::iterator itHrd; + for (itHrd = m_mergedVideoHeaders.begin(); itHrd != m_mergedVideoHeaders.end();) { + vector> oneHeader = itHrd->second; + for (uint32_t i = 0; i < oneHeader.size(); i++) { + std::map::iterator itParam; + for (itParam = oneHeader[i].begin(); itParam != oneHeader[i].end();) { + uint8_t *headers = itParam->second; + DELETE_ARRAY(headers); + oneHeader[i].erase(itParam++); + } + oneHeader[i].clear(); + } + m_mergedVideoHeaders.erase(itHrd++); + } + m_mergedVideoHeaders.clear(); + } + m_tmpRegionrwpk = nullptr; + m_sources.clear(); +} + +int32_t OmafTilesStitch::Initialize(std::map &firstFramePackets, bool needParams, + VCD::OMAF::ProjectionFormat projFmt, std::map allSources) { + if (0 == firstFramePackets.size()) { + OMAF_LOG(LOG_ERROR, "There is no media packet for tiles stitch !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + if (m_selectedTiles.size()) { + OMAF_LOG(LOG_ERROR, "Non-empty selected tile track media 
packets at the beginning !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + m_needHeaders = needParams; + m_projFmt = projFmt; + m_sources = allSources; + + std::map::iterator it; + for (it = firstFramePackets.begin(); it != firstFramePackets.end(); it++) { + MediaPacket *onePacket = it->second; + auto qualityRanking = onePacket->GetQualityRanking(); + m_allQualities.insert(qualityRanking); + } + + std::set::iterator itQuality = m_allQualities.begin(); + if (itQuality == m_allQualities.end()) + { + OMAF_LOG(LOG_ERROR, " Quality set is empty!\n"); + return OMAF_ERROR_INVALID_DATA; + } + auto firstQuality = *itQuality; + + for (; itQuality != m_allQualities.end(); itQuality++) { + auto oneQuality = *itQuality; + std::map packets; + std::map middlePackets; + std::set tracksID; + for (it = firstFramePackets.begin(); it != firstFramePackets.end(); it++) { + MediaPacket *onePacket = it->second; + auto qualityRanking = onePacket->GetQualityRanking(); + if (qualityRanking == oneQuality) { + tracksID.insert(it->first); + middlePackets.insert(std::make_pair(it->first, onePacket)); + } + } + + std::set::iterator itId = tracksID.begin(); + for (; itId != tracksID.end(); itId++) { + uint32_t oneID = *itId; + MediaPacket *onePacket = middlePackets[oneID]; + packets.insert(std::make_pair(oneID, onePacket)); + } + + middlePackets.clear(); + tracksID.clear(); + OMAF_LOG(LOG_INFO, "For quality ranking %d, total tiles number needed to be merged is %lld\n", static_cast(oneQuality), packets.size()); + + m_selectedTiles.insert(std::make_pair(oneQuality, packets)); + } + + if (m_allQualities.size() != m_selectedTiles.size()) { + OMAF_LOG(LOG_ERROR, "Failed to differentiate media packets from different quality ranking !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + std::map highestQualityPackets; + highestQualityPackets = m_selectedTiles[firstQuality]; + it = highestQualityPackets.begin(); + if (it == highestQualityPackets.end()) + { + OMAF_LOG(LOG_ERROR, "Highest quality packets map is empty!\n"); + return OMAF_ERROR_INVALID_DATA; + } + MediaPacket *tilePacket = it->second; + if (!tilePacket) { + OMAF_LOG(LOG_ERROR, "nullptr Media Packet !\n"); + return OMAF_ERROR_NULL_PTR; + } + + int32_t ret = ParseVideoHeader(tilePacket); + + if (ERROR_NONE == ret) m_isInitialized = true; + + return ret; +} + +int32_t OmafTilesStitch::ParseVideoHeader(MediaPacket *tilePacket) { + if (!tilePacket) return OMAF_ERROR_NULL_PTR; + + if (m_360scvpParam || m_360scvpHandle) { + OMAF_LOG(LOG_ERROR, "There should be no 360SCVP library usage before parsing video headers !\n"); + return OMAF_ERROR_INVALID_DATA; + } + bool hasHeader = tilePacket->GetHasVideoHeader(); + if (!hasHeader) { + OMAF_LOG(LOG_ERROR, "The first media packet should have video headers !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + m_fullResVPSSize = tilePacket->GetVPSLen(); + m_fullResSPSSize = tilePacket->GetSPSLen(); + m_fullResPPSSize = tilePacket->GetPPSLen(); + uint32_t fullResHeaderSize = m_fullResVPSSize + m_fullResSPSSize + m_fullResPPSSize; + + m_fullResVideoHeader = new uint8_t[fullResHeaderSize]; + memset(m_fullResVideoHeader, 0, fullResHeaderSize); + memcpy_s(m_fullResVideoHeader, fullResHeaderSize, (uint8_t *)(tilePacket->Payload()), fullResHeaderSize); + m_360scvpParam = new param_360SCVP; + if (!m_360scvpParam) return OMAF_ERROR_NULL_PTR; + + memset(m_360scvpParam, 0, sizeof(param_360SCVP)); + m_360scvpParam->usedType = E_PARSER_ONENAL; + m_360scvpParam->pInputBitstream = m_fullResVideoHeader; + m_360scvpParam->inputBitstreamLen = fullResHeaderSize; 
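+  // Note: the first packet's payload carries the full-resolution VPS, SPS and PPS
+  // concatenated back to back, and their individual lengths were read from the
+  // packet above. The 360SCVP parser initialized below is then walked NAL by NAL
+  // to verify each parameter set's boundary and to read the full picture size.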
+ m_360scvpParam->logFunction = (void*)logCallBack; + + m_360scvpHandle = I360SCVP_Init(m_360scvpParam); + if (!m_360scvpHandle) { + OMAF_LOG(LOG_ERROR, "Failed to initialize 360SCVP library handle !\n"); + return OMAF_ERROR_NULL_PTR; + } + + Nalu *oneNalu = new Nalu; + if (!oneNalu) return OMAF_ERROR_NULL_PTR; + + oneNalu->data = m_fullResVideoHeader; + oneNalu->dataSize = fullResHeaderSize; + int32_t ret = I360SCVP_ParseNAL(oneNalu, m_360scvpHandle); + if (ret) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_NALU_NOT_FOUND; + } + + if (oneNalu->data != m_fullResVideoHeader) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_INVALID_HEADER; + } + + if ((uint32_t)(oneNalu->dataSize) != m_fullResVPSSize) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_INVALID_HEADER; + } + + oneNalu->data += m_fullResVPSSize; + oneNalu->dataSize = fullResHeaderSize - m_fullResVPSSize; + ret = I360SCVP_ParseNAL(oneNalu, m_360scvpHandle); + if (ret) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_NALU_NOT_FOUND; + } + + if (oneNalu->data != (m_fullResVideoHeader + m_fullResVPSSize)) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_INVALID_HEADER; + } + + if ((uint32_t)(oneNalu->dataSize) != m_fullResSPSSize) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_INVALID_HEADER; + } + + oneNalu->data += m_fullResSPSSize; + oneNalu->dataSize = fullResHeaderSize - m_fullResVPSSize - m_fullResSPSSize; + ret = I360SCVP_ParseNAL(oneNalu, m_360scvpHandle); + if (ret) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_NALU_NOT_FOUND; + } + + if (oneNalu->data != (m_fullResVideoHeader + m_fullResVPSSize + m_fullResSPSSize)) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_INVALID_HEADER; + } + + if ((uint32_t)(oneNalu->dataSize) != m_fullResPPSSize) { + SAFE_DELETE(oneNalu); + return OMAF_ERROR_INVALID_HEADER; + } + + SAFE_DELETE(oneNalu); + + Param_PicInfo *picInfo = new Param_PicInfo; + if (!picInfo) return OMAF_ERROR_NULL_PTR; + + I360SCVP_GetParameter(m_360scvpHandle, ID_SCVP_PARAM_PICINFO, (void **)(&picInfo)); + + m_fullWidth = picInfo->picWidth; + m_fullHeight = picInfo->picHeight; + OMAF_LOG(LOG_INFO, "Full resolution video has width %u and height %u\n", m_fullWidth, m_fullHeight); + + SAFE_DELETE(picInfo); + + return ERROR_NONE; +} + +int32_t OmafTilesStitch::UpdateSelectedTiles(std::map &currPackets, bool needParams) { + if (0 == currPackets.size()) return OMAF_ERROR_INVALID_DATA; + + if (0 == m_allQualities.size()) { + OMAF_LOG(LOG_ERROR, "Tile track groups are invalid !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + if (m_selectedTiles.size()) { + std::list allPackets; + std::map>::iterator it; + for (it = m_selectedTiles.begin(); it != m_selectedTiles.end();) { + std::map packets = it->second; + if (packets.size()) { + std::map::iterator it1; + for (it1 = packets.begin(); it1 != packets.end();) { + MediaPacket *onePacket = it1->second; + std::list::iterator pktIter; + pktIter = std::find(allPackets.begin(), allPackets.end(), onePacket); + if (pktIter == allPackets.end()) + { + allPackets.push_back(onePacket); + SAFE_DELETE(onePacket); + } + packets.erase(it1++); + } + packets.clear(); + } + allPackets.clear(); + m_selectedTiles.erase(it++); + } + + m_selectedTiles.clear(); + } + + m_needHeaders = needParams; + + std::set allQualities; + std::map::iterator it; + for (it = currPackets.begin(); it != currPackets.end(); it++) { + MediaPacket *onePacket = it->second; + auto qualityRanking = onePacket->GetQualityRanking(); + allQualities.insert(qualityRanking); + } + + if (m_allQualities.size() != allQualities.size()) { + OMAF_LOG(LOG_INFO, 
"Video qualities number has been changed !\n"); + } + + m_allQualities.clear(); + if (m_allQualities.size()) { + OMAF_LOG(LOG_ERROR, "Failed to clear all quality rankings !\n"); + return OMAF_ERROR_OPERATION; + } + + m_allQualities = allQualities; + allQualities.clear(); + + std::set::iterator itQuality = m_allQualities.begin(); + if (itQuality == m_allQualities.end()) + { + OMAF_LOG(LOG_ERROR, "Quality set is empty!\n"); + return OMAF_ERROR_INVALID_DATA; + } + + OMAF_LOG(LOG_INFO, "Selected stream has quality ranking %d\n", *itQuality); + + for (; itQuality != m_allQualities.end(); itQuality++) { + auto oneQuality = *itQuality; + std::map packets; + std::map middlePackets; + std::set tracksID; + for (it = currPackets.begin(); it != currPackets.end(); it++) { + MediaPacket *onePacket = it->second; + auto qualityRanking = onePacket->GetQualityRanking(); + if (qualityRanking == oneQuality) { + tracksID.insert(it->first); + middlePackets.insert(std::make_pair(it->first, onePacket)); + } + } + + std::set::iterator itId = tracksID.begin(); + for (; itId != tracksID.end(); itId++) { + uint32_t oneID = *itId; + MediaPacket *onePacket = middlePackets[oneID]; + packets.insert(std::make_pair(oneID, onePacket)); + } + + middlePackets.clear(); + tracksID.clear(); + + OMAF_LOG(LOG_INFO, "In Update, For quality ranking %d, total tiles number needed to be merged is %lld\n", static_cast(oneQuality), packets.size()); + + m_selectedTiles.insert(std::make_pair(oneQuality, packets)); + } + + if (m_allQualities.size() != m_selectedTiles.size()) { + OMAF_LOG(LOG_ERROR, "Failed to differentiate media packets from different quality ranking !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + return ERROR_NONE; +} + +vector> OmafTilesStitch::GenerateRowAndColArr(uint32_t packetsSize, uint32_t splitNum, uint32_t maxTile_x, uint32_t maxTile_y) +{ + vector> arrangementArr; + if (packetsSize < splitNum || splitNum == 0) + { + OMAF_LOG(LOG_ERROR, " invalid split num OR packet size!\n"); + return arrangementArr; + } + + uint32_t normalSplitPacketSize = 0; + uint32_t lastSplitPacketSize = 0; + if (splitNum > 1) + { + normalSplitPacketSize = maxTile_x * maxTile_y; + lastSplitPacketSize = normalSplitPacketSize; + if (packetsSize % splitNum) + { + lastSplitPacketSize = packetsSize - normalSplitPacketSize * (splitNum - 1); + } + } + else + { + normalSplitPacketSize = packetsSize / splitNum; + lastSplitPacketSize = normalSplitPacketSize; + } + + for (uint32_t i = 0; i < splitNum; i++) + { + pair oneArrangement; // pair < row, col > + uint32_t size = i < splitNum - 1 ? normalSplitPacketSize : lastSplitPacketSize; + + // 1. tranverse all the proximate number and find the required split layout + uint32_t supplementedNum = 0; + + uint32_t maxDividedSize = 0, maxSqrtedSize = 0; + // notice: diviedSize >= sqrtedSize + if (maxTile_x > maxTile_y) { maxDividedSize = maxTile_x; maxSqrtedSize = maxTile_y; } + else { maxDividedSize = maxTile_y; maxSqrtedSize = maxTile_x; } + + uint32_t sqrtedSize = 0, dividedSize = 0; + while (size <= maxDividedSize * maxSqrtedSize) + { + sqrtedSize = sqrt(size); + while (sqrtedSize > 0) + { + if (size % sqrtedSize == 0) + { + dividedSize = size / sqrtedSize; + // 2. 
find required split layout + if (dividedSize <= maxDividedSize && sqrtedSize <= maxSqrtedSize) + { + break; + } + } + sqrtedSize--; + } + if (sqrtedSize != 0) break; + else { size++; supplementedNum++; } // add tile number to remap + } + if (size > maxDividedSize * maxSqrtedSize) + { + OMAF_LOG(LOG_ERROR, "Sub-picture width/height for split is failed!\n"); + OMAF_LOG(LOG_ERROR, "maxDividedSize %d, maxSqrtedSize %d\n", maxDividedSize, maxSqrtedSize); + OMAF_LOG(LOG_ERROR, "size %d \n", size); + return arrangementArr; + } + if (supplementedNum > 0) + { + OMAF_LOG(LOG_INFO, "Repeat %u tiles to make sure normal packed sub-picture width/height ratio for split %d\n", supplementedNum, i); + } + + OMAF_LOG(LOG_INFO, "one arrangement has the tile division of %u x %u\n", sqrtedSize, dividedSize); + if (dividedSize > sqrtedSize ) { + oneArrangement = std::make_pair(sqrtedSize, dividedSize); //height , width + } else { + oneArrangement = std::make_pair(dividedSize, sqrtedSize); + } + arrangementArr.push_back(oneArrangement); + } + return arrangementArr; +} + +std::map> OmafTilesStitch::CalculateTilesMergeArrangement() { + std::map> tilesMergeArr; + + if (0 == m_selectedTiles.size()) return tilesMergeArr; + + std::map>::iterator it; + for (it = m_selectedTiles.begin(); it != m_selectedTiles.end(); it++) { + int32_t oneTileWidth = 0; + int32_t oneTileHeight = 0; + int32_t mostLeftPos = 0; + int32_t mostTopPos = 0; + auto qualityRanking = it->first; + std::map packets = it->second; + std::map::iterator itPacket; + + itPacket = packets.begin(); + if (itPacket == packets.end()) + { + OMAF_LOG(LOG_ERROR, "Packet map is empty!\n"); + return tilesMergeArr; + } + MediaPacket *onePacket = itPacket->second; + SRDInfo srd = onePacket->GetSRDInfo(); + oneTileWidth = srd.width; + oneTileHeight = srd.height; + + // 1 find the max tile split + uint32_t maxTile_x = m_maxStitchWidth / oneTileWidth; + uint32_t maxTile_y = m_maxStitchHeight / oneTileHeight; + + uint32_t packetsSize = packets.size(); + + // 2. check if need to split into multiple videos + uint32_t splitNum = ceil(float(packetsSize) / (maxTile_x * maxTile_y)); + + // 3. generate row and col arrays according to split num and maxTile_x & maxTile_y + vector> rowAndColArr = GenerateRowAndColArr(packetsSize, splitNum, maxTile_x, maxTile_y); + vector mergeArrList; + for (uint32_t i = 0; i < splitNum; i++) + { + TilesMergeArrangement *oneArr = new TilesMergeArrangement; + if (!oneArr) return tilesMergeArr; + if (rowAndColArr[i].first > maxTile_y || rowAndColArr[i].second > maxTile_x) + { + OMAF_LOG(LOG_WARNING, "split limitation broke! 
tile y %d, tile x %d\n", rowAndColArr[i].first, rowAndColArr[i].second); + } + oneArr->mergedWidth = oneTileWidth * rowAndColArr[i].second; + oneArr->mergedHeight = oneTileHeight * rowAndColArr[i].first; + oneArr->mostTopPos = mostTopPos; + oneArr->mostLeftPos = mostLeftPos; + oneArr->tilesLayout.tileRowsNum = rowAndColArr[i].first; + oneArr->tilesLayout.tileColsNum = rowAndColArr[i].second; + oneArr->tilesLayout.tileRowHeight = new uint16_t[oneArr->tilesLayout.tileRowsNum]; + if (!(oneArr->tilesLayout.tileRowHeight)) + { + SAFE_DELETE(oneArr); + return tilesMergeArr; + } + oneArr->tilesLayout.tileColWidth = new uint16_t[oneArr->tilesLayout.tileColsNum]; + if (!(oneArr->tilesLayout.tileColWidth)) { + delete[](oneArr->tilesLayout.tileRowHeight); + oneArr->tilesLayout.tileRowHeight = nullptr; + SAFE_DELETE(oneArr); + return tilesMergeArr; + } + + for (uint8_t idx = 0; idx < oneArr->tilesLayout.tileRowsNum; idx++) { + (oneArr->tilesLayout).tileRowHeight[idx] = oneTileHeight / LCU_SIZE; + } + + for (uint8_t idx = 0; idx < oneArr->tilesLayout.tileColsNum; idx++) { + (oneArr->tilesLayout).tileColWidth[idx] = oneTileWidth / LCU_SIZE; + } + mergeArrList.push_back(oneArr); + OMAF_LOG(LOG_INFO, "FOR quality %d video, the total merged packets size is %u\n", qualityRanking, splitNum); + if ((!m_mainMergedWidth || !m_mainMergedHeight || !m_mainMergedTileRows || !m_mainMergedTileCols) && + (qualityRanking == HIGHEST_QUALITY_RANKING)) { + m_mainMergedWidth = oneTileWidth * rowAndColArr[i].second; + m_mainMergedHeight = oneTileHeight * rowAndColArr[i].first; + m_mainMergedTileRows = rowAndColArr[i].first; + m_mainMergedTileCols = rowAndColArr[i].second; + OMAF_LOG(LOG_INFO, "For Highest quality video, initial merged width is %u and height is %u\n", m_mainMergedWidth, m_mainMergedHeight); + OMAF_LOG(LOG_INFO, "For Highest quality video, initial merged tile rows is %u and tile cols is %u\n", m_mainMergedTileRows, m_mainMergedTileCols); + } + } + tilesMergeArr.insert(std::make_pair(qualityRanking, mergeArrList)); + } + + return tilesMergeArr; +} + +vector> OmafTilesStitch::CalculateMergedRwpkForERP(QualityRank qualityRanking, + bool hasPacketLost, + bool hasLayoutChanged) { + vector> ret; + if (0 == m_selectedTiles.size()) return ret; + + if (hasPacketLost && hasLayoutChanged) { + OMAF_LOG(LOG_ERROR, "Packet lost and layout change can't happen at the same time !\n"); + return ret; + } + + std::map>::iterator it; + it = m_selectedTiles.find(qualityRanking); + if (it == m_selectedTiles.end()) { + OMAF_LOG(LOG_ERROR, "Can't find media packets of specified quality ranking !\n"); + return ret; + } + + std::map packets = it->second; + if (0 == packets.size()) { + OMAF_LOG(LOG_ERROR, "Invalid media packets size for specified quality ranking !\n"); + return ret; + } + + vector mergeLayout; + std::map>::iterator itArr; + std::map::iterator itPacket; + + if (0 == m_initTilesMergeArr.size() && 0 == m_updatedTilesMergeArr.size()) { + OMAF_LOG(LOG_ERROR, "There is no tiles merge layout before calculating rwpk !\n"); + return ret; + } + OMAF_LOG(LOG_INFO, "hasPacketLost: %d, hasLayoutChanged: %d\n", hasPacketLost, hasLayoutChanged); + if (!hasPacketLost && !hasLayoutChanged) { + if (0 == m_updatedTilesMergeArr.size()) { + itArr = m_initTilesMergeArr.find(qualityRanking); + if (itArr == m_initTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find tiles merge layout for specified quality ranking !\n"); + return ret; + } + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "nullptr tiles merge 
layout for specified quality ranking !\n"); + return ret; + } + } else { + itArr = m_updatedTilesMergeArr.find(qualityRanking); + if (itArr == m_updatedTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find tiles merge layout for specified quality ranking !\n"); + return ret; + } + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "nullptr tiles merge layout for specified quality ranking !\n"); + return ret; + } + } + } else if (!hasPacketLost && hasLayoutChanged) { + itArr = m_updatedTilesMergeArr.find(qualityRanking); + if (itArr == m_updatedTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find tiles merge layout for specified quality ranking !\n"); + return ret; + } + + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "nullptr tiles merge layout for specified quality ranking !\n"); + return ret; + } + } + + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "nullptr tiles merge layout for specified quality ranking !\n"); + return ret; + } + + uint32_t arrangeNum = mergeLayout.size(); + vector> rwpk; // = make_unique_vcd(); + for (uint32_t i = 0; i < arrangeNum; i++) { + std::unique_ptr onerwpk = make_unique_vcd(); + if (!onerwpk) return ret; + rwpk.push_back(std::move(onerwpk)); + } + for (uint32_t i = 0; i < mergeLayout.size(); i++) + { + uint32_t width = mergeLayout[i]->mergedWidth; + uint32_t height = mergeLayout[i]->mergedHeight; + uint8_t tileRowsNum = mergeLayout[i]->tilesLayout.tileRowsNum; + uint8_t tileColsNum = mergeLayout[i]->tilesLayout.tileColsNum; + + rwpk[i]->constituentPicMatching = 0; + rwpk[i]->numRegions = (uint8_t)(tileRowsNum) * (uint8_t)(tileColsNum); + rwpk[i]->projPicWidth = m_fullWidth; + rwpk[i]->projPicHeight = m_fullHeight; + rwpk[i]->packedPicWidth = width; + rwpk[i]->packedPicHeight = height; + DELETE_ARRAY(rwpk[i]->rectRegionPacking); + m_tmpRegionrwpk = new RectangularRegionWisePacking[rwpk[i]->numRegions]; + if (!m_tmpRegionrwpk) { + return ret; + } + rwpk[i]->rectRegionPacking = m_tmpRegionrwpk; + } + vector packetSizeOfEachArr(arrangeNum, 0); + uint32_t totalPacketNum = 0; + for (uint32_t i = 0; i < arrangeNum; i++){ + totalPacketNum += mergeLayout[i]->tilesLayout.tileColsNum * mergeLayout[i]->tilesLayout.tileRowsNum; + packetSizeOfEachArr[i] = totalPacketNum; + } + + uint8_t regIdx = 0; + uint32_t packetArrCnt = 0; + for (itPacket = packets.begin(); ; itPacket++) { + if (regIdx >= totalPacketNum) + break; + + if (itPacket == packets.end()) + { + if (totalPacketNum > packets.size()) + { + itPacket = packets.begin(); + } + else if (totalPacketNum == packets.size()) + { + break; + } + else + { + OMAF_LOG(LOG_ERROR, "total packet number is less than seleceted packet size!\n"); + return ret; + } + } + + if (regIdx >= packetSizeOfEachArr[packetArrCnt]){ + packetArrCnt++; + } + MediaPacket *onePacket = itPacket->second; + uint32_t realIdx = packetArrCnt == 0 ? 
regIdx : regIdx - packetSizeOfEachArr[packetArrCnt - 1]; + RectangularRegionWisePacking *rectReg = &(rwpk[packetArrCnt]->rectRegionPacking[realIdx]); + memset(rectReg, 0, sizeof(RectangularRegionWisePacking)); + + rectReg->transformType = 0; + rectReg->guardBandFlag = false; + + SRDInfo srd = onePacket->GetSRDInfo(); + if (qualityRanking == HIGHEST_QUALITY_RANKING) { + rectReg->projRegWidth = srd.width; + rectReg->projRegHeight = srd.height; + rectReg->projRegTop = srd.top; + rectReg->projRegLeft = srd.left; + + rectReg->packedRegWidth = srd.width; + rectReg->packedRegHeight = srd.height; + uint8_t rowIdx = realIdx / mergeLayout[packetArrCnt]->tilesLayout.tileColsNum; + uint8_t colIdx = realIdx % mergeLayout[packetArrCnt]->tilesLayout.tileColsNum; + rectReg->packedRegTop = rowIdx * srd.height; + rectReg->packedRegLeft = colIdx * srd.width; + } else { + rectReg->projRegWidth = (uint32_t)round((float)(srd.width * m_fullWidth) / (float)(mergeLayout[packetArrCnt]->mergedWidth)); + rectReg->projRegHeight = (uint32_t)round((float)(srd.height * m_fullHeight) / (float)(mergeLayout[packetArrCnt]->mergedHeight)); + rectReg->projRegTop = (uint32_t)round((float)(srd.top * m_fullHeight) / (float)(mergeLayout[packetArrCnt]->mergedHeight)); + rectReg->projRegLeft = (uint32_t)round((float)(srd.left * m_fullWidth) / (float)(mergeLayout[packetArrCnt]->mergedWidth)); + + rectReg->packedRegWidth = srd.width; + rectReg->packedRegHeight = srd.height; + rectReg->packedRegTop = srd.top; + rectReg->packedRegLeft = srd.left; + } + + rectReg->leftGbWidth = 0; + rectReg->rightGbWidth = 0; + rectReg->topGbHeight = 0; + rectReg->bottomGbHeight = 0; + rectReg->gbNotUsedForPredFlag = true; + rectReg->gbType0 = 0; + rectReg->gbType1 = 0; + rectReg->gbType2 = 0; + rectReg->gbType3 = 0; + + regIdx++; + } + packetSizeOfEachArr.clear(); + return rwpk; +} + +vector> OmafTilesStitch::CalculateMergedRwpkForCubeMap(QualityRank qualityRanking, + bool hasPacketLost, + bool hasLayoutChanged) { + vector> ret; + if (0 == m_selectedTiles.size()) return ret; + + if (hasPacketLost && hasLayoutChanged) { + OMAF_LOG(LOG_ERROR, "Packet lost and layout change can't happen at the same time !\n"); + return ret; + } + + std::map>::iterator it; + it = m_selectedTiles.find(qualityRanking); + if (it == m_selectedTiles.end()) { + OMAF_LOG(LOG_ERROR, "Can't find media packets of specified quality ranking !\n"); + return ret; + } + + std::map packets = it->second; + if (0 == packets.size()) { + OMAF_LOG(LOG_ERROR, "Invalid media packets size for specified quality ranking !\n"); + return ret; + } + + vector mergeLayout; + std::map>::iterator itArr; + std::map::iterator itPacket; + + if (0 == m_initTilesMergeArr.size() && 0 == m_updatedTilesMergeArr.size()) { + OMAF_LOG(LOG_ERROR, "There is no tiles merge layout before calculating rwpk !\n"); + return ret; + } + OMAF_LOG(LOG_INFO, "hasPacketLost: %d, hasLayoutChanged: %d\n", hasPacketLost, hasLayoutChanged); + if (!hasPacketLost && !hasLayoutChanged) { + if (0 == m_updatedTilesMergeArr.size()) { + itArr = m_initTilesMergeArr.find(qualityRanking); + if (itArr == m_initTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find tiles merge layout for specified quality ranking !\n"); + return ret; + } + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "NULL tiles merge layout for specified quality ranking !\n"); + return ret; + } + } else { + itArr = m_updatedTilesMergeArr.find(qualityRanking); + if (itArr == m_updatedTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find 
tiles merge layout for specified quality ranking !\n"); + return ret; + } + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "NULL tiles merge layout for specified quality ranking !\n"); + return ret; + } + } + } else if (!hasPacketLost && hasLayoutChanged) { + itArr = m_updatedTilesMergeArr.find(qualityRanking); + if (itArr == m_updatedTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find tiles merge layout for specified quality ranking !\n"); + return ret; + } + + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "NULL tiles merge layout for specified quality ranking !\n"); + return ret; + } + } + + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "NULL tiles merge layout for specified quality ranking !\n"); + return ret; + } + + uint32_t arrangeNum = mergeLayout.size(); + vector> rwpk; // = make_unique_vcd(); + for (uint32_t i = 0; i < arrangeNum; i++) { + std::unique_ptr onerwpk = make_unique_vcd(); + if (!onerwpk) return ret; + rwpk.push_back(std::move(onerwpk)); + } + for (uint32_t i = 0; i < mergeLayout.size(); i++) + { + uint32_t width = mergeLayout[i]->mergedWidth; + uint32_t height = mergeLayout[i]->mergedHeight; + uint8_t tileRowsNum = mergeLayout[i]->tilesLayout.tileRowsNum; + uint8_t tileColsNum = mergeLayout[i]->tilesLayout.tileColsNum; + + rwpk[i]->constituentPicMatching = 0; + rwpk[i]->numRegions = (uint8_t)(tileRowsNum) * (uint8_t)(tileColsNum); + rwpk[i]->projPicWidth = m_fullWidth; + rwpk[i]->projPicHeight = m_fullHeight; + rwpk[i]->packedPicWidth = width; + rwpk[i]->packedPicHeight = height; + DELETE_ARRAY(rwpk[i]->rectRegionPacking); + m_tmpRegionrwpk = new RectangularRegionWisePacking[rwpk[i]->numRegions]; + if (!m_tmpRegionrwpk) { + return ret; + } + rwpk[i]->rectRegionPacking = m_tmpRegionrwpk; + } + + vector packetSizeOfEachArr(arrangeNum, 0); + uint32_t totalPacketNum = 0; + for (uint32_t i = 0; i < arrangeNum; i++){ + totalPacketNum += mergeLayout[i]->tilesLayout.tileColsNum * mergeLayout[i]->tilesLayout.tileRowsNum; + packetSizeOfEachArr[i] = totalPacketNum; + } + + uint8_t regIdx = 0; + uint32_t packetArrCnt = 0; + for (itPacket = packets.begin(); ; itPacket++) { + if (regIdx >= totalPacketNum) + break; + + if (itPacket == packets.end()) + { + if (totalPacketNum > packets.size()) + { + itPacket = packets.begin(); + } + else if (totalPacketNum == packets.size()) + { + break; + } + else + { + OMAF_LOG(LOG_ERROR, " total packet number is less than seleceted packet size!\n"); + return ret; + } + } + + if (regIdx >= packetSizeOfEachArr[packetArrCnt]){ + packetArrCnt++; + } + MediaPacket *onePacket = itPacket->second; + uint32_t realIdx = packetArrCnt == 0 ? 
regIdx : regIdx - packetSizeOfEachArr[packetArrCnt - 1]; + RectangularRegionWisePacking *rectReg = &(rwpk[packetArrCnt]->rectRegionPacking[realIdx]); + memset(rectReg, 0, sizeof(RectangularRegionWisePacking)); + + const RegionWisePacking &tileRwpk = onePacket->GetRwpk(); + rectReg->transformType = tileRwpk.rectRegionPacking[0].transformType; + rectReg->guardBandFlag = false; + + SRDInfo srd = onePacket->GetSRDInfo(); + if (qualityRanking == HIGHEST_QUALITY_RANKING) { + rectReg->projRegWidth = tileRwpk.rectRegionPacking[0].projRegWidth; + rectReg->projRegHeight = tileRwpk.rectRegionPacking[0].projRegHeight; + rectReg->projRegTop = tileRwpk.rectRegionPacking[0].projRegTop; + rectReg->projRegLeft = tileRwpk.rectRegionPacking[0].projRegLeft; + + rectReg->packedRegWidth = tileRwpk.rectRegionPacking[0].packedRegWidth; + rectReg->packedRegHeight = tileRwpk.rectRegionPacking[0].packedRegHeight; + uint8_t rowIdx = realIdx / mergeLayout[packetArrCnt]->tilesLayout.tileColsNum; + uint8_t colIdx = realIdx % mergeLayout[packetArrCnt]->tilesLayout.tileColsNum; + rectReg->packedRegTop = rowIdx * srd.height; + rectReg->packedRegLeft = colIdx * srd.width; + } else { + rectReg->projRegWidth = + (uint32_t)round((float)(tileRwpk.rectRegionPacking[0].projRegWidth * m_fullWidth) / (float)(mergeLayout[packetArrCnt]->mergedWidth)); + rectReg->projRegHeight = + (uint32_t)round((float)(tileRwpk.rectRegionPacking[0].projRegHeight * m_fullHeight) / (float)(mergeLayout[packetArrCnt]->mergedHeight)); + rectReg->projRegTop = + (uint32_t)round((float)(tileRwpk.rectRegionPacking[0].projRegTop * m_fullHeight) / (float)(mergeLayout[packetArrCnt]->mergedHeight)); + rectReg->projRegLeft = + (uint32_t)round((float)(tileRwpk.rectRegionPacking[0].projRegLeft * m_fullWidth) / (float)(mergeLayout[packetArrCnt]->mergedWidth)); + + rectReg->packedRegWidth = tileRwpk.rectRegionPacking[0].packedRegWidth; + rectReg->packedRegHeight = tileRwpk.rectRegionPacking[0].packedRegHeight; + rectReg->packedRegTop = tileRwpk.rectRegionPacking[0].packedRegTop; + rectReg->packedRegLeft = tileRwpk.rectRegionPacking[0].packedRegLeft; + } + + rectReg->leftGbWidth = 0; + rectReg->rightGbWidth = 0; + rectReg->topGbHeight = 0; + rectReg->bottomGbHeight = 0; + rectReg->gbNotUsedForPredFlag = true; + rectReg->gbType0 = 0; + rectReg->gbType1 = 0; + rectReg->gbType2 = 0; + rectReg->gbType3 = 0; + + regIdx++; + } + packetSizeOfEachArr.clear(); + return rwpk; +} + +vector> OmafTilesStitch::CalculateMergedRwpkForPlanar(QualityRank qualityRanking, + bool hasPacketLost, + bool hasLayoutChanged) { + vector> ret; + if (0 == m_selectedTiles.size()) return ret; + + if (hasPacketLost && hasLayoutChanged) { + OMAF_LOG(LOG_ERROR, "Packet lost and layout change can't happen at the same time !\n"); + return ret; + } + + std::map>::iterator it; + it = m_selectedTiles.find(qualityRanking); + if (it == m_selectedTiles.end()) { + OMAF_LOG(LOG_ERROR, "Can't find media packets of specified quality ranking !\n"); + return ret; + } + + std::map packets = it->second; + if (0 == packets.size()) { + OMAF_LOG(LOG_ERROR, "Invalid media packets size for specified quality ranking !\n"); + return ret; + } + + vector mergeLayout; + std::map>::iterator itArr; + std::map::iterator itPacket; + + if (0 == m_initTilesMergeArr.size() && 0 == m_updatedTilesMergeArr.size()) { + OMAF_LOG(LOG_ERROR, "There is no tiles merge layout before calculating rwpk !\n"); + return ret; + } + OMAF_LOG(LOG_INFO, "hasPacketLost: %d, hasLayoutChanged: %d\n", hasPacketLost, hasLayoutChanged); + if (!hasPacketLost 
&& !hasLayoutChanged) { + if (0 == m_updatedTilesMergeArr.size()) { + itArr = m_initTilesMergeArr.find(qualityRanking); + if (itArr == m_initTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find tiles merge layout for specified quality ranking !\n"); + return ret; + } + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "nullptr tiles merge layout for specified quality ranking !\n"); + return ret; + } + } else { + itArr = m_updatedTilesMergeArr.find(qualityRanking); + if (itArr == m_updatedTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find tiles merge layout for specified quality ranking !\n"); + return ret; + } + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "nullptr tiles merge layout for specified quality ranking !\n"); + return ret; + } + } + } else if (!hasPacketLost && hasLayoutChanged) { + itArr = m_updatedTilesMergeArr.find(qualityRanking); + if (itArr == m_updatedTilesMergeArr.end()) { + OMAF_LOG(LOG_ERROR, "Can't find tiles merge layout for specified quality ranking !\n"); + return ret; + } + + mergeLayout = itArr->second; + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "nullptr tiles merge layout for specified quality ranking !\n"); + return ret; + } + } + + if (mergeLayout.empty()) { + OMAF_LOG(LOG_ERROR, "nullptr tiles merge layout for specified quality ranking !\n"); + return ret; + } + + uint32_t arrangeNum = mergeLayout.size(); + vector> rwpk; // = make_unique_vcd(); + for (uint32_t i = 0; i < arrangeNum; i++) { + std::unique_ptr onerwpk = make_unique_vcd(); + if (!onerwpk) return ret; + rwpk.push_back(std::move(onerwpk)); + } + for (uint32_t i = 0; i < mergeLayout.size(); i++) + { + uint32_t width = mergeLayout[i]->mergedWidth; + uint32_t height = mergeLayout[i]->mergedHeight; + uint8_t tileRowsNum = mergeLayout[i]->tilesLayout.tileRowsNum; + uint8_t tileColsNum = mergeLayout[i]->tilesLayout.tileColsNum; + + rwpk[i]->constituentPicMatching = 0; + rwpk[i]->numRegions = (uint8_t)(tileRowsNum) * (uint8_t)(tileColsNum); + rwpk[i]->packedPicWidth = width; + rwpk[i]->packedPicHeight = height; + DELETE_ARRAY(rwpk[i]->rectRegionPacking); + m_tmpRegionrwpk = new RectangularRegionWisePacking[rwpk[i]->numRegions]; + if (!m_tmpRegionrwpk) { + return ret; + } + rwpk[i]->rectRegionPacking = m_tmpRegionrwpk; + } + vector packetSizeOfEachArr(arrangeNum, 0); + uint32_t totalPacketNum = 0; + for (uint32_t i = 0; i < arrangeNum; i++){ + totalPacketNum += mergeLayout[i]->tilesLayout.tileColsNum * mergeLayout[i]->tilesLayout.tileRowsNum; + packetSizeOfEachArr[i] = totalPacketNum; + } + + uint8_t regIdx = 0; + uint32_t packetArrCnt = 0; + for (itPacket = packets.begin(); ; itPacket++) { + if (regIdx >= totalPacketNum) + break; + + if (itPacket == packets.end()) + { + if (totalPacketNum > packets.size()) + { + itPacket = packets.begin(); + } + else if (totalPacketNum == packets.size()) + { + break; + } + else + { + OMAF_LOG(LOG_ERROR, "total packet number is less than seleceted packet size!\n"); + return ret; + } + } + + if (regIdx >= packetSizeOfEachArr[packetArrCnt]){ + packetArrCnt++; + } + MediaPacket *onePacket = itPacket->second; + uint32_t origProjPicWidth = (onePacket->GetRwpk()).projPicWidth; + uint32_t origProjPicHeight = (onePacket->GetRwpk()).projPicHeight; + OMAF_LOG(LOG_INFO, "Orig ProjPicWidth %d and ProjPicHeight %d\n", origProjPicWidth, origProjPicHeight); + rwpk[packetArrCnt]->projPicWidth = origProjPicWidth; + rwpk[packetArrCnt]->projPicHeight = origProjPicHeight; + + uint32_t realIdx = packetArrCnt == 
0 ? regIdx : regIdx - packetSizeOfEachArr[packetArrCnt - 1]; + RectangularRegionWisePacking *rectReg = &(rwpk[packetArrCnt]->rectRegionPacking[realIdx]); + memset(rectReg, 0, sizeof(RectangularRegionWisePacking)); + + rectReg->transformType = 0; + rectReg->guardBandFlag = false; + + SRDInfo srd = onePacket->GetSRDInfo(); + + rectReg->projRegWidth = srd.width; + rectReg->projRegHeight = srd.height; + rectReg->projRegTop = srd.top; + rectReg->projRegLeft = srd.left; + + rectReg->packedRegWidth = srd.width; + rectReg->packedRegHeight = srd.height; + uint8_t rowIdx = realIdx / mergeLayout[packetArrCnt]->tilesLayout.tileColsNum; + uint8_t colIdx = realIdx % mergeLayout[packetArrCnt]->tilesLayout.tileColsNum; + rectReg->packedRegTop = rowIdx * srd.height; + rectReg->packedRegLeft = colIdx * srd.width; + + rectReg->leftGbWidth = 0; + rectReg->rightGbWidth = 0; + rectReg->topGbHeight = 0; + rectReg->bottomGbHeight = 0; + rectReg->gbNotUsedForPredFlag = true; + rectReg->gbType0 = 0; + rectReg->gbType1 = 0; + rectReg->gbType2 = 0; + rectReg->gbType3 = 0; + + regIdx++; + } + packetSizeOfEachArr.clear(); + return rwpk; +} + +int32_t OmafTilesStitch::GenerateTilesMergeArrangement() { + if (0 == m_selectedTiles.size()) return OMAF_ERROR_INVALID_DATA; + + if (0 == m_initTilesMergeArr.size()) { + m_initTilesMergeArr = CalculateTilesMergeArrangement(); + if (0 == m_initTilesMergeArr.size()) { + OMAF_LOG(LOG_ERROR, "Failed to calculate tiles merged arrangement !\n"); + return OMAF_ERROR_TILES_MERGE_ARRANGEMENT; + } + } else { + if (m_updatedTilesMergeArr.size()) { + if (m_initTilesMergeArr.size() != m_updatedTilesMergeArr.size()) + OMAF_LOG(LOG_INFO, "The number of tiles merged video streams has been changed compared with the number at the beginning !\n"); + + std::map>::iterator itArr; + for (itArr = m_updatedTilesMergeArr.begin(); itArr != m_updatedTilesMergeArr.end();) { + vector layOutArr = itArr->second; + for (uint32_t i = 0; i < layOutArr.size(); i++) + { + if (layOutArr[i]) { + DELETE_ARRAY(layOutArr[i]->tilesLayout.tileRowHeight); + DELETE_ARRAY(layOutArr[i]->tilesLayout.tileColWidth); + SAFE_DELETE(layOutArr[i]); + } + } + m_updatedTilesMergeArr.erase(itArr++); + } + m_updatedTilesMergeArr.clear(); + } + + m_updatedTilesMergeArr = CalculateTilesMergeArrangement(); + if (0 == m_updatedTilesMergeArr.size()) { + OMAF_LOG(LOG_ERROR, "Failed to calculate tiles merged arrangement\n"); + return OMAF_ERROR_TILES_MERGE_ARRANGEMENT; + } + } + + return ERROR_NONE; +} + +int32_t OmafTilesStitch::IsArrChanged(QualityRank qualityRanking, vector layOut, vector initLayOut, bool *isArrChanged, bool *packetLost, bool *arrangeChanged) +{ + if (layOut.empty()) { + OMAF_LOG(LOG_ERROR, " Invalid tile merge arrangement data!\n"); + return OMAF_ERROR_NULL_PTR; + } + if (isArrChanged == NULL || packetLost == NULL || arrangeChanged == NULL) { + OMAF_LOG(LOG_ERROR, "Invalid flags for arrangement!\n"); + return OMAF_ERROR_INVALID_DATA; + } + if (layOut.size() != initLayOut.size()) { + *isArrChanged = true; + *arrangeChanged = true; + } + else { + for (uint32_t i = 0; i < layOut.size(); i++) { + uint32_t width = layOut[i]->mergedWidth; + uint32_t height = layOut[i]->mergedHeight; + + uint32_t initWidth = initLayOut[i]->mergedWidth; + uint32_t initHeight = initLayOut[i]->mergedHeight; + + if ((width == initWidth) && (height < initHeight)) { + OMAF_LOG(LOG_INFO, "Packet not lost but tiles merge layout has been changed !\n"); + *arrangeChanged = true; + *isArrChanged = true; + } + + if ((height == initHeight) && (width < 
initWidth)) { + OMAF_LOG(LOG_INFO, "Packet not lost but tiles merge layout has been changed !\n"); + *arrangeChanged = true; + *isArrChanged = true; + } + + if ((width < initWidth) && (height < initHeight)) { + OMAF_LOG(LOG_INFO, "Packet not lost but tiles merge layout has been changed !\n"); + *arrangeChanged = true; + *isArrChanged = true; + } + + if ((width > initWidth) || (height > initHeight)) { + OMAF_LOG(LOG_INFO, "Packet not lost but tiles merge layout has been changed !\n"); + *arrangeChanged = true; + *isArrChanged = true; + } + } + } + OMAF_LOG(LOG_INFO, "arrangeChanged %d, isArrChanged %d\n", *arrangeChanged, *isArrChanged); + return ERROR_NONE; +} + +int32_t OmafTilesStitch::GenerateMergedVideoHeaders(bool arrangeChanged, QualityRank qualityRanking, + vector layOut, + vector initLayOut, + std::map packets) { + int32_t ret = ERROR_NONE; + if (layOut.empty()) { + OMAF_LOG(LOG_ERROR, "INVALID tile merge arrangement data!\n"); + return OMAF_ERROR_NULL_PTR; + } + // 1. clear all headers + std::map>>::iterator itMergeHrd; + for (itMergeHrd = m_mergedVideoHeaders.begin(); itMergeHrd != m_mergedVideoHeaders.end(); ) { + QualityRank oneQualityRanking = itMergeHrd->first; + vector> oneVideoHeaderArr = m_mergedVideoHeaders[oneQualityRanking]; + for (uint32_t i = 0; i < oneVideoHeaderArr.size(); i++) { + std::map::iterator itHdr = oneVideoHeaderArr[i].begin(); + if (itHdr == oneVideoHeaderArr[i].end()) + { + OMAF_LOG(LOG_ERROR, "Video header map is empty!\n"); + return OMAF_ERROR_INVALID_DATA; + } + uint8_t *headers = itHdr->second; + DELETE_ARRAY(headers); + oneVideoHeaderArr[i].clear(); + } + oneVideoHeaderArr.clear(); + m_mergedVideoHeaders.erase(itMergeHrd++); + } + // 2. generate new headers. + if ((qualityRanking == HIGHEST_QUALITY_RANKING) && (0 == m_mergedVideoHeaders[qualityRanking].size())) { + if (qualityRanking == HIGHEST_QUALITY_RANKING) { + if (!m_fullResVideoHeader) { + OMAF_LOG(LOG_ERROR, "nullptr original video headers data !\n"); + return OMAF_ERROR_NULL_PTR; + } + vector> videoHeaders; + for (uint32_t i = 0; i < layOut.size(); i++) { + uint32_t headersSize = 0; + uint8_t *headers = new uint8_t[1024]; + if (!headers) return OMAF_ERROR_NULL_PTR; + + memset(headers, 0, 1024); + memcpy_s(headers, 1024, m_fullResVideoHeader, m_fullResVPSSize); + headersSize += m_fullResVPSSize; + + uint8_t *tmp = headers + m_fullResVPSSize; + m_360scvpParam->pInputBitstream = m_fullResVideoHeader + m_fullResVPSSize; + m_360scvpParam->inputBitstreamLen = m_fullResSPSSize; + m_360scvpParam->destWidth = (arrangeChanged || initLayOut.size() < i + 1 ? layOut[i]->mergedWidth : initLayOut[i]->mergedWidth); + m_360scvpParam->destHeight = (arrangeChanged || initLayOut.size() < i + 1 ? 
layOut[i]->mergedHeight : initLayOut[i]->mergedHeight); + m_360scvpParam->pOutputBitstream = tmp; + ret = I360SCVP_GenerateSPS(m_360scvpParam, m_360scvpHandle); + if (ret) { + DELETE_ARRAY(headers); + return OMAF_ERROR_SCVP_OPERATION_FAILED; + } + + headersSize += m_360scvpParam->outputBitstreamLen; + tmp += m_360scvpParam->outputBitstreamLen; + m_360scvpParam->pInputBitstream = m_fullResVideoHeader + m_fullResVPSSize + m_fullResSPSSize; + m_360scvpParam->inputBitstreamLen = m_fullResPPSSize; + m_360scvpParam->pOutputBitstream = tmp; + if (arrangeChanged) { + ret = I360SCVP_GeneratePPS(m_360scvpParam, &(layOut[i]->tilesLayout), m_360scvpHandle); + } else { + ret = I360SCVP_GeneratePPS(m_360scvpParam, &(initLayOut[i]->tilesLayout), m_360scvpHandle); + } + + if (ret) { + DELETE_ARRAY(headers); + return OMAF_ERROR_SCVP_OPERATION_FAILED; + } + + headersSize += m_360scvpParam->outputBitstreamLen; + + std::map oneVideoHeader; + oneVideoHeader.insert(std::make_pair(headersSize, headers)); + videoHeaders.push_back(oneVideoHeader); + } + m_mergedVideoHeaders[qualityRanking] = std::move(videoHeaders); + } + } + + if ((qualityRanking > HIGHEST_QUALITY_RANKING) && (0 == m_mergedVideoHeaders[qualityRanking].size())) { + if (qualityRanking > HIGHEST_QUALITY_RANKING) { + std::map::iterator itPacket; + itPacket = packets.begin(); + if (itPacket == packets.end()) + { + OMAF_LOG(LOG_ERROR, "Packets map is empty!\n"); + return OMAF_ERROR_INVALID_DATA; + } + MediaPacket *onePacket = itPacket->second; + if (!(onePacket->GetHasVideoHeader())) { + OMAF_LOG(LOG_ERROR, "There should be video headers here !\n"); + return OMAF_ERROR_INVALID_DATA; + } + //get original VPS/SPS/PPS + uint32_t hrdSize = onePacket->GetVideoHeaderSize(); + uint8_t *headersData = new uint8_t[hrdSize]; + if (!headersData) { + return OMAF_ERROR_NULL_PTR; + } + memcpy_s(headersData, hrdSize, onePacket->Payload(), hrdSize); + vector> videoHeaders; + for (uint32_t i = 0; i < layOut.size(); i++) { + //generate new VPS/SPS/PPS for merged video + uint32_t headersSize = 0; + uint8_t *headers = new uint8_t[1024]; + if (!headers) + { + DELETE_ARRAY(headersData); + return OMAF_ERROR_NULL_PTR; + } + memset(headers, 0, 1024); + uint32_t vpsLen = onePacket->GetVPSLen(); + uint32_t spsLen = onePacket->GetSPSLen(); + uint32_t ppsLen = onePacket->GetPPSLen(); + memcpy_s(headers, vpsLen, headersData, vpsLen); + headersSize += vpsLen; + + uint8_t *tmp = headers + vpsLen; + m_360scvpParam->pInputBitstream = headersData + vpsLen; + m_360scvpParam->inputBitstreamLen = spsLen; + m_360scvpParam->destWidth = layOut[i]->mergedWidth; + m_360scvpParam->destHeight = layOut[i]->mergedHeight; + m_360scvpParam->pOutputBitstream = tmp; + ret = I360SCVP_GenerateSPS(m_360scvpParam, m_360scvpHandle); + if (ret) { + DELETE_ARRAY(headersData); + DELETE_ARRAY(headers); + return OMAF_ERROR_SCVP_OPERATION_FAILED; + } + + headersSize += m_360scvpParam->outputBitstreamLen; + tmp += m_360scvpParam->outputBitstreamLen; + m_360scvpParam->pInputBitstream = headersData + vpsLen + spsLen; + m_360scvpParam->inputBitstreamLen = ppsLen; + m_360scvpParam->pOutputBitstream = tmp; + + ret = I360SCVP_GeneratePPS(m_360scvpParam, &(layOut[i]->tilesLayout), m_360scvpHandle); + if (ret) { + DELETE_ARRAY(headersData); + DELETE_ARRAY(headers); + return OMAF_ERROR_SCVP_OPERATION_FAILED; + } + + headersSize += m_360scvpParam->outputBitstreamLen; + + std::map oneVideoHeader; + oneVideoHeader.insert(std::make_pair(headersSize, headers)); + videoHeaders.push_back(oneVideoHeader); + } + 
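+      // Each split layout of this lower-quality stream now has its own parameter-set
+      // block: the original VPS plus an SPS/PPS pair regenerated for the merged
+      // sub-picture size and tile layout. Cache the whole set per quality ranking so
+      // the stitch step can prepend it whenever fresh headers are required.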
m_mergedVideoHeaders[qualityRanking] = std::move(videoHeaders); + DELETE_ARRAY(headersData); + } + } + return ret; +} + +vector> OmafTilesStitch::GenerateMergedRWPK(QualityRank qualityRanking, bool packetLost, bool arrangeChanged) { + vector> rwpk; + if (m_projFmt == VCD::OMAF::ProjectionFormat::PF_ERP) { + rwpk = CalculateMergedRwpkForERP(qualityRanking, packetLost, arrangeChanged); + } else if (m_projFmt == VCD::OMAF::ProjectionFormat::PF_CUBEMAP) { + rwpk = CalculateMergedRwpkForCubeMap(qualityRanking, packetLost, arrangeChanged); + } else if (m_projFmt == VCD::OMAF::ProjectionFormat::PF_PLANAR) { + rwpk = CalculateMergedRwpkForPlanar(qualityRanking, packetLost, arrangeChanged); + } + + return rwpk; +} + +int32_t OmafTilesStitch::UpdateMergedVideoHeadersForLowQualityRank(bool isEmptyHeader, + std::map packets, QualityRank qualityRanking, + TilesMergeArrangement *layOut) { + int32_t ret = ERROR_NONE; + std::map::iterator itPacket; + if (isEmptyHeader) { + itPacket = packets.begin(); + if (itPacket == packets.end()) + { + OMAF_LOG(LOG_ERROR, "Packets map is empty!\n"); + return OMAF_ERROR_INVALID_DATA; + } + MediaPacket *onePacket = itPacket->second; + if (!(onePacket->GetHasVideoHeader())) { + OMAF_LOG(LOG_ERROR, "There should be video headers here !\n"); + return OMAF_ERROR_INVALID_DATA; + } + //get original VPS/SPS/PPS + uint32_t hrdSize = onePacket->GetVideoHeaderSize(); + uint8_t *headersData = new uint8_t[hrdSize]; + if (!headersData) { + return OMAF_ERROR_NULL_PTR; + } + memcpy_s(headersData, hrdSize, onePacket->Payload(), hrdSize); + + //generate new VPS/SPS/PPS for merged video + uint32_t headersSize = 0; + uint8_t *headers = new uint8_t[1024]; + if (!headers) + { + DELETE_ARRAY(headersData); + return OMAF_ERROR_NULL_PTR; + } + memset(headers, 0, 1024); + uint32_t vpsLen = onePacket->GetVPSLen(); + uint32_t spsLen = onePacket->GetSPSLen(); + uint32_t ppsLen = onePacket->GetPPSLen(); + memcpy_s(headers, vpsLen, headersData, vpsLen); + headersSize += vpsLen; + + uint8_t *tmp = headers + vpsLen; + m_360scvpParam->pInputBitstream = headersData + vpsLen; + m_360scvpParam->inputBitstreamLen = spsLen; + m_360scvpParam->destWidth = layOut->mergedWidth; + m_360scvpParam->destHeight = layOut->mergedHeight; + m_360scvpParam->pOutputBitstream = tmp; + ret = I360SCVP_GenerateSPS(m_360scvpParam, m_360scvpHandle); + if (ret) { + DELETE_ARRAY(headersData); + DELETE_ARRAY(headers); + return OMAF_ERROR_SCVP_OPERATION_FAILED; + } + + headersSize += m_360scvpParam->outputBitstreamLen; + tmp += m_360scvpParam->outputBitstreamLen; + m_360scvpParam->pInputBitstream = headersData + vpsLen + spsLen; + m_360scvpParam->inputBitstreamLen = ppsLen; + m_360scvpParam->pOutputBitstream = tmp; + + ret = I360SCVP_GeneratePPS(m_360scvpParam, &(layOut->tilesLayout), m_360scvpHandle); + if (ret) { + DELETE_ARRAY(headersData); + DELETE_ARRAY(headers); + return OMAF_ERROR_SCVP_OPERATION_FAILED; + } + + headersSize += m_360scvpParam->outputBitstreamLen; + + std::map oneVideoHeader; + oneVideoHeader.insert(std::make_pair(headersSize, headers)); + m_mergedVideoHeaders[qualityRanking].push_back(std::move(oneVideoHeader)); + DELETE_ARRAY(headersData); + } + return ret; +} + +int32_t OmafTilesStitch::InitMergedDataAndRealSize(QualityRank qualityRanking, std::map packets, + char* mergedData, uint64_t* realSize, uint32_t index, + TilesMergeArrangement *tilesArr) { + if (packets.empty()) { + OMAF_LOG(LOG_ERROR, "packets is empty!\n"); + return OMAF_ERROR_INVALID_DATA; + } + if (mergedData == NULL || realSize == NULL) { + 
OMAF_LOG(LOG_ERROR, "merged data or real size is null ptr!\n"); + return OMAF_ERROR_NULL_PTR; + } + if (m_needHeaders) { + vector> videoHeaders = m_mergedVideoHeaders[qualityRanking]; + bool isEmptyHeaders = (videoHeaders.empty() ? true : videoHeaders[index].empty()); + if (qualityRanking != HIGHEST_QUALITY_RANKING) { + if (ERROR_NONE != UpdateMergedVideoHeadersForLowQualityRank(isEmptyHeaders, packets, qualityRanking, tilesArr)) { + OMAF_LOG(LOG_ERROR, "Update merged video headers for low quality ranking failed!\n"); + return OMAF_ERROR_OPERATION; + } + } + if (m_mergedVideoHeaders[qualityRanking].empty()) { + OMAF_LOG(LOG_ERROR, "Video headers for Quality %d is empty!\n", qualityRanking); + return OMAF_ERROR_INVALID_DATA; + } + if (m_mergedVideoHeaders[qualityRanking][index].empty()) { + OMAF_LOG(LOG_ERROR, "Failed to generate merged video headers for quality ranking %d split %d\n", qualityRanking, index); + return OMAF_ERROR_INVALID_DATA; + } + std::map oneVideoHeader = (m_mergedVideoHeaders[qualityRanking][index]); + std::map::iterator itHdr = oneVideoHeader.begin(); + if (itHdr == oneVideoHeader.end()) + { + OMAF_LOG(LOG_ERROR, "Video header map is empty!\n"); + return OMAF_ERROR_INVALID_DATA; + } + uint8_t *headers = itHdr->second; + uint32_t headersLen = itHdr->first; + if (!headers) { + return OMAF_ERROR_NULL_PTR; + } + memcpy_s(mergedData, headersLen, headers, headersLen); + *realSize += headersLen; + } + return ERROR_NONE; +} + +int32_t OmafTilesStitch::UpdateMergedDataAndRealSize( + QualityRank qualityRanking, std::map packets, + uint8_t tileColsNum, bool arrangeChanged, uint32_t width, uint32_t height, + uint32_t initWidth, uint32_t initHeight, char *mergedData, uint64_t *realSize, + uint32_t index, vector needPacketSize, uint64_t layoutNum) { + + uint32_t tilesIdx = 0; + int32_t tileWidth = 0; + int32_t tileHeight = 0; + if (tileColsNum == 0) { + OMAF_LOG(LOG_ERROR, "tile column number cannot be zero!\n"); + return OMAF_ERROR_INVALID_DATA; + } + if (mergedData == NULL || realSize == NULL) { + OMAF_LOG(LOG_ERROR, "merged data or realSize is null ptr!\n"); + return OMAF_ERROR_NULL_PTR; + } + // calculate real size for merged packets + std::map::iterator itPacket = packets.begin(); + if (index > 0) + std::advance(itPacket, needPacketSize[index - 1]); + if (itPacket == packets.end()) + { + if (needPacketSize[layoutNum-1] > packets.size()) + { + itPacket = packets.begin(); + } + else + { + OMAF_LOG(LOG_ERROR, "ERROR in selected media packets for tiles stitching !\n"); + return OMAF_ERROR_INVALID_DATA; + } + } + + uint32_t packetSize = index == 0 ? 
needPacketSize[index] : needPacketSize[index] - needPacketSize[index - 1]; + uint32_t cnt = 0; + for (; ; itPacket++) { + if (itPacket == packets.end()) + { + if (needPacketSize[layoutNum-1] > packets.size()) + itPacket = packets.begin(); + } + cnt++; + if (cnt > packetSize) break; + if (itPacket == packets.end()) + { + OMAF_LOG(LOG_ERROR, "There is mismatch in needed packets size and actually selected media packets !\n"); + return OMAF_ERROR_INVALID_DATA; + } + MediaPacket *onePacket = NULL; + onePacket = itPacket->second; + if (!onePacket) + { + OMAF_LOG(LOG_ERROR, "Selected media packet is NULL !\n"); + return OMAF_ERROR_NULL_PTR; + } + //if (qualityRanking == HIGHEST_QUALITY_RANKING) { + std::map::iterator itSrc; + itSrc = m_sources.find(qualityRanking); + if (itSrc == m_sources.end()) + { + OMAF_LOG(LOG_ERROR, "Can't find source information corresponding to quality ranking %d\n", qualityRanking); + return OMAF_ERROR_INVALID_DATA; + } + SourceInfo srcInfo = itSrc->second; + + OMAF_LOG(LOG_INFO, "Original source width %d, height %d\n", srcInfo.width, srcInfo.height); + OMAF_LOG(LOG_INFO, "Merged source width %d, height %d\n", width, height); + if ((width != (uint32_t)(srcInfo.width)) || (height != (uint32_t)(srcInfo.height))) { + SRDInfo srd = onePacket->GetSRDInfo(); + if (!tileWidth || !tileHeight) { + tileWidth = srd.width; + tileHeight = srd.height; + } + + uint8_t colIdx = tilesIdx % tileColsNum; + uint8_t rowIdx = tilesIdx / tileColsNum; + uint16_t ctuIdx = + rowIdx * (tileHeight / LCU_SIZE) * ((tileWidth / LCU_SIZE) * tileColsNum) + colIdx * (tileWidth / LCU_SIZE); + // LOG(INFO)<< "SRD: " << srd.top << " and "<Payload(); + int32_t dataSize = onePacket->Size(); + if (!data || !dataSize) + { + OMAF_LOG(LOG_ERROR, "Invalid data in selected media packet !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + if (onePacket->GetHasVideoHeader()) { + data += (onePacket->GetVPSLen() + onePacket->GetSPSLen() + onePacket->GetPPSLen()); + dataSize -= (onePacket->GetVPSLen() + onePacket->GetSPSLen() + onePacket->GetPPSLen()); + } + if (!data || !dataSize) + { + OMAF_LOG(LOG_ERROR, "After video headers (VPS/SPS/PPS) are moved, invalid data in selected media packet !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + Nalu *nalu = new Nalu; + if (!nalu) { + return OMAF_ERROR_NULL_PTR; + } + + nalu->data = (uint8_t *)data; + nalu->dataSize = dataSize; + I360SCVP_ParseNAL(nalu, m_360scvpHandle); + + nalu->sliceHeaderLen = nalu->sliceHeaderLen - HEVC_NALUHEADER_LEN; + + m_360scvpParam->destWidth = (arrangeChanged ? width : initWidth); + m_360scvpParam->destHeight = (arrangeChanged ? 
height : initHeight); + m_360scvpParam->pInputBitstream = (uint8_t *)data; + m_360scvpParam->inputBitstreamLen = dataSize; + m_360scvpParam->pOutputBitstream = (uint8_t *)mergedData + *realSize; + I360SCVP_GenerateSliceHdr(m_360scvpParam, ctuIdx, m_360scvpHandle); + *realSize += m_360scvpParam->outputBitstreamLen; + memcpy_s(mergedData + *realSize, + (nalu->dataSize - (HEVC_STARTCODES_LEN + HEVC_NALUHEADER_LEN + nalu->sliceHeaderLen)), + (nalu->data + HEVC_STARTCODES_LEN + HEVC_NALUHEADER_LEN + nalu->sliceHeaderLen), + (nalu->dataSize - (HEVC_STARTCODES_LEN + HEVC_NALUHEADER_LEN + nalu->sliceHeaderLen))); + + *realSize += nalu->dataSize - (HEVC_STARTCODES_LEN + HEVC_NALUHEADER_LEN + nalu->sliceHeaderLen); + SAFE_DELETE(nalu); + tilesIdx++; + } else { + char *data = onePacket->Payload(); + int32_t dataSize = onePacket->Size(); + if (!data || !dataSize) + { + OMAF_LOG(LOG_ERROR, "Invalid data in selected media packet !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + if (onePacket->GetHasVideoHeader()) { + data += onePacket->GetVideoHeaderSize(); + dataSize -= onePacket->GetVideoHeaderSize(); + } + if (!data || !dataSize) + { + OMAF_LOG(LOG_ERROR, "After video headers (VPS/SPS/PPS) are moved, invalid data in selected media packet !\n"); + return OMAF_ERROR_INVALID_DATA; + } + memcpy_s(mergedData + *realSize, dataSize, data, dataSize); + *realSize += dataSize; + } + } + return ERROR_NONE; +} + +int32_t OmafTilesStitch::UpdateInitTilesMergeArr() { + + for (auto it = m_initTilesMergeArr.begin(); it != m_initTilesMergeArr.end();) { + vector layOut = it->second; + + for (uint32_t i = 0; i < layOut.size(); i++) { + if (layOut[i]) { + DELETE_ARRAY(layOut[i]->tilesLayout.tileRowHeight); + DELETE_ARRAY(layOut[i]->tilesLayout.tileColWidth); + } + SAFE_DELETE(layOut[i]); + } + layOut.clear(); + m_initTilesMergeArr.erase(it++); + } + m_initTilesMergeArr.clear(); + + for (auto it = m_updatedTilesMergeArr.begin(); it != m_updatedTilesMergeArr.end(); it++) { // for each quality + vector existedArr = it->second; + if (existedArr.empty()) return OMAF_ERROR_NULL_PTR; + vector arr; + for (uint32_t i = 0; i < existedArr.size(); i++) { // for each merged packet + TilesMergeArrangement *oneArr = new TilesMergeArrangement; + if (!oneArr) return OMAF_ERROR_NULL_PTR; + + oneArr->mergedWidth = existedArr[i]->mergedWidth; + oneArr->mergedHeight = existedArr[i]->mergedHeight; + oneArr->mostTopPos = existedArr[i]->mostTopPos; + oneArr->mostLeftPos = existedArr[i]->mostLeftPos; + oneArr->tilesLayout.tileRowsNum = existedArr[i]->tilesLayout.tileRowsNum; + oneArr->tilesLayout.tileColsNum = existedArr[i]->tilesLayout.tileColsNum; + oneArr->tilesLayout.tileRowHeight = new uint16_t[oneArr->tilesLayout.tileRowsNum]; + if (!(oneArr->tilesLayout.tileRowHeight)) + { + SAFE_DELETE(oneArr); + return OMAF_ERROR_NULL_PTR; + } + oneArr->tilesLayout.tileColWidth = new uint16_t[oneArr->tilesLayout.tileColsNum]; + if (!(oneArr->tilesLayout.tileColWidth)) { + delete[](oneArr->tilesLayout.tileRowHeight); + oneArr->tilesLayout.tileRowHeight = nullptr; + SAFE_DELETE(oneArr); + return OMAF_ERROR_NULL_PTR; + } + + for (uint8_t idx = 0; idx < oneArr->tilesLayout.tileRowsNum; idx++) + oneArr->tilesLayout.tileRowHeight[idx] = existedArr[i]->tilesLayout.tileRowHeight[idx]; + + for (uint8_t idx = 0; idx < oneArr->tilesLayout.tileColsNum; idx++) + oneArr->tilesLayout.tileColWidth[idx] = existedArr[i]->tilesLayout.tileColWidth[idx]; + arr.push_back(oneArr); + } // for each merged packet + m_initTilesMergeArr.insert(std::make_pair(it->first, arr)); + } 
// for each quality + return ERROR_NONE; +} + +int32_t OmafTilesStitch::GenerateOutputMergedPackets() { + if (m_outMergedStream.size()) { + m_outMergedStream.clear(); + } + // 1. generate m_updatedTilesMergeArr + int32_t ret = GenerateTilesMergeArrangement(); // GenerateTilesMergeArrAndRwpk(); + if (ret) return ret; + + if (0 == m_mergedVideoHeaders.size()) { + if (m_updatedTilesMergeArr.size()) { + OMAF_LOG(LOG_ERROR, "Incorrect operation in initialization stage !\n"); + return OMAF_ERROR_OPERATION; + } + } + + std::map> tilesMergeArr; + if (0 == m_updatedTilesMergeArr.size()) { + tilesMergeArr = m_initTilesMergeArr; + } else { + tilesMergeArr = m_updatedTilesMergeArr; + } + + bool isArrChanged = false; + // for each quality ranking + std::map>::iterator it; + for (it = tilesMergeArr.begin(); it != tilesMergeArr.end(); it++) { + auto qualityRanking = it->first; + bool packetLost = false; + bool arrangeChanged = false; + vector layOut = it->second; + if (layOut.empty()) return OMAF_ERROR_NULL_PTR; + vector initLayOut = m_initTilesMergeArr[qualityRanking]; + + // 1. check isArrChanged, packetLost and arrangeChanged flag. + ret = IsArrChanged(qualityRanking, layOut, initLayOut, &isArrChanged, &packetLost, &arrangeChanged); + if (ret != ERROR_NONE) + { + OMAF_LOG(LOG_ERROR, "error ocurrs in checking arrange changed!\n"); + return OMAF_ERROR_OPERATION; + } + + std::map packets = m_selectedTiles[qualityRanking]; + // 2. if arrangeChanged, then generate new merged video headers + ret = GenerateMergedVideoHeaders(arrangeChanged, qualityRanking, layOut, initLayOut, packets); + if (ret != ERROR_NONE) + { + OMAF_LOG(LOG_ERROR, "generate merged video headers failed! and error code is %d\n", ret); + return OMAF_ERROR_OPERATION; + } + // 3. generate rwpk structure for ERP/Cubemap + vector> rwpk = GenerateMergedRWPK(qualityRanking, packetLost, arrangeChanged); + if (rwpk.empty()) { + OMAF_LOG(LOG_ERROR, "Failed to generate merged rwpk!\n"); + return OMAF_ERROR_GENERATE_RWPK; + } + // 4. init mergedData and realSize with headers + std::map::iterator itPacket; + std::vector needAccumPacketSize(layOut.size(), 0); + for (uint32_t index = 0; index < layOut.size(); index++) { + uint32_t width = layOut[index]->mergedWidth; + uint32_t height = layOut[index]->mergedHeight; + uint32_t initWidth = initLayOut.size() < index + 1 ? 0 : initLayOut[index]->mergedWidth; + uint32_t initHeight = initLayOut.size() < index + 1 ? 0 : initLayOut[index]->mergedHeight; + MediaPacket *mergedPacket = new MediaPacket(); + uint32_t packetSize = ((width * height * 3) / 2) / 2; + mergedPacket->ReAllocatePacket(packetSize); + mergedPacket->SetRwpk(std::move(rwpk[index])); + char *mergedData = mergedPacket->Payload(); + uint64_t realSize = 0; + if (ERROR_NONE != InitMergedDataAndRealSize(qualityRanking, packets, mergedData, &realSize, index, layOut[index])) { + SAFE_DELETE(mergedPacket); + OMAF_LOG(LOG_ERROR, "Failed to calculated mergedData and realSize!\n"); + return OMAF_ERROR_OPERATION; + } + // 5. set params for merged packets + TilesMergeArrangement *arrange = nullptr; + if (m_updatedTilesMergeArr.empty()) + arrange = m_initTilesMergeArr[qualityRanking][index]; + else + arrange = m_updatedTilesMergeArr[qualityRanking][index]; + + if (!arrange) { + SAFE_DELETE(mergedPacket); + return OMAF_ERROR_NULL_PTR; + } + + uint8_t tileColsNum = arrange->tilesLayout.tileColsNum; + uint8_t tileRowsNum = arrange->tilesLayout.tileRowsNum; + uint32_t packetSizeForOneLayout = tileColsNum * tileRowsNum; + needAccumPacketSize[index] = index == 0 ? 
packetSizeForOneLayout : packetSizeForOneLayout + needAccumPacketSize[index - 1]; + itPacket = packets.begin(); + if (itPacket == packets.end()) + { + OMAF_LOG(LOG_ERROR, "Packet map is empty!\n"); + SAFE_DELETE(mergedPacket); + return OMAF_ERROR_INVALID_DATA; + } + MediaPacket *firstPacket = itPacket->second; + mergedPacket->SetVideoID(static_cast(qualityRanking) - 1); + mergedPacket->SetCodecType(firstPacket->GetCodecType()); + mergedPacket->SetPTS(firstPacket->GetPTS()); + mergedPacket->SetSegID(firstPacket->GetSegID()); + mergedPacket->SetQualityRanking(qualityRanking); + mergedPacket->SetVideoWidth((arrangeChanged ? width : initWidth)); + mergedPacket->SetVideoHeight((arrangeChanged ? height : initHeight)); + mergedPacket->SetQualityNum(1); + SourceResolution resolution; + resolution.qualityRanking = qualityRanking; + resolution.top = 0; + resolution.left = 0; + resolution.width = (arrangeChanged ? width : initWidth); + resolution.height = (arrangeChanged ? height : initHeight); + mergedPacket->SetSourceResolution(0, resolution); + mergedPacket->SetEOS(firstPacket->GetEOS()); + + if (ERROR_NONE != UpdateMergedDataAndRealSize( + qualityRanking, packets, tileColsNum, + arrangeChanged, width, height, initWidth, + initHeight, mergedData, &realSize, index, + needAccumPacketSize, layOut.size())) { + OMAF_LOG(LOG_ERROR, "Failed to update mergedData and realSize!\n"); + SAFE_DELETE(mergedPacket); + return OMAF_ERROR_OPERATION; + } + + mergedPacket->SetRealSize(realSize); + + m_outMergedStream.push_back(mergedPacket); + } + } // each quality ranking + + if (isArrChanged) { + if (ERROR_NONE != UpdateInitTilesMergeArr()) { + OMAF_LOG(LOG_ERROR, "Failed to update init tiles merge arrangement!\n"); + return OMAF_ERROR_OPERATION; + } + } + + return ERROR_NONE; +} + +std::list OmafTilesStitch::GetTilesMergedPackets() { + this->GenerateOutputMergedPackets(); + return m_outMergedStream; +} + +VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafTilesStitch.h b/src/OmafDashAccess/OmafTilesStitch.h new file mode 100644 index 00000000..3d0b21b7 --- /dev/null +++ b/src/OmafDashAccess/OmafTilesStitch.h @@ -0,0 +1,323 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! + +//! 
\file: OmafTilesStitch.h +//! \brief: the class for tiles merge +//! \detail: define the operation and related data for tiles merge. +//! +//! Created on May 22, 2020, 1:18 PM +//! + +#ifndef OMAFTILESSTITCH_H +#define OMAFTILESSTITCH_H + +#include "MediaPacket.h" +#include "general.h" + +#include + +VCD_OMAF_BEGIN + +#define LCU_SIZE 64 +#define HEVC_STARTCODES_LEN 4 +#define HEVC_NALUHEADER_LEN 2 + +// map of > +typedef std::map> PacketsMap; + +//! +//! \sturct: TilesMergeArrangement +//! \brief: tiles merge layout information, including merged resolution, +//! tiles layout and so on +//! +typedef struct TilesMergeArrangement { + uint32_t mergedWidth; + uint32_t mergedHeight; + int32_t mostTopPos; + int32_t mostLeftPos; + TileArrangement tilesLayout; +} TilesMergeArrangement; + +//! +//! \class OmafTilesStitch +//! \brief The class for tiles stitching +//! + +class OmafTilesStitch { + public: + //! + //! \brief Constructor + //! + OmafTilesStitch(); + + //! + //! \brief Destructor + //! + virtual ~OmafTilesStitch(); + + public: + //! + //! \brief Initialize stitching class + //! + //! \param [in] firstFramePackets + //! the first set of media packets for selected tiles + //! \param [in] needParams + //! denote whether VPS/SPS/PPS need to be added into merged packet + //! \param [in] projFmt + //! denote the projectin format of input source where tiles come + //! from + //! \param [in] allSources + //! map of to denote all video + //! sources + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t Initialize(std::map &firstFramePackets, bool needParams, + VCD::OMAF::ProjectionFormat projFmt, std::map allSources); + + //! + //! \brief Update the set of media packets of selected tiles for next frame + //! + //! \param [in] currPackets + //! the input new set of media packets of selected tiles for + //! next frame + //! \param [in] needParams + //! denote whether VPS/SPS/PPS need to be added into merged packet + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t UpdateSelectedTiles(std::map &currPackets, bool needParams); + + //! + //! \brief Get media packets for merged tiles for current frame + //! + //! \return std::list + //! the list of media packets for merged tiles for current + //! frame + //! + std::list GetTilesMergedPackets(); + + //! + //! \brief Get whether stitch class instance has been initialized + //! + //! \return bool + //! whether stitch class instance has been initialized + //! + bool IsInitialized() { return m_isInitialized; }; + + void SetMaxStitchResolution(uint32_t width, uint32_t height) { m_maxStitchWidth = width; m_maxStitchHeight = height; }; + + private: + //! + //! \brief Parse the VPS/SPS/PPS information + //! + //! \param [in] tilePacket + //! the media packet for one selected tile from highest + //! quality video frame which contains VPS/SPS/PPS + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t ParseVideoHeader(MediaPacket *tilePacket); + + //! + //! \brief Calculate tiles merge layout for selected tiles + //! + //! \return std::map + //! the map of tiles merge layout information for different + //! tiles sets of different quality ranking + //! + std::map> CalculateTilesMergeArrangement(); + + //! + //! \brief Calculate region wise packing information for + //! tiles set with specified quality ranking when + //! tiles come from ERP projection + //! + //! \param [in] qualityRanking + //! the quality ranking information for the tiles + //! 
set needed to be calculate region wise packing + //! \param [in] hasPacketLost + //! denote whether media packet is lost in packets + //! set + //! \param [in] hasLayoutChanged + //! denote whether current tiles merge layout has + //! changed compared to previous layout + //! + //! \return RegionWisePacking* + //! the pointer to the calculated region wise packing + //! information + //! + vector> CalculateMergedRwpkForERP(QualityRank qualityRanking, bool hasPacketLost, + bool hasLayoutChanged); + + //! + //! \brief Calculate region wise packing information for + //! tiles set with specified quality ranking when + //! tiles come from CubeMap projection + //! + //! \param [in] qualityRanking + //! the quality ranking information for the tiles + //! set needed to be calculate region wise packing + //! \param [in] hasPacketLost + //! denote whether media packet is lost in packets + //! set + //! \param [in] hasLayoutChanged + //! denote whether current tiles merge layout has + //! changed compared to previous layout + //! + //! \return RegionWisePacking* + //! the pointer to the calculated region wise packing + //! information + //! + vector> CalculateMergedRwpkForCubeMap(QualityRank qualityRanking, bool hasPacketLost, + bool hasLayoutChanged); + + //! + //! \brief Calculate region wise packing information for + //! tiles set with specified quality ranking when + //! tiles come from planar projection + //! + //! \param [in] qualityRanking + //! the quality ranking information for the tiles + //! set needed to be calculate region wise packing + //! \param [in] hasPacketLost + //! denote whether media packet is lost in packets + //! set + //! \param [in] hasLayoutChanged + //! denote whether current tiles merge layout has + //! changed compared to previous layout + //! + //! \return RegionWisePacking* + //! the pointer to the calculated region wise packing + //! information + //! + vector> CalculateMergedRwpkForPlanar(QualityRank qualityRanking, bool hasPacketLost, + bool hasLayoutChanged); + + //! \brief Generate tiles merge layout information + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GenerateTilesMergeArrangement(); + + //! + //! \brief Generate the output media packet after tiles merge + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! 
+ int32_t GenerateOutputMergedPackets(); + +private: + + OmafTilesStitch& operator=(const OmafTilesStitch& other) { return *this; }; + OmafTilesStitch(const OmafTilesStitch& other) { /* do not create copies */ }; + + int32_t IsArrChanged(QualityRank qualityRanking, vector layOut, vector initLayOut, bool *isArrChanged, bool *packetLost, bool *arrangeChanged); + + int32_t GenerateMergedVideoHeaders(bool arrangeChanged, QualityRank qualityRanking, vector layOut, vector initLayOut, std::map packets); + + vector> GenerateMergedRWPK(QualityRank qualityRanking, bool packetLost, bool arrangeChanged); + + int32_t UpdateMergedVideoHeadersForLowQualityRank(bool isEmptyHeaders, std::map packets, QualityRank qualityRanking, TilesMergeArrangement *layOut); + + int32_t InitMergedDataAndRealSize(QualityRank qualityRanking, std::map packets, char* mergedData, uint64_t* realSize, uint32_t index, TilesMergeArrangement *tilesArr); + + int32_t UpdateMergedDataAndRealSize( + QualityRank qualityRanking, std::map packets, + uint8_t tileColsNum, bool arrangeChanged, uint32_t width, uint32_t height, + uint32_t initWidth, uint32_t initHeight, char *mergedData, uint64_t *realSize, + uint32_t index, vector needPacketSize, uint64_t layoutNum); + + int32_t UpdateInitTilesMergeArr(); + + vector> GenerateRowAndColArr(uint32_t packetsSize, uint32_t splitNum, uint32_t maxTile_x, uint32_t maxTile_y); + + private: + bool m_isInitialized; //> + + std::set m_allQualities; //> + m_initTilesMergeArr; //> + m_updatedTilesMergeArr; //>> + m_mergedVideoHeaders; //> + + uint32_t m_fullWidth; // + m_outMergedStream; // m_sources; //all video source information corresponding to different quality ranking +}; + +VCD_OMAF_END; + +#endif /* OMAFTILESSTITCH_H */ diff --git a/src/OmafDashAccess/OmafTracksSelector.cpp b/src/OmafDashAccess/OmafTracksSelector.cpp new file mode 100644 index 00000000..c9f1c016 --- /dev/null +++ b/src/OmafDashAccess/OmafTracksSelector.cpp @@ -0,0 +1,316 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + * + */ + +//! +//! \file: OmafTracksSelector.cpp +//! \brief: Tracks selector base class implementation +//! +//! Created on May 28, 2019, 1:19 PM +//! 
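The OmafTracksSelector implemented below is driven in a fixed order: SetInitialViewport() once, to build the 360SCVP viewport handle from the headset FOV and the stream's tile grid and to apply the initial pose; UpdateViewport() for every new head pose; and SelectTracks(), supplied by a concrete subclass, on each selection round. The following minimal sketch only illustrates that call order; DriveSelector, GetLatestPose and the rounds parameter are hypothetical caller-side names, and it assumes the types and error codes pulled in by OmafTracksSelector.h.

#include <vector>
#include "OmafTracksSelector.h"

// Hypothetical driver loop; GetLatestPose() stands in for the caller's own
// pose source (e.g. the render thread) and is not defined in this library.
int DriveSelector(OmafTracksSelector *selector, OmafMediaStream *stream,
                  HeadSetInfo *headSetInfo, std::vector<Viewport*> &views,
                  HeadPose (*GetLatestPose)(), int rounds) {
  // One-time setup: creates the 360SCVP handle for viewport calculation and
  // applies headSetInfo->pose through UpdateViewport().
  int ret = selector->SetInitialViewport(views, headSetInfo, stream);
  if (ret != ERROR_NONE) return ret;

  for (int i = 0; i < rounds; i++) {
    HeadPose pose = GetLatestPose();       // newest pose sample
    selector->UpdateViewport(&pose);       // recorded into mPoseHistory (bounded by mSize)
    ret = selector->SelectTracks(stream);  // concrete subclass picks the viewport tiles
    if (ret != ERROR_NONE) return ret;
  }
  return ERROR_NONE;
}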
+ +#include "OmafTracksSelector.h" + +VCD_OMAF_BEGIN + +OmafTracksSelector::OmafTracksSelector(int size) { + mSize = size; + m360ViewPortHandle = nullptr; + mParamViewport = nullptr; + mPose = nullptr; + mUsePrediction = false; + mPredictPluginName = ""; + mLibPath = ""; + mProjFmt = ProjectionFormat::PF_ERP; + mSegmentDur = 0; + mQualityRanksNum = 0; + memset_s(&(mI360ScvpPlugin), sizeof(PluginDef), 0); +} + +OmafTracksSelector::~OmafTracksSelector() { + if (m360ViewPortHandle) { + I360SCVP_unInit(m360ViewPortHandle); + m360ViewPortHandle = nullptr; + } + + if (mParamViewport) + { + if (mParamViewport->pStreamInfo) + { + delete [] (mParamViewport->pStreamInfo); + mParamViewport->pStreamInfo = nullptr; + } + } + SAFE_DELETE(mParamViewport); + + if (mPoseHistory.size()) { + //for (auto pose : mPoseHistory) { + // SAFE_DELETE(pose); + //} + std::list::iterator itPose; + for (itPose = mPoseHistory.begin(); itPose != mPoseHistory.end(); ) + { + HeadPose *onePose = *itPose; + SAFE_DELETE(onePose); + mPoseHistory.erase(itPose++); + } + + mPoseHistory.clear(); + } + if (mUsePrediction && !mPredictPluginMap.empty()) + { + ViewportPredictPlugin *plugin = mPredictPluginMap.at(mPredictPluginName); + if (plugin != nullptr) plugin->Destroy(); + } + + if (mPredictPluginMap.size()) { + for (auto &p : mPredictPluginMap) { + SAFE_DELETE(p.second); + } + + mPredictPluginMap.clear(); + } + + mUsePrediction = false; + + SAFE_DELETE(mPose); + + mTwoDQualityInfos.clear(); +} + +int OmafTracksSelector::SetInitialViewport(std::vector &pView, HeadSetInfo *headSetInfo, + OmafMediaStream *pStream) { + if (mProjFmt != ProjectionFormat::PF_PLANAR) + { + if (!headSetInfo || !headSetInfo->viewPort_hFOV || !headSetInfo->viewPort_vFOV || !headSetInfo->viewPort_Width || + !headSetInfo->viewPort_Height) { + return ERROR_INVALID; + } + } + else + { + if (!headSetInfo || !headSetInfo->viewPort_Width || !headSetInfo->viewPort_Height || !(mTwoDQualityInfos.size())) { + return ERROR_INVALID; + } + } + + mParamViewport = new param_360SCVP; + mParamViewport->usedType = E_VIEWPORT_ONLY; + mParamViewport->logFunction = (void*)logCallBack; + mParamViewport->pStreamInfo = NULL; + if (mProjFmt == ProjectionFormat::PF_ERP) { + mParamViewport->paramViewPort.viewportWidth = headSetInfo->viewPort_Width; + mParamViewport->paramViewPort.viewportHeight = headSetInfo->viewPort_Height; + mParamViewport->paramViewPort.viewPortPitch = headSetInfo->pose->pitch; + mParamViewport->paramViewPort.viewPortYaw = headSetInfo->pose->yaw; + mParamViewport->paramViewPort.viewPortFOVH = headSetInfo->viewPort_hFOV; + mParamViewport->paramViewPort.viewPortFOVV = headSetInfo->viewPort_vFOV; + mParamViewport->paramViewPort.geoTypeInput = + (EGeometryType)(mProjFmt); //(EGeometryType)headSetInfo->input_geoType; + mParamViewport->paramViewPort.geoTypeOutput = E_SVIDEO_VIEWPORT; //(EGeometryType)headSetInfo->output_geoType; + mParamViewport->paramViewPort.tileNumRow = pStream->GetRowSize(); + mParamViewport->paramViewPort.tileNumCol = pStream->GetColSize(); + mParamViewport->paramViewPort.usageType = E_VIEWPORT_ONLY; + mParamViewport->paramViewPort.faceWidth = pStream->GetStreamHighResWidth(); + mParamViewport->paramViewPort.faceHeight = pStream->GetStreamHighResHeight(); + mParamViewport->paramViewPort.paramVideoFP.cols = 1; + mParamViewport->paramViewPort.paramVideoFP.rows = 1; + mParamViewport->paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + mParamViewport->paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + } else if (mProjFmt == 
ProjectionFormat::PF_CUBEMAP) { + mParamViewport->paramViewPort.viewportWidth = headSetInfo->viewPort_Width; + mParamViewport->paramViewPort.viewportHeight = headSetInfo->viewPort_Height; + mParamViewport->paramViewPort.viewPortPitch = headSetInfo->pose->pitch; + mParamViewport->paramViewPort.viewPortYaw = headSetInfo->pose->yaw; + mParamViewport->paramViewPort.viewPortFOVH = headSetInfo->viewPort_hFOV; + mParamViewport->paramViewPort.viewPortFOVV = headSetInfo->viewPort_vFOV; + mParamViewport->paramViewPort.geoTypeInput = + (EGeometryType)(mProjFmt); //(EGeometryType)headSetInfo->input_geoType; + mParamViewport->paramViewPort.geoTypeOutput = E_SVIDEO_VIEWPORT; //(EGeometryType)headSetInfo->output_geoType; + mParamViewport->paramViewPort.tileNumRow = pStream->GetRowSize() / 2; + mParamViewport->paramViewPort.tileNumCol = pStream->GetColSize() / 3; + mParamViewport->paramViewPort.usageType = E_VIEWPORT_ONLY; + mParamViewport->paramViewPort.faceWidth = pStream->GetStreamHighResWidth() / 3; + mParamViewport->paramViewPort.faceHeight = pStream->GetStreamHighResHeight() / 2; + + mParamViewport->paramViewPort.paramVideoFP.cols = 3; + mParamViewport->paramViewPort.paramVideoFP.rows = 2; + mParamViewport->paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + mParamViewport->paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + mParamViewport->paramViewPort.paramVideoFP.faces[0][0].faceWidth = mParamViewport->paramViewPort.faceWidth; + mParamViewport->paramViewPort.paramVideoFP.faces[0][0].faceHeight = mParamViewport->paramViewPort.faceHeight; + mParamViewport->paramViewPort.paramVideoFP.faces[0][1].idFace = 1; + mParamViewport->paramViewPort.paramVideoFP.faces[0][1].rotFace = NO_TRANSFORM; + mParamViewport->paramViewPort.paramVideoFP.faces[0][2].idFace = 2; + mParamViewport->paramViewPort.paramVideoFP.faces[0][2].rotFace = NO_TRANSFORM; + mParamViewport->paramViewPort.paramVideoFP.faces[1][0].idFace = 3; + mParamViewport->paramViewPort.paramVideoFP.faces[1][0].rotFace = NO_TRANSFORM; + mParamViewport->paramViewPort.paramVideoFP.faces[1][1].idFace = 4; + mParamViewport->paramViewPort.paramVideoFP.faces[1][1].rotFace = NO_TRANSFORM; + mParamViewport->paramViewPort.paramVideoFP.faces[1][2].idFace = 5; + mParamViewport->paramViewPort.paramVideoFP.faces[1][2].rotFace = NO_TRANSFORM; + } else if (mProjFmt == ProjectionFormat::PF_PLANAR) { + mParamViewport->paramViewPort.viewportWidth = headSetInfo->viewPort_Width; + mParamViewport->paramViewPort.viewportHeight = headSetInfo->viewPort_Height; + mParamViewport->paramViewPort.geoTypeInput = E_SVIDEO_PLANAR; + mParamViewport->paramViewPort.geoTypeOutput = E_SVIDEO_VIEWPORT; + mParamViewport->paramViewPort.usageType = E_VIEWPORT_ONLY; + mParamViewport->sourceResolutionNum = mTwoDQualityInfos.size(); + mParamViewport->accessInterval = (float)(mSegmentDur); + mParamViewport->pStreamInfo = new Stream_Info[mParamViewport->sourceResolutionNum]; + if (!(mParamViewport->pStreamInfo)) + return ERROR_NULL_PTR; + + uint8_t strIdx = 0; + map::iterator itQua; + for(itQua = mTwoDQualityInfos.begin(); itQua != mTwoDQualityInfos.end(); itQua++) + { + TwoDQualityInfo oneQuality = itQua->second; + if (oneQuality.quality_ranking == 1) + { + mParamViewport->paramViewPort.faceWidth = oneQuality.orig_width; + mParamViewport->paramViewPort.faceHeight = oneQuality.orig_height; + } + mParamViewport->pStreamInfo[strIdx].FrameWidth = oneQuality.orig_width; + mParamViewport->pStreamInfo[strIdx].FrameHeight = oneQuality.orig_height; + 
mParamViewport->pStreamInfo[strIdx].TileWidth = oneQuality.region_width; + mParamViewport->pStreamInfo[strIdx].TileHeight = oneQuality.region_height; + + OMAF_LOG(LOG_INFO, "Planar video %d\n", strIdx); + OMAF_LOG(LOG_INFO, "Width %d, Height %d\n", mParamViewport->pStreamInfo[strIdx].FrameWidth, mParamViewport->pStreamInfo[strIdx].FrameHeight); + OMAF_LOG(LOG_INFO, "Tile Width %d, Tile Height %d\n", mParamViewport->pStreamInfo[strIdx].TileWidth, mParamViewport->pStreamInfo[strIdx].TileHeight); + + mTwoDStreamQualityMap.insert(std::make_pair(strIdx, oneQuality.quality_ranking)); + OMAF_LOG(LOG_INFO, "Insert one pair of stream index %d and its corresponding quality rankding %d\n", strIdx, oneQuality.quality_ranking); + + strIdx++; + } + + mParamViewport->paramViewPort.paramVideoFP.cols = 1; + mParamViewport->paramViewPort.paramVideoFP.rows = 1; + + mParamViewport->pluginDef = mI360ScvpPlugin; + if (mParamViewport->pluginDef.pluginLibPath == NULL) + { + OMAF_LOG(LOG_ERROR, "No assigned 360SCVP Plugin for planar video tiles selection, exit !\n"); + return ERROR_NULL_PTR; + } + OMAF_LOG(LOG_INFO, "Used 360SCVP Plugin is %s\n", mParamViewport->pluginDef.pluginLibPath); + } + + m360ViewPortHandle = I360SCVP_Init(mParamViewport); + if (!m360ViewPortHandle) return ERROR_NULL_PTR; + + // set current Pose; + mPose = new HeadPose; + if (!mPose) return ERROR_NULL_PTR; + + memcpy_s(mPose, sizeof(HeadPose), headSetInfo->pose, sizeof(HeadPose)); + + return UpdateViewport(mPose); +} + +int OmafTracksSelector::UpdateViewport(HeadPose *pose) { + if (!pose) return ERROR_NULL_PTR; + + std::lock_guard lock(mMutex); + HeadPose* input_pose = new HeadPose; + + if (!(input_pose)) return ERROR_NULL_PTR; + memcpy_s(input_pose, sizeof(HeadPose), pose, sizeof(HeadPose)); + + mPoseHistory.push_front(input_pose); + if (mPoseHistory.size() > (uint32_t)(this->mSize)) { + auto pit = mPoseHistory.back(); + SAFE_DELETE(pit); + mPoseHistory.pop_back(); + } + + // if using viewport prediction, set real time viewports. + if (mUsePrediction && !mPredictPluginMap.empty()) + { + ViewportPredictPlugin *plugin = mPredictPluginMap.at(mPredictPluginName); + ViewportAngle *angle = new ViewportAngle; + angle->yaw = pose->yaw; + angle->pitch = pose->pitch; + angle->pts = pose->pts; + plugin->SetViewport(angle); + } + + return ERROR_NONE; +} + +int OmafTracksSelector::EnablePosePrediction(std::string predictPluginName, std::string libPath, bool enableExtractor) { + mUsePrediction = true; + mPredictPluginName.assign(predictPluginName); + mLibPath.assign(libPath); + // 1. load plugin + int ret = InitializePredictPlugins(); + if (ret != ERROR_NONE) + { + OMAF_LOG(LOG_ERROR, "Failed in loading predict plugin\n"); + return ret; + } + // 2. 
initial plugin + ViewportPredictPlugin *plugin = mPredictPluginMap.at(mPredictPluginName); + PredictOption option; + option.usingFeedbackAngleAdjust = true; + if (enableExtractor){ + option.mode = PredictionMode::SingleViewpoint; + } + else{ + option.mode = PredictionMode::MultiViewpoints; + } + ret = plugin->Intialize(option); + if (ret != ERROR_NONE) + { + OMAF_LOG(LOG_ERROR, "Failed in initializing predict plugin\n"); + return ret; + } + return ERROR_NONE; +} + +int OmafTracksSelector::InitializePredictPlugins() { + if (mLibPath.empty() || mPredictPluginName.empty()) { + OMAF_LOG(LOG_ERROR, "Viewport predict plugin path OR name is invalid!\n"); + return ERROR_INVALID; + } + ViewportPredictPlugin *plugin = new ViewportPredictPlugin(); + if (!plugin) return ERROR_NULL_PTR; + + std::string pluginPath = mLibPath + mPredictPluginName; + int ret = plugin->LoadPlugin(pluginPath.c_str()); + if (ret != ERROR_NONE) { + OMAF_LOG(LOG_ERROR, "Load plugin failed!\n"); + SAFE_DELETE(plugin); + return ret; + } + mPredictPluginMap.insert(std::pair(mPredictPluginName, plugin)); + return ERROR_NONE; +} + +VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafTracksSelector.h b/src/OmafDashAccess/OmafTracksSelector.h new file mode 100644 index 00000000..ffcde1b3 --- /dev/null +++ b/src/OmafDashAccess/OmafTracksSelector.h @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + * + */ +//! +//! \file: OmafTracksSelector.h +//! \brief: Tracks selector base class definition +//! \detail: Define the operation of tracks selector base class based on viewport +//! Created on May 28, 2019, 1:19 PM +//! + +#ifndef OMAFTRACKSSELECTOR_H +#define OMAFTRACKSSELECTOR_H + +#include "360SCVPViewportAPI.h" +#include "OmafMediaStream.h" +#include "OmafViewportPredict/ViewportPredictPlugin.h" +#include "general.h" +#include + +using namespace VCD::OMAF; + +VCD_OMAF_BEGIN + +#define POSE_SIZE 10 + +constexpr uint64_t ptsInterval[4] = {0, 8, 15, 23}; // 30 38 45 53 + +typedef struct POSEINFO { + HeadPose *pose; + uint64_t time; +} PoseInfo; + +class OmafTracksSelector { + public: + //! + //! \brief construct + //! + OmafTracksSelector(int size = POSE_SIZE); + + //! + //! \brief de-construct + //! 
+ virtual ~OmafTracksSelector(); + + //! + //! \brief Select tracks for the stream based on the latest pose. each time + //! the selector will select tracks based on the latest pose. the + //! information stored in mPoseHistory can be used for prediction for + //! further movement + //! + virtual int SelectTracks(OmafMediaStream *pStream) = 0; + + //! + //! \brief Set Init viewport + //! + int SetInitialViewport(std::vector &pView, HeadSetInfo *headSetInfo, OmafMediaStream *pStream); + + //! + //! \brief Update Viewport; each time pose update will be recorded, but only + //! the latest will be used when SelectTracks is called. + //! + int UpdateViewport(HeadPose *pose); + + //! + //! \brief Load viewport prediction plugin + //! + int EnablePosePrediction(std::string predictPluginName, std::string libPath, bool enableExtractor); + + //! + //! \brief Get the priority of the segment + //! + // virtual int GetSegmentPriority(OmafSegment *segment) = 0; + + void SetProjectionFmt(ProjectionFormat projFmt) { mProjFmt = projFmt; }; + + //! + //! \brief Set total video qualities number + //! + void SetVideoQualityRanksNum(uint32_t qualitiesNum) { mQualityRanksNum = qualitiesNum; }; + + //! + //! \brief Set map for all planar video sources + //! + void SetTwoDQualityInfos(std::map twoDQualities) { mTwoDQualityInfos = std::move(twoDQualities); }; + + //! + //! \brief Set segment duration in microsecond + //! + void SetSegmentDuration(uint32_t segDur) { mSegmentDur = segDur; }; + + //! + //! \brief Set 360SCVP library plugin + //! + void SetI360SCVPPlugin(PluginDef i360scvp_plugin) + { + mI360ScvpPlugin.pluginType = i360scvp_plugin.pluginType; + mI360ScvpPlugin.pluginFormat = i360scvp_plugin.pluginFormat; + mI360ScvpPlugin.pluginLibPath = i360scvp_plugin.pluginLibPath; + }; + +private: + OmafTracksSelector& operator=(const OmafTracksSelector& other) { return *this; }; + OmafTracksSelector(const OmafTracksSelector& other) { /* do not create copies */ }; + + private: + //! + //! \brief Initialize viewport prediction plugin + //! + int InitializePredictPlugins(); + + protected: + std::list mPoseHistory; + int mSize; + std::mutex mMutex; + HeadPose *mPose; + void *m360ViewPortHandle; + param_360SCVP *mParamViewport; + bool mUsePrediction; + std::string mPredictPluginName; + std::string mLibPath; + std::map mPredictPluginMap; + ProjectionFormat mProjFmt; + uint32_t mQualityRanksNum; + map mTwoDQualityInfos; + map mTwoDStreamQualityMap; + uint32_t mSegmentDur; + PluginDef mI360ScvpPlugin; +}; + +VCD_OMAF_END; + +#endif /* OMAFTRACKSSELECTOR_H */ diff --git a/src/OmafDashAccess/OmafTypes.h b/src/OmafDashAccess/OmafTypes.h new file mode 100644 index 00000000..110eefc5 --- /dev/null +++ b/src/OmafDashAccess/OmafTypes.h @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + * + */ + +//! +//! \file: OmafTypes.h +//! \brief: +//! \detail: +//! Created on May 20, 2020, 10:07 AM +//! +#ifndef OMAF_TYPES_H +#define OMAF_TYPES_H + +#include +#include +extern "C" { +#include "safestringlib/safe_mem_lib.h" +} + +namespace VCD { +namespace OMAF { + +const long DEFAULT_MAX_PARALLEL_TRANSFERS = 20; +const int32_t DEFAULT_SEGMENT_OPEN_TIMEOUT = 3000; + +class OmafDashHttpProxy { + public: + std::string http_proxy_; + std::string https_proxy_; + std::string no_proxy_; + std::string proxy_user_; + std::string proxy_passwd_; + std::string to_string() { + std::stringstream ss; + ss << "http proxy: {" << std::endl; + ss << "\thttp proxy: " << http_proxy_ << ", " << std::endl; + ss << "\thttps proxy: " << https_proxy_ << ", " << std::endl; + ss << "\tno proxy: " << no_proxy_ << ", " << std::endl; + ss << "\tproxy user: " << proxy_user_ << "" << std::endl; + ss << "}"; + return ss.str(); + } +}; + +class OmafDashHttpParams { + public: + long conn_timeout_ = -1; + long total_timeout_ = -1; + + int32_t retry_times_ = -1; + bool bssl_verify_peer_ = false; + bool bssl_verify_host_ = false; + std::string to_string() { + std::stringstream ss; + ss << "http params: {" << std::endl; + ss << "\tconnection timeout: " << conn_timeout_ << ", " << std::endl; + ss << "\ttotal timeout: " << total_timeout_ << ", " << std::endl; + ss << "\tretry times: " << retry_times_ << "" << std::endl; + ss << "\tssl verify peer state: " << bssl_verify_peer_ << "" << std::endl; + ss << "\tssl verify host state: " << bssl_verify_host_ << "" << std::endl; + ss << "}"; + return ss.str(); + } +}; + +class OmafDashStatisticsParams { + public: + int32_t window_size_ms_ = 10000; // 10s + bool enable_ = false; + std::string to_string() { + std::stringstream ss; + ss << "dash statistics params: {" << std::endl; + ss << "\tstate: " << enable_ << std::endl; + ss << "\twindow size: " << window_size_ms_ << " ms," << std::endl; + ss << "}" << std::endl; + return ss.str(); + } +}; + +struct _omafDashSynchronizerParams { + int32_t segment_range_size_ = 20; + bool enable_ = false; + std::string to_string() { + std::stringstream ss; + ss << "dash segment syncer params: {" << std::endl; + ss << "\tstate: " << enable_ << std::endl; + ss << "\tsegment window size: " << segment_range_size_ << std::endl; + ss << "}" << std::endl; + return ss.str(); + } +}; +using OmafDashSynchronizerParams = struct _omafDashSynchronizerParams; + +struct _omafDashPredictorParams { + std::string name_; + std::string libpath_; + bool enable_ = false; + std::string to_string() { + std::stringstream ss; + ss << "dash statistics params: {" << std::endl; + ss << "\tstate: " << enable_ << std::endl; + ss << "\tname: " << name_ << std::endl; + ss << "\tlib path: 
" << libpath_ << std::endl; + ss << "}" << std::endl; + return ss.str(); + } +}; +using OmafDashPredictorParams = struct _omafDashPredictorParams; + +class OmafDashParams { + public: + public: + // for download + OmafDashHttpProxy http_proxy_; + OmafDashHttpParams http_params_; + OmafDashStatisticsParams stats_params_; + OmafDashSynchronizerParams syncer_params_; + OmafDashPredictorParams prediector_params_; + long max_parallel_transfers_ = DEFAULT_MAX_PARALLEL_TRANSFERS; + int32_t segment_open_timeout_ms_ = DEFAULT_SEGMENT_OPEN_TIMEOUT; + // for stitch + uint32_t max_decode_width_; + uint32_t max_decode_height_; + + std::string to_string() { + std::stringstream ss; + ss << http_proxy_.to_string(); + ss << http_params_.to_string(); + ss << "\tmax parallel transfers: " << max_parallel_transfers_ << ", " << std::endl; + ss << stats_params_.to_string(); + ss << syncer_params_.to_string(); + ss << prediector_params_.to_string(); + return ss.str(); + } +}; + +enum class TaskPriority { + HIGH = 0, + NORMAL = 1, + LOW = 2, + END = 3, +}; + +inline std::string priority(TaskPriority p) { + switch (p) { + case TaskPriority::HIGH: + return "HIGH"; + case TaskPriority::NORMAL: + return "NORMAL"; + case TaskPriority::LOW: + return "LOW"; + default: + return "unknown"; + } +} + +class DashSegmentSourceParams { + public: + int64_t timeline_point_ = -1; + std::string dash_url_; // unique in the system + TaskPriority priority_ = TaskPriority::LOW; + std::string to_string() const noexcept { + std::stringstream ss; + ss << "url=" << dash_url_; + ss << ", priority=" << priority(priority_); + ss << ", timeline_point=" << timeline_point_; + return ss.str(); + } +}; + +} // namespace OMAF +} // namespace VCD + +#endif // !OMAF_TYPES_H diff --git a/src/OmafDashAccess/OmafViewportPredict/ViewportPredictPlugin.cpp b/src/OmafDashAccess/OmafViewportPredict/ViewportPredictPlugin.cpp new file mode 100644 index 00000000..5c9188f1 --- /dev/null +++ b/src/OmafDashAccess/OmafViewportPredict/ViewportPredictPlugin.cpp @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! + +//! \file: ViewportPredictPlugin.cpp +//! \brief: Defines structure and functions in viewport predict plugin. +//! 
\detail: +//! +//! Created on April 7, 2020, 2:39 PM +//! + +#include "ViewportPredictPlugin.h" +#include + +VCD_OMAF_BEGIN + +ViewportPredictPlugin::ViewportPredictPlugin() +{ + m_libHandler = NULL; + m_predictHandler = NULL; + m_predictFunc = NULL; + m_initFunc = NULL; + m_setViewportFunc = NULL; + m_destroyFunc = NULL; +} + +ViewportPredictPlugin::~ViewportPredictPlugin() +{ + if (m_libHandler) + { + dlclose(m_libHandler); + m_libHandler = NULL; + } +} + +int ViewportPredictPlugin::LoadPlugin(const char* lib_path) +{ + if (NULL == lib_path) + { + return ERROR_NULL_PTR; + } + m_libHandler = dlopen(lib_path, RTLD_LAZY); + if (!m_libHandler) + { + OMAF_LOG(LOG_ERROR,"failed to open predict library path!\n"); + return ERROR_NULL_PTR; + } + m_initFunc = (INIT_FUNC)dlsym(m_libHandler, "ViewportPredict_Init"); + if (dlerror() != NULL) + { + OMAF_LOG(LOG_ERROR,"failed to load ViewportPredict_Init func!\n"); + dlclose(m_libHandler); + return ERROR_INVALID; + } + m_setViewportFunc = (SETVIEWPORT_FUNC)dlsym(m_libHandler, "ViewportPredict_SetViewport"); + if (dlerror() != NULL) + { + OMAF_LOG(LOG_ERROR,"failed to load ViewportPredict_SetViewport func!\n"); + dlclose(m_libHandler); + return ERROR_INVALID; + } + m_predictFunc = (PREDICTPOSE_FUNC)dlsym(m_libHandler, "ViewportPredict_PredictPose"); + if (dlerror() != NULL) + { + OMAF_LOG(LOG_ERROR,"failed to load ViewportPredict_PredictPose func!\n"); + dlclose(m_libHandler); + return ERROR_INVALID; + } + m_destroyFunc = (DESTROY_FUNC)dlsym(m_libHandler, "ViewportPredict_unInit"); + if (dlerror() != NULL) + { + OMAF_LOG(LOG_ERROR,"failed to load ViewportPredict_unInit func!\n"); + dlclose(m_libHandler); + return ERROR_INVALID; + } + return ERROR_NONE; +} + +int ViewportPredictPlugin::Intialize(PredictOption option) +{ + Handler predict_handler = m_initFunc(option); + if (NULL == predict_handler) + { + OMAF_LOG(LOG_ERROR,"handler init failed!\n"); + return ERROR_NULL_PTR; + } + m_predictHandler = predict_handler; + return ERROR_NONE; +} + +int ViewportPredictPlugin::SetViewport(ViewportAngle *angle) +{ + if (angle == nullptr) + { + OMAF_LOG(LOG_ERROR, " Viewport angle is null!\n"); + return ERROR_NULL_PTR; + } + return m_setViewportFunc(m_predictHandler, angle); +} + +int ViewportPredictPlugin::Predict(uint64_t pre_first_pts, std::map& predict_viewport_list) +{ + return m_predictFunc(m_predictHandler, pre_first_pts, predict_viewport_list); +} + +int ViewportPredictPlugin::Destroy() +{ + return m_destroyFunc(m_predictHandler); +} + +VCD_OMAF_END diff --git a/src/OmafDashAccess/OmafViewportPredict/ViewportPredictPlugin.h b/src/OmafDashAccess/OmafViewportPredict/ViewportPredictPlugin.h new file mode 100644 index 00000000..02e59c5d --- /dev/null +++ b/src/OmafDashAccess/OmafViewportPredict/ViewportPredictPlugin.h @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + + */ + +//! + +//! \file: ViewportPredictPlugin.h +//! \brief: Defines structure and functions in viewport predict plugin. +//! \detail: +//! +//! Created on April 7, 2020, 2:39 PM +//! + +#ifndef VIEWPORTPREDICTPLUGIN_H +#define VIEWPORTPREDICTPLUGIN_H + +#include "../general.h" +#include +#include +#include + +VCD_OMAF_BEGIN + +typedef void* Handler; +// define function point of plugin interfaces +typedef Handler (*INIT_FUNC)(PredictOption); +typedef int32_t (*SETVIEWPORT_FUNC)(Handler, ViewportAngle*); +typedef int32_t (*PREDICTPOSE_FUNC)(Handler, uint64_t, std::map&); +typedef int32_t (*DESTROY_FUNC)(Handler); + +class ViewportPredictPlugin +{ +public: + //! + //! \brief construct + //! + ViewportPredictPlugin(); + //! + //! \brief de-construct + //! + ~ViewportPredictPlugin(); + //! \brief load viewport predict plugin by lib path + //! + //! \param [in] const char * + //! lib path + //! \return int + //! ERROR code + //! + int LoadPlugin(const char* lib_path); + //! \brief intialize plugin + //! + //! \param [in] PredictOption + //! predict option + //! + //! \return int + //! ERROR code + //! + int Intialize(PredictOption option); + //! \brief set original viewport angle + //! + //! \param [in] ViewportAngle* + //! original viewport angle + //! + int SetViewport(ViewportAngle *angle); + //! \brief viewport prediction process + //! + //! \param [in] uint64_t + //! first pts of predict angle + //! [in] std::map& + //! output predict angle list + //! \return int + //! ERROR code + //! + int Predict(uint64_t pre_first_pts, std::map& predict_viewport_list); + //! + //! \brief viewport prediction destroy function + //! + int Destroy(); + +private: + Handler m_libHandler; + Handler m_predictHandler; + INIT_FUNC m_initFunc; + SETVIEWPORT_FUNC m_setViewportFunc; + PREDICTPOSE_FUNC m_predictFunc; + DESTROY_FUNC m_destroyFunc; +}; + +VCD_OMAF_END; +#endif /* VIEWPORTPREDICTPLUGIN_H */ diff --git a/src/OmafDashAccess/common.h b/src/OmafDashAccess/common.h new file mode 100644 index 00000000..abf8ebd2 --- /dev/null +++ b/src/OmafDashAccess/common.h @@ -0,0 +1,78 @@ + +/* + * Copyright (c) 2018, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * File: common.h + * Author: media + * + * Created on 2020/04/22 + */ +#ifndef VCD_UTILS_COMMON_H +#define VCD_UTILS_COMMON_H +#include + +namespace VCD { +class NonCopyable { + protected: + NonCopyable() = default; + + // Non-moveable. + NonCopyable(NonCopyable &&) noexcept = delete; + NonCopyable &operator=(NonCopyable &&) noexcept = delete; + + // Non-copyable. + NonCopyable(const NonCopyable &) = delete; + NonCopyable &operator=(const NonCopyable &) = delete; +}; +} // namespace VCD + +// +// @template make_unique_vcaa +// @brief std11 not support this feature +// +template +std::unique_ptr make_unique_vcd(Args &&... args) { + return std::unique_ptr(new T(std::forward(args)...)); +} + +#ifndef UNUSED +#define UNUSED(prama) (void)prama +#endif // !UNUSED + +#ifndef IN +#define IN +#endif // !IN + +#ifndef OUT +#define OUT +#endif // !OUT + +#ifndef INOUT +#define INOUT +#endif // !INOUT + +#endif // !VCD_UTILS_COMMON_H \ No newline at end of file diff --git a/src/OmafDashAccess/general.h b/src/OmafDashAccess/general.h index b4ed6b01..85628d02 100644 --- a/src/OmafDashAccess/general.h +++ b/src/OmafDashAccess/general.h @@ -46,74 +46,94 @@ #include "../utils/data_type.h" #include "../utils/Threadable.h" #include "../utils/Singleton.h" +#include "OmafDashAccessLog.h" #include "glog/logging.h" #include "OmafStructure.h" -#define SAFE_DELETE(x) if(NULL != (x)) { delete (x); (x)=NULL; }; -#define SAFE_FREE(x) if(NULL != (x)) { free((x)); (x)=NULL; }; +#ifdef _ANDROID_NDK_OPTION_ +#include +#define LOG_TAG "OmafDashAccess" +#define ANDROID_LOGD(...) 
__android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__) +#endif + +#define SAFE_DELETE(x) \ + if (NULL != (x)) { \ + delete (x); \ + (x) = NULL; \ + }; +#define SAFE_DELARRAY(x) \ + if (NULL != (x)) { \ + delete [] (x); \ + (x) = NULL; \ + }; +#define SAFE_FREE(x) \ + if (NULL != (x)) { \ + free((x)); \ + (x) = NULL; \ + }; VCD_OMAF_BEGIN -typedef struct FRACTIONAL{ - uint32_t num; - uint32_t den; -}Fractional; - -typedef struct VIDEOINFO{ - uint32_t width; - uint32_t height; - Fractional frame_Rate; - uint32_t bit_rate; - Fractional sar; -}VideoInfo; - -typedef struct AUDIOINFO{ - uint32_t channels; ///for audio - uint32_t channel_bytes; ///for audio - uint32_t sample_rate; ///for audio -}AudioInfo; +typedef struct SourceInfo { + QualityRank qualityRanking; + int32_t width; + int32_t height; +} SourceInfo; + +typedef struct FRACTIONAL { + uint32_t num; + uint32_t den; +} Fractional; + +typedef struct VIDEOINFO { + uint32_t width; + uint32_t height; + Fractional frame_Rate; + uint32_t bit_rate; + Fractional sar; +} VideoInfo; + +typedef struct AUDIOINFO { + uint32_t channels; /// for audio + uint32_t channel_bytes; /// for audio + uint32_t sample_rate; /// for audio +} AudioInfo; //! //! \brief function to parse string to data type //! -bool parse_bool ( const char * const attr ); -uint32_t parse_int ( const char * const attr ); -uint64_t parse_long_int ( const char * const attr ); -double parse_double ( const char * const attr ); -uint64_t parse_date ( const char * const attr ); -uint64_t parse_duration ( const char * const duration ); -uint32_t parse_duration_u32 ( const char* const duration ); -uint32_t sys_clock ( ); -uint64_t sys_clock_high_res ( ); +bool parse_bool(const char* const attr); +uint32_t parse_int(const char* const attr); +uint64_t parse_long_int(const char* const attr); +double parse_double(const char* const attr); +uint64_t parse_date(const char* const attr); +uint64_t parse_duration(const char* const duration); +uint32_t parse_duration_u32(const char* const duration); +uint32_t sys_clock(); +uint64_t sys_clock_high_res(); //! //! \brief function to deal with time //! -time_t mktime_utc ( struct tm *tm ); -int32_t net_get_timezone ( ); -int32_t net_get_ntp_diff_ms ( uint64_t ntp ); -uint64_t net_get_ntp_ts ( ); -void net_get_ntp ( uint32_t *sec, uint32_t *frac ); -uint64_t net_get_utc ( ); -void net_set_ntp_shift ( int32_t shift ); -uint64_t net_parse_date ( const char *val ); +time_t mktime_utc(struct tm* tm); +int32_t net_get_timezone(); +int32_t net_get_ntp_diff_ms(uint64_t ntp); +uint64_t net_get_ntp_ts(); +void net_get_ntp(uint32_t* sec, uint32_t* frac); +uint64_t net_get_utc(); +void net_set_ntp_shift(int32_t shift); +uint64_t net_parse_date(const char* val); //! //! \brief function to deal with string //! 
-void SplitString ( const std::string& s, - std::vector& v, - const std::string& c ); -std::string GetSubstr ( std::string str, - char sep, - bool bBefore ); -char* strlwr ( char *s ); +void SplitString(const std::string& s, std::vector& v, const std::string& c); +std::string GetSubstr(std::string str, char sep, bool bBefore); +char* strlwr(char* s); -std::string PathSplice ( std::string basePath, - std::string appendedPath); +std::string PathSplice(std::string basePath, std::string appendedPath); int32_t StringToInt(string str); VCD_OMAF_END; #endif /* GENERAL_H */ - diff --git a/src/OmafDashAccess/iso_structure.h b/src/OmafDashAccess/iso_structure.h index ca7f62cd..db72dc17 100644 --- a/src/OmafDashAccess/iso_structure.h +++ b/src/OmafDashAccess/iso_structure.h @@ -34,411 +34,78 @@ #ifndef ISO_STRUCTURE_H #define ISO_STRUCTURE_H +#include "../isolib/dash_parser/Mp4DataTypes.h" +#include "../isolib/include/Common.h" #include "general.h" VCD_OMAF_BEGIN -struct FourCC -{ - char symbol[5]; - inline FourCC() - : symbol{} - { - } - - inline FourCC(uint32_t sym) - { - symbol[0] = char((sym >> 24) & 0xff); - symbol[1] = char((sym >> 16) & 0xff); - symbol[2] = char((sym >> 8) & 0xff); - symbol[3] = char((sym >> 0) & 0xff); - symbol[4] = '\0'; - } - inline FourCC(const char* string) - { - symbol[0] = string[0]; - symbol[1] = string[1]; - symbol[2] = string[2]; - symbol[3] = string[3]; - symbol[4] = '\0'; - } - inline FourCC(const FourCC& F) - { - symbol[0] = F.symbol[0]; - symbol[1] = F.symbol[1]; - symbol[2] = F.symbol[2]; - symbol[3] = F.symbol[3]; - symbol[4] = '\0'; - } - inline FourCC& operator=(const FourCC& F) - { - symbol[0] = F.symbol[0]; - symbol[1] = F.symbol[1]; - symbol[2] = F.symbol[2]; - symbol[3] = F.symbol[3]; - symbol[4] = '\0'; - return *this; - } - inline bool operator==(const FourCC& F) const - { - return (symbol[0] == F.symbol[0]) && (symbol[1] == F.symbol[1]) && (symbol[2] == F.symbol[2]) && - (symbol[3] == F.symbol[3]); - } - inline bool operator!=(const FourCC& F) const - { - return (symbol[0] != F.symbol[0]) || (symbol[1] != F.symbol[1]) || (symbol[2] != F.symbol[2]) || - (symbol[3] != F.symbol[3]); - } - inline bool operator<(const FourCC& F) const - { - return (symbol[0] < F.symbol[0]) - ? true - : (symbol[0] > F.symbol[0]) - ? false - : (symbol[1] < F.symbol[1]) - ? true - : (symbol[1] > F.symbol[1]) - ? false - : (symbol[2] < F.symbol[2]) - ? true - : (symbol[2] > F.symbol[2]) - ? false - : (symbol[3] < F.symbol[3]) - ? true - : (symbol[3] > F.symbol[3]) ? 
false : false; - } - inline bool operator<=(const FourCC& F) const - { - return *this == F || *this < F; - } - inline bool operator>=(const FourCC& F) const - { - return !(*this < F); - } - inline bool operator>(const FourCC& F) const - { - return !(*this <= F); - } -}; - -enum TrackSampleType -{ - out_ref, - out_non_ref, - non_out_ref, - display, - samples, -}; - -enum class ViewIdc : uint8_t -{ - MONOSCOPIC = 0, - LEFT = 1, - RIGHT = 2, - LEFT_AND_RIGHT = 3, - INVALID = 0xff -}; - -typedef struct ChannelLayout -{ - uint8_t speakerPosition; - int16_t azimuthDegree; - int8_t elevationDegree; -}ChannelLayout; - -typedef struct chnlProperty -{ - uint8_t streamStruct; - uint8_t definedLayout; - uint64_t omittedChannelsMap; - uint8_t objectNumber; - uint16_t channelNumber; - std::vector channelLayoutArrays; -}chnlProperty; - - -enum class RegionWisePackingType : uint8_t -{ - RECTANGULAR = 0 -}; - -enum class CoverageShapeType : uint8_t -{ - FOUR_CIRCLES = 0, - TWO_AZIMUTH_TWO_ELEVATION_CIRCLES -}; - -typedef struct CoverageSphereRegion -{ - ViewIdc viewIdc; - int32_t azimuthCentre; - int32_t elevationCentre; - int32_t centreTilt; - uint32_t azimuthRange; - uint32_t elevationRange; - bool interpolate; -}CoverageSphereRegion; - -typedef struct CoverageInformationProperty -{ - CoverageShapeType covShapeType; - bool viewIdcPresenceFlag; - ViewIdc viewIdc; - std::vector sphereRegions; -}CoverageInformationProperty; - -typedef struct SpatialAudioProperty -{ - uint8_t version; - uint8_t ambisonicType; - uint32_t ambisonicOrder; - uint8_t ambisonicChannelOrder; - uint8_t ambisonicNorm; - std::vector channelMap; -}SpatialAudioProperty; - -enum class StereoScopic3DProperty : uint8_t -{ - MONO = 0, - STEREO_TOP_BOTTOM = 1, - STEREO_LEFT_RIGHT = 2, - STEREO_STEREO = 3 -}; - -typedef struct PoseDegreesFP -{ - int32_t yawFP; - int32_t pitchFP; - int32_t rollFP; -}PoseDegreesFP; - -typedef struct CubemapProjection -{ - uint32_t layout; - uint32_t padding; -}CubemapProjection; - -typedef struct EquirectangularProjection -{ - uint32_t topFP; - uint32_t bottomFP; - uint32_t leftFP; - uint32_t rightFP; -}EquirectangularProjection; - -enum class ProjectionType : uint8_t -{ - UNKOWN = 0, - CUBEMAP = 1, - EQUIRECTANGULAR = 2, - MESH = 3 -}; - -typedef struct SphericalVideoV2Property -{ - PoseDegreesFP pose; - ProjectionType projectionType; - union Projection { - CubemapProjection cubemap; - EquirectangularProjection equirectangular; - } projection; -}SphericalVideoV2Property; - -typedef struct SphericalVideoV1Property -{ - bool isSpherical; - bool isStitched; - ProjectionType projectionType; - uint32_t sourceNumber; - PoseDegreesFP initialViewport; - uint64_t timestamp; - uint32_t fullPanoWidthPixels; - uint32_t fullPanoHeightPixels; - uint32_t croppedAreaImageWidthPixels; - uint32_t croppedAreaImageHeightPixels; - uint32_t croppedAreaLeftPixels; - uint32_t croppedAreaTopPixels; -}SphericalVideoV1Property; - -enum DecSpecInfoType -{ - AVC_SPS = 7, - AVC_PPS = 8, - HEVC_VPS = 32, - HEVC_SPS = 33, - HEVC_PPS = 34, - AudioSpecificConfig -}; - -typedef struct DecoderSpecificInfo -{ - DecSpecInfoType decodeSpecInfoType; - std::vector decodeSpecInfoData; -}DecoderSpecificInfo; - -typedef struct TimestampIDPair -{ - uint64_t timeStamp; - uint32_t itemId; -}TimestampIDPair; + +typedef struct SRDInfo { + int32_t left = 0; + int32_t top = 0; + int32_t width = 0; + int32_t height = 0; +} SRDInfo; + +using FourCC = VCD::MP4::FourCC; + +using TrackSampleType = VCD::MP4::SampleFrameType; + +using ViewIdc = VCD::MP4::OmniViewIdc; 
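
The replacement header forwards its public type names into the isolib reader types with using-aliases, so OmafDashAccess code keeps the old VCD::OMAF spellings while the definitions move into VCD::MP4. A minimal, self-contained sketch of that forwarding pattern, using a stand-in OmniViewIdc whose enumerators are copied from the local ViewIdc enum removed above; the real isolib type in ../isolib/dash_parser/Mp4DataTypes.h is assumed, not shown here:

```cpp
#include <cstdint>

// Stand-in for the isolib definition (the real one lives in
// ../isolib/dash_parser/Mp4DataTypes.h); enumerator values mirror the
// local ViewIdc enum that this diff removes.
namespace VCD { namespace MP4 {
enum class OmniViewIdc : uint8_t {
  MONOSCOPIC = 0,
  LEFT = 1,
  RIGHT = 2,
  LEFT_AND_RIGHT = 3,
  INVALID = 0xff
};
}}  // namespace VCD::MP4

// Same forwarding style as the header above.
namespace VCD { namespace OMAF {
using ViewIdc = VCD::MP4::OmniViewIdc;
}}  // namespace VCD::OMAF

int main() {
  // Callers keep the pre-refactor spelling; only the definition moved.
  VCD::OMAF::ViewIdc view = VCD::OMAF::ViewIdc::MONOSCOPIC;
  return view == VCD::OMAF::ViewIdc::MONOSCOPIC ? 0 : 1;
}
```
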
+ +using ChannelLayout = VCD::MP4::ChannelLayout; + +using chnlProperty = VCD::MP4::ChnlProperty; + +using RegionWisePackingType = VCD::MP4::OmniRWPKType; + +using RectRegionPacking = VCD::MP4::RectRWPKRegion; + +using RegionWisePackingRegion = VCD::MP4::RWPKRegion; + +using RegionWisePackingProperty = VCD::MP4::RWPKProperty; + +using CoverageShapeType = VCD::MP4::COVIShapeType; + +using CoverageSphereRegion = VCD::MP4::COVIRegion; + +using CoverageInformationProperty = VCD::MP4::COVIInformation; + +using SpatialAudioProperty = VCD::MP4::SpatialAudioProperty; + +using StereoScopic3DProperty = VCD::MP4::OmniStereoScopic3D; + +using SphericalVideoV2Property = VCD::MP4::SphericalVideoV2Property; + +using SphericalVideoV1Property = VCD::MP4::SphericalVideoV1Property; + +using DecSpecInfoType = VCD::MP4::MediaCodecInfoType; + +using DecoderSpecificInfo = VCD::MP4::MediaCodecSpecInfo; + +using TimestampIDPair = VCD::MP4::TStampID; typedef uint32_t FeatureBitMask; -namespace TrackFeatureEnum -{ - enum Feature - { - isVideoTrack = 1u, - isAudioTrack = 1u << 1, - isMetadataTrack = 1u << 2, - hasAlternatives = 1u << 3, - hasSampleGroups = 1u << 4, - hasAssociatedDepthTrack = 1u << 5 - }; - - enum VRFeature - { - isAudioLSpeakerChnlStructTrack = 1u << 2, - isVRGoogleSpatialAudioTrack = 1u << 8, - isVRGoogleNonDiegeticAudioTrack = 1u << 9, - hasVRGoogleStereoscopic3D = 1u << 12, - hasVRGoogleV1SpericalVideo = 1u << 13, - hasVRGoogleV2SpericalVideo = 1u << 14, - }; -}; - -typedef struct TypeToTrackIDs -{ - FourCC type; - std::vector trackIds; -}TypeToTrackIDs; - -typedef struct SampleFlagsType -{ - uint32_t reserved : 4, - isLeading : 2, - sampleDependsOn : 2, - sampleIsDependedOn : 2, - sampleHasRedundancy : 2, - samplePaddingValue : 3, - sampleIsNonSyncSample : 1, - sampleDegradationPriority : 16; -}SampleFlagsType; - -union SampleFlags { - uint32_t flagsAsUInt; - SampleFlagsType flags; -}; - -enum SampleType -{ - OUTPUT_NON_REF_FRAME, - OUTPUT_REF_FRAME, - NON_OUTPUT_REF_FRAME -}; - -typedef struct SampleInformation -{ - uint32_t id; - FourCC entryType; - uint32_t descriptionIndex; - SampleType type; - uint32_t initSegmentId; - uint32_t segmentId; - uint64_t earliestTimestamp; - SampleFlags flags; - uint64_t durationTS; - uint64_t earliestTimestampTS; -}SampleInformation; - -typedef struct TrackTypeInformation -{ - FourCC majorBrand; - uint32_t minorVersion; - std::vector compatibleBrandArrays; -}TrackTypeInformation; - -typedef struct TrackInformation -{ - uint32_t trackId; - uint32_t initSegId; - uint32_t alternateGroupId; - FeatureBitMask featureBM; - FeatureBitMask vrFeatureBM; - std::vector trackURI; - std::vector alternateTrackIdArrays; - std::vector referenceTrackIdArrays; - std::vector trackGroupIdArrays; - std::vector samplePropertyArrays; - uint32_t maxSampleSize; - uint32_t timeScale; - Fractional frameRate; - bool hasTypeInformation; - TrackTypeInformation type; -}TrackInformation; - -typedef struct SegmentInformation -{ - uint32_t refId; - uint32_t timescale; - bool refType; - uint64_t earliestPTSinTS; - uint32_t durationInTS; - uint64_t startDataOffset; - uint32_t dataSize; - bool isStartedWithSAP; - uint8_t SAPType; -}SegmentInformation; - -typedef struct SchemeType -{ - FourCC type; - uint32_t version; - std::vector uri; -}SchemeType; - -typedef struct SchemeTypesProperty -{ - SchemeType mainScheme; - std::vector compatibleSchemeTypes; -}SchemeTypesProperty; - -enum OmafProjectionType -{ - EQUIRECTANGULAR = 0, - CUBEMAP -}; - -typedef struct ProjectionFormatProperty -{ - 
OmafProjectionType format; -}ProjectionFormatProperty; - -enum PodvStereoVideoConfiguration -{ - TOP_BOTTOM_PACKING = 3, - SIDE_BY_SIDE_PACKING = 4, - TEMPORAL_INTERLEAVING = 5, - MONOSCOPIC = 0x8f -}; - -typedef struct Rotation -{ - int32_t yaw; - int32_t pitch; - int32_t roll; -}Rotation; - -typedef struct SphereRegionProperty -{ - int32_t azimuthCentre; - int32_t elevationCentre; - int32_t centreTilt; - uint32_t azimuthRange; - uint32_t elevationRange; - bool interpolate; -}SphereRegionProperty; +using Feature = VCD::MP4::FeatureOfTrack; +using VRFeature = VCD::MP4::ImmersiveProperty; + +using TypeToTrackIDs = VCD::MP4::TypeToTrackIDs; + +using SampleType = VCD::MP4::FrameCodecType; + +using SampleInformation = VCD::MP4::TrackSampInfo; + +using TrackInformation = VCD::MP4::TrackInformation; + +using SchemeTypesProperty = VCD::MP4::SchemeTypesProperty; + +using ProjectionFormatProperty = VCD::MP4::ProjFormat; + +using PodvStereoVideoConfiguration = VCD::MP4::VideoFramePackingType; + +using Rotation = VCD::MP4::Rotation; VCD_OMAF_END; #endif /* ISO_STRUCTURE_H */ - diff --git a/src/OmafDashAccess/test/compile.sh b/src/OmafDashAccess/test/compile.sh index 6050c40b..48de07c8 100755 --- a/src/OmafDashAccess/test/compile.sh +++ b/src/OmafDashAccess/test/compile.sh @@ -2,17 +2,21 @@ cp ../../google_test/libgtest.a . -g++ -I../../google_test -std=c++11 -I../util/ -g -c testMediaSource.cpp -D_GLIBCXX_USE_CXX11_ABI=0 -g++ -I../../google_test -std=c++11 -I../util/ -g -c testMPDParser.cpp -D_GLIBCXX_USE_CXX11_ABI=0 -g++ -I../../google_test -std=c++11 -I../util/ -g -c testOmafReader.cpp -D_GLIBCXX_USE_CXX11_ABI=0 -g++ -I../../google_test -std=c++11 -I../util/ -g -c testOmafReaderManager.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../../isolib -I../../google_test -std=c++11 -I../util/ -g -c testMediaSource.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../../isolib -I../../google_test -std=c++11 -I../util/ -g -c testMPDParser.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../../isolib -I../../google_test -std=c++11 -I../util/ -g -c testOmafReader.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../../isolib -I../../google_test -std=c++11 -I../util/ -g -c testOmafReaderManager.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../../isolib -I../../google_test -std=c++11 -I../util/ -g -c testDownloader.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../../isolib -I../../google_test -std=c++11 -I../util/ -g -c testDownloaderPerf.cpp -D_GLIBCXX_USE_CXX11_ABI=0 -LD_FLAGS="-I/usr/local/include/ -lcurl -lstdc++ -lOmafDashAccess -lpthread -lglog -l360SCVP -lm -L/usr/local/lib" -g++ -L/usr/local/lib testMediaSource.o testMPDParser.o testOmafReader.o testOmafReaderManager.o libgtest.a -o testLib ${LD_FLAGS} +LD_FLAGS="-I/usr/local/include/ -lcurl -lstdc++ -lOmafDashAccess -lsafestring_shared -llttng-ust -ldl -lpthread -lglog -l360SCVP -lm -L/usr/local/lib" +g++ -L/usr/local/lib testDownloaderPerf.o testDownloader.o testMediaSource.o testMPDParser.o testOmafReader.o testOmafReaderManager.o libgtest.a -o testLib ${LD_FLAGS} g++ -L/usr/local/lib testMediaSource.o libgtest.a -o testMediaSource ${LD_FLAGS} g++ -L/usr/local/lib testMPDParser.o libgtest.a -o testMPDParser ${LD_FLAGS} g++ -L/usr/local/lib testOmafReader.o libgtest.a -o testOmafReader ${LD_FLAGS} g++ -L/usr/local/lib testOmafReaderManager.o libgtest.a -o testOmafReaderManager ${LD_FLAGS} +g++ -L/usr/local/lib testDownloader.o libgtest.a -o testDownloader ${LD_FLAGS} +g++ -L/usr/local/lib testDownloaderPerf.o libgtest.a -o testDownloaderPerf ${LD_FLAGS} ./run.sh if [ $? 
-ne 0 ]; then exit 1; fi diff --git a/src/OmafDashAccess/test/run.sh b/src/OmafDashAccess/test/run.sh index b4f85e09..4f7e0821 100755 --- a/src/OmafDashAccess/test/run.sh +++ b/src/OmafDashAccess/test/run.sh @@ -3,9 +3,19 @@ # Download files ################################ curl -ukang1:AP2hWmPtk4RkLXUn2SMWfocyUq2 -O "https://ubit-artifactory-sh.intel.com/artifactory/npg_validation-sh-local/Immersive_Media_test/segs_for_readertest_0909.tar.gz" && tar zxvf segs_for_readertest_0909.tar.gz - +#tar zxvf segs_for_readertest_0909.tar.gz # Run test cases ################################ + +./testOmafReaderManager +if [ $? -ne 0 ]; then exit 1; fi + +./testDownloaderPerf +if [ $? -ne 0 ]; then exit 1; fi + +./testDownloader +if [ $? -ne 0 ]; then exit 1; fi + ./testMediaSource --gtest_filter=*_static if [ $? -ne 0 ]; then exit 1; fi ./testMediaSource --gtest_filter=*_live @@ -20,10 +30,12 @@ if [ $? -ne 0 ]; then exit 1; fi if [ $? -ne 0 ]; then exit 1; fi ./testMPDParser if [ $? -ne 0 ]; then exit 1; fi + +curl -ukang1:AP2hWmPtk4RkLXUn2SMWfocyUq2 -O "https://ubit-artifactory-sh.intel.com/artifactory/npg_validation-sh-local/Immersive_Media_test/segs_for_readertest_0909.tar.gz" && tar zxvf segs_for_readertest_0909.tar.gz +#tar zxvf segs_for_readertest_0909.tar.gz + ./testOmafReader if [ $? -ne 0 ]; then exit 1; fi -./testOmafReaderManager -if [ $? -ne 0 ]; then exit 1; fi # All caes passed ################################ diff --git a/src/OmafDashAccess/test/testDownloader.cpp b/src/OmafDashAccess/test/testDownloader.cpp new file mode 100644 index 00000000..2308c936 --- /dev/null +++ b/src/OmafDashAccess/test/testDownloader.cpp @@ -0,0 +1,221 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include "gtest/gtest.h" +#include +#include +#include +#include + +#include "../OmafDashDownload/OmafDownloader.h" + +using namespace VCD::OMAF; + +namespace { + +class DownloaderTest : public testing::Test { + public: + virtual void SetUp() { + OMAF_LOG(LOG_INFO, "Calling setup\n"); + + outsite_url = "https://www.baidu.com"; + valid_url = "http://10.67.112.194:8080/testOMAFlive/Test.mpd"; + + invalid_url = invalid_url + "invalid"; + no_proxy = "127.0.0.1,*.intel.com,10.67.112.194"; + proxy_url = "http://child-prc.intel.com:913"; + invalid_proxy_url = "http://chil-prc.intel.com:913"; + + dash_client_ = OmafDashSegmentHttpClient::create(10); + + client_params.bssl_verify_host_ = false; + client_params.bssl_verify_peer_ = false; + client_params.conn_timeout_ = 5000; // 5s + client_params.total_timeout_ = 30000; // 30s + + client_params.retry_times_ = 3; + + dash_client_->setParams(client_params); + } + + virtual void TearDown() { + OMAF_LOG(LOG_INFO, "Calling TearDown\n"); + + OMAF_STATUS ret = dash_client_->stop(); + EXPECT_TRUE(ret == ERROR_NONE); + } + + OmafDashSegmentHttpClient::Ptr dash_client_ = nullptr; + + std::string valid_url; + std::string invalid_url; + std::string outsite_url; + std::string proxy_url; + std::string invalid_proxy_url; + std::string no_proxy; + OmafDashHttpParams client_params; +}; + +TEST_F(DownloaderTest, Create) { + OmafDashSegmentHttpClient::Ptr dash_client = OmafDashSegmentHttpClient::create(10); + EXPECT_TRUE(dash_client != nullptr); +} + +TEST_F(DownloaderTest, downloadSuccess) { + OMAF_STATUS ret = dash_client_->start(); + EXPECT_TRUE(ret == ERROR_NONE); + + DashSegmentSourceParams ds; + ds.dash_url_ = valid_url; + ds.timeline_point_ = 1; + + bool isState = false; + dash_client_->open( + ds, + [](std::unique_ptr sb) { + EXPECT_TRUE(sb != nullptr); + EXPECT_TRUE(sb->size() > 0); + }, + [&isState](OmafDashSegmentClient::State state) { + OMAF_LOG(LOG_INFO, "Receive the state: %d\n", static_cast(state)); + EXPECT_TRUE(state == OmafDashSegmentClient::State::SUCCESS); + isState = true; + }); + while (!isState) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } +} +TEST_F(DownloaderTest, downloadFailure) { + OMAF_STATUS ret = dash_client_->start(); + EXPECT_TRUE(ret == ERROR_NONE); + DashSegmentSourceParams ds; + ds.dash_url_ = invalid_url; + ds.timeline_point_ = 1; + bool isState = false; + dash_client_->open( + ds, + [](std::unique_ptr sb) { + + }, + [&isState](OmafDashSegmentClient::State state) { + OMAF_LOG(LOG_INFO, "Receive the state: %d\n", static_cast(state)); + EXPECT_TRUE(state == OmafDashSegmentClient::State::TIMEOUT); + isState = true; + }); + while (!isState) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } +} +TEST_F(DownloaderTest, proxy_success) { + DashSegmentSourceParams ds; + ds.dash_url_ = outsite_url; + ds.timeline_point_ = 1; + + OmafDashHttpProxy proxy; + proxy.http_proxy_ = proxy_url; + // proxy.https_proxy_ = proxy_url; + + dash_client_->setProxy(proxy); + + OMAF_STATUS ret = dash_client_->start(); + EXPECT_TRUE(ret == ERROR_NONE); + + bool isState = false; + dash_client_->open( + ds, + [](std::unique_ptr sb) { + EXPECT_TRUE(sb != nullptr); + EXPECT_TRUE(sb->size() > 0); + }, + [&isState](OmafDashSegmentClient::State state) { + OMAF_LOG(LOG_INFO, "Receive the state: %d\n", static_cast(state)); + EXPECT_TRUE(state == OmafDashSegmentClient::State::SUCCESS); + isState = true; + }); + while (!isState) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } +} + +TEST_F(DownloaderTest, 
invalid_proxy) { + DashSegmentSourceParams ds; + ds.dash_url_ = outsite_url; + ds.timeline_point_ = 1; + + OmafDashHttpProxy proxy; + + proxy.https_proxy_ = invalid_proxy_url; + + dash_client_->setProxy(proxy); + + OMAF_STATUS ret = dash_client_->start(); + EXPECT_TRUE(ret == ERROR_NONE); + + bool isState = false; + dash_client_->open( + ds, + [](std::unique_ptr sb) { + + }, + [&isState](OmafDashSegmentClient::State state) { + OMAF_LOG(LOG_INFO, "Receive the state: %d\n", static_cast(state)); + EXPECT_TRUE(state == OmafDashSegmentClient::State::TIMEOUT); + isState = true; + }); + while (!isState) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } +} + +TEST_F(DownloaderTest, no_proxy_success) { + DashSegmentSourceParams ds; + ds.dash_url_ = valid_url; + ds.timeline_point_ = 1; + + OmafDashHttpProxy proxy; + proxy.http_proxy_ = proxy_url; + proxy.no_proxy_ = no_proxy; + dash_client_->setProxy(proxy); + OMAF_STATUS ret = dash_client_->start(); + EXPECT_TRUE(ret == ERROR_NONE); + bool isState = false; + dash_client_->open( + ds, + [](std::unique_ptr sb) { + EXPECT_TRUE(sb != nullptr); + EXPECT_TRUE(sb->size() > 0); + }, + [&isState](OmafDashSegmentClient::State state) { + OMAF_LOG(LOG_INFO, "Receive the state: %d\n", static_cast(state)); + EXPECT_TRUE(state == OmafDashSegmentClient::State::SUCCESS); + isState = true; + }); + while (!isState) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } +} + +} // namespace diff --git a/src/OmafDashAccess/test/testDownloaderPerf.cpp b/src/OmafDashAccess/test/testDownloaderPerf.cpp new file mode 100644 index 00000000..2bf78283 --- /dev/null +++ b/src/OmafDashAccess/test/testDownloaderPerf.cpp @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include "gtest/gtest.h" +#include +#include +#include +#include +#include + +#include "../OmafDashDownload/OmafDownloader.h" + +using namespace VCD::OMAF; + +namespace { + +class DownloaderPerfTest : public testing::Test { + public: + virtual void SetUp() { + OMAF_LOG(LOG_INFO, "Calling setup\n"); + + valid_url = "http://10.67.112.194:8080/testOMAFlive/Test.mpd"; + + invalid_url = invalid_url + "invalid"; + + dash_client_ = OmafDashSegmentHttpClient::create(10); + dash_client_->setStatisticsWindows(10000); + + client_params.bssl_verify_host_ = false; + client_params.bssl_verify_peer_ = false; + client_params.conn_timeout_ = 3000; + client_params.total_timeout_ = 5000; + client_params.retry_times_ = 2; + + dash_client_->setParams(client_params); + OMAF_STATUS ret = dash_client_->start(); + EXPECT_TRUE(ret == ERROR_NONE); + } + + virtual void TearDown() { + OMAF_LOG(LOG_INFO, "Calling TearDown\n"); + + OMAF_STATUS ret = dash_client_->stop(); + EXPECT_TRUE(ret == ERROR_NONE); + } + + OmafDashSegmentHttpClient::Ptr dash_client_ = nullptr; + + std::string valid_url; + std::string invalid_url; + OmafDashHttpParams client_params; +}; + +TEST_F(DownloaderPerfTest, Create) { + OmafDashSegmentHttpClient::Ptr dash_client = OmafDashSegmentHttpClient::create(10); + EXPECT_TRUE(dash_client != nullptr); +} + +TEST_F(DownloaderPerfTest, perf_success) { + DashSegmentSourceParams ds; + ds.dash_url_ = valid_url; + ds.timeline_point_ = 1; + const size_t TEST_COUNT = 1; + std::atomic_int32_t sync_num(0); + dash_client_->open( + ds, + [](std::unique_ptr sb) { + EXPECT_TRUE(sb != nullptr); + EXPECT_TRUE(sb->size() > 0); + }, + [&sync_num](OmafDashSegmentClient::State state) { + OMAF_LOG(LOG_INFO, "Receive the state: %d\n", static_cast(state)); + EXPECT_TRUE(state == OmafDashSegmentClient::State::SUCCESS); + sync_num++; + }); + + while (sync_num.load() != TEST_COUNT) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } + std::unique_ptr stats = dash_client_->statistics(); + EXPECT_TRUE(stats != nullptr); + + EXPECT_TRUE(stats->success_.count_total_ == TEST_COUNT); + EXPECT_TRUE(stats->timeout_.count_total_ == 0); + EXPECT_TRUE(stats->failure_.count_total_ == 0); +} + +TEST_F(DownloaderPerfTest, perf_timeout) { + DashSegmentSourceParams ds; + ds.dash_url_ = invalid_url; + ds.timeline_point_ = 1; + const size_t TEST_COUNT = 1; + std::atomic_int32_t sync_num(0); + dash_client_->open( + ds, + [](std::unique_ptr sb) { + EXPECT_TRUE(sb != nullptr); + EXPECT_TRUE(sb->size() > 0); + }, + [&sync_num](OmafDashSegmentClient::State state) { + OMAF_LOG(LOG_INFO, "Receive the state:%d\n", static_cast(state)); + EXPECT_TRUE(state == OmafDashSegmentClient::State::TIMEOUT); + sync_num++; + }); + while (sync_num.load() != TEST_COUNT) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } + std::unique_ptr stats = dash_client_->statistics(); + EXPECT_TRUE(stats != nullptr); + + EXPECT_TRUE(stats->success_.count_total_ == 0); + EXPECT_TRUE(stats->timeout_.count_total_ == TEST_COUNT); + EXPECT_TRUE(stats->failure_.count_total_ == 0); +} + +TEST_F(DownloaderPerfTest, perf_success_100) { + DashSegmentSourceParams ds; + ds.dash_url_ = valid_url; + ds.timeline_point_ = 1; + + std::atomic_int32_t sync_num(0); + const size_t TEST_COUNT = 100; + for (size_t i = 0; i < TEST_COUNT; i++) { + dash_client_->open( + ds, + [](std::unique_ptr sb) { + EXPECT_TRUE(sb != nullptr); + EXPECT_TRUE(sb->size() > 0); + }, + [&sync_num](OmafDashSegmentClient::State state) { + OMAF_LOG(LOG_INFO, "Receive the state:%d\n", 
static_cast(state)); + EXPECT_TRUE(state == OmafDashSegmentClient::State::SUCCESS); + sync_num++; + }); + } + + while (sync_num.load() != TEST_COUNT) { + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } + std::unique_ptr stats = dash_client_->statistics(); + EXPECT_TRUE(stats != nullptr); + + EXPECT_TRUE(stats->success_.count_total_ == TEST_COUNT); + EXPECT_TRUE(stats->timeout_.count_total_ == 0); + EXPECT_TRUE(stats->failure_.count_total_ == 0); +} + +} // namespace diff --git a/src/OmafDashAccess/test/testMediaSource.cpp b/src/OmafDashAccess/test/testMediaSource.cpp index f2e85da7..56b50611 100644 --- a/src/OmafDashAccess/test/testMediaSource.cpp +++ b/src/OmafDashAccess/test/testMediaSource.cpp @@ -33,242 +33,253 @@ VCD_USE_VRVIDEO; -namespace{ +namespace { class MediaSourceTest : public testing::Test { -public: - virtual void SetUp(){ - - url_live = "http://10.67.112.194:8080/testOMAFlive/Test.mpd"; - //url_static = "https://10.67.119.113:443/UT_testOMAFstatic/Test.mpd"; - //url_static = "http://10.67.119.113:8080/UT_testOMAFstatic/Test.mpd"; - url_static = "http://10.67.112.194:8080/testOMAFstatic/Test.mpd"; - cache = "./cache";//getpwuid(getuid())->pw_dir + std::string("/cache"); - - clientInfo = new HeadSetInfo; - clientInfo->input_geoType = 0; - clientInfo->output_geoType = E_SVIDEO_VIEWPORT; - clientInfo->pose = new HeadPose; - clientInfo->pose->yaw = -90; - clientInfo->pose->pitch = 0; - clientInfo->viewPort_hFOV = 80; - clientInfo->viewPort_vFOV = 80; - clientInfo->viewPort_Width = 960; - clientInfo->viewPort_Height = 960; - - pose = new HeadPose; - pose->yaw = 90; - pose->pitch = 0; - } - - virtual void TearDown(){ - free(clientInfo->pose); - clientInfo->pose = nullptr; - - free(clientInfo); - clientInfo = nullptr; - - free(pose); - pose = nullptr; + public: + virtual void SetUp() { + url_live = "http://10.67.112.194:8080/testOMAFlive/Test.mpd"; + // url_static = "https://10.67.119.113:443/UT_testOMAFstatic/Test.mpd"; + // url_static = "http://10.67.119.113:8080/UT_testOMAFstatic/Test.mpd"; + url_static = "http://10.67.112.194:8080/testOMAFstatic/Test.mpd"; + cache = "./cache"; // getpwuid(getuid())->pw_dir + std::string("/cache"); + pluginName = "libViewportPredict_LR.so"; + libPath = "../../plugins/ViewportPredict_Plugin/predict_LR/"; + + clientInfo = new HeadSetInfo; + //clientInfo->input_geoType = 0; + //clientInfo->output_geoType = E_SVIDEO_VIEWPORT; + clientInfo->pose = new HeadPose; + clientInfo->pose->yaw = -90; + clientInfo->pose->pitch = 0; + clientInfo->viewPort_hFOV = 80; + clientInfo->viewPort_vFOV = 80; + clientInfo->viewPort_Width = 960; + clientInfo->viewPort_Height = 960; + + pose = new HeadPose; + pose->yaw = 90; + pose->pitch = 0; + } + + virtual void TearDown() { + free(clientInfo->pose); + clientInfo->pose = nullptr; + + free(clientInfo); + clientInfo = nullptr; + + free(pose); + pose = nullptr; + } + + int GetFileCntUnderCache() { + // check the downloaded files number > 0 + string downloadedFileCnt = "ls -l " + cache + " | grep -v ^l | wc -l"; + std::array buffer; + std::string result; + FILE* pipe = popen(downloadedFileCnt.c_str(), "r"); + if (!pipe) cout << "failed to get downloaded files count!" 
<< endl; + + while (pipe && fgets(buffer.data(), buffer.size(), pipe) != nullptr) { + result += buffer.data(); } - int GetFileCntUnderCache() - { - // check the downloaded files number > 0 - string downloadedFileCnt = "ls -l " + cache + " | grep -v ^l | wc -l"; - std::array buffer; - std::string result; - FILE* pipe = popen(downloadedFileCnt.c_str(), "r"); - if(!pipe) - cout<<"failed to get downloaded files count!"<SetupHeadSetInfo(clientInfo); - EXPECT_TRUE(ret == ERROR_NONE); + int ret = dashSource->SetupHeadSetInfo(clientInfo); + EXPECT_TRUE(ret == ERROR_NONE); - ret = dashSource->OpenMedia(url_static, cache); - EXPECT_TRUE(ret == ERROR_NONE); + PluginDef i360ScvpPlugin; + i360ScvpPlugin.pluginLibPath = NULL; + ret = dashSource->OpenMedia(url_static, cache, NULL, i360ScvpPlugin, true, false, "", ""); + EXPECT_TRUE(ret == ERROR_NONE); + dashSource->StartStreaming(); - sleep(5); - dashSource->CloseMedia(); + sleep(5); + dashSource->CloseMedia(); - // check the downloaded files number > 0 - int32_t cnt = GetFileCntUnderCache(); - EXPECT_TRUE(cnt > 1); + // check the downloaded files number > 0 + int32_t cnt = GetFileCntUnderCache(); + EXPECT_TRUE(cnt > 1); - delete dashSource; + delete dashSource; } -TEST_F(MediaSourceTest, OpenMedia_live) -{ - const string command = "rm -rf " + cache + "/*"; - system(command.c_str()); +TEST_F(MediaSourceTest, OpenMedia_live) { + const string command = "rm -rf " + cache + "/*"; + system(command.c_str()); - OmafMediaSource* dashSource = new OmafDashSource(); - EXPECT_TRUE(dashSource != NULL); + OmafMediaSource* dashSource = new OmafDashSource(); + EXPECT_TRUE(dashSource != NULL); - int ret = dashSource->SetupHeadSetInfo(clientInfo); - EXPECT_TRUE(ret == ERROR_NONE); + int ret = dashSource->SetupHeadSetInfo(clientInfo); + EXPECT_TRUE(ret == ERROR_NONE); - ret = dashSource->OpenMedia(url_live, cache); - EXPECT_TRUE(ret == ERROR_NONE); + PluginDef i360ScvpPlugin; + i360ScvpPlugin.pluginLibPath = NULL; + ret = dashSource->OpenMedia(url_live, cache, NULL, i360ScvpPlugin, true, false, "", ""); + EXPECT_TRUE(ret == ERROR_NONE); + dashSource->StartStreaming(); - sleep(5); - dashSource->CloseMedia(); + sleep(5); + dashSource->CloseMedia(); - // check the downloaded files number > 0 - int32_t cnt = GetFileCntUnderCache(); + // check the downloaded files number > 0 + int32_t cnt = GetFileCntUnderCache(); - EXPECT_TRUE(cnt > 1); + EXPECT_TRUE(cnt > 1); - delete dashSource; + delete dashSource; } -TEST_F(MediaSourceTest, OpenMedia_static_withPredictor) -{ - const string command = "rm -rf " + cache + "/*"; - system(command.c_str()); +TEST_F(MediaSourceTest, OpenMedia_static_withPredictor) { + const string command = "rm -rf " + cache + "/*"; + system(command.c_str()); - OmafMediaSource* dashSource = new OmafDashSource(); - EXPECT_TRUE(dashSource != NULL); + OmafMediaSource* dashSource = new OmafDashSource(); + EXPECT_TRUE(dashSource != NULL); - int ret = dashSource->SetupHeadSetInfo(clientInfo); - EXPECT_TRUE(ret == ERROR_NONE); + int ret = dashSource->SetupHeadSetInfo(clientInfo); + EXPECT_TRUE(ret == ERROR_NONE); - ret = dashSource->OpenMedia(url_static, cache, true); - EXPECT_TRUE(ret == ERROR_NONE); + PluginDef i360ScvpPlugin; + i360ScvpPlugin.pluginLibPath = NULL; + ret = dashSource->OpenMedia(url_static, cache, NULL, i360ScvpPlugin, true, true, pluginName, libPath); + EXPECT_TRUE(ret == ERROR_NONE); + dashSource->StartStreaming(); - sleep(15); - dashSource->CloseMedia(); + sleep(15); + dashSource->CloseMedia(); - // check the downloaded files number > 0 - int32_t 
cnt = GetFileCntUnderCache(); + // check the downloaded files number > 0 + int32_t cnt = GetFileCntUnderCache(); - EXPECT_TRUE(cnt > 1); + EXPECT_TRUE(cnt > 1); - delete dashSource; + delete dashSource; } -TEST_F(MediaSourceTest, OpenMedia_live_withPredictor) -{ - const string command = "rm -rf " + cache + "/*"; - system(command.c_str()); +TEST_F(MediaSourceTest, OpenMedia_live_withPredictor) { + const string command = "rm -rf " + cache + "/*"; + system(command.c_str()); - OmafMediaSource* dashSource = new OmafDashSource(); - EXPECT_TRUE(dashSource != NULL); + OmafMediaSource* dashSource = new OmafDashSource(); + EXPECT_TRUE(dashSource != NULL); - int ret = dashSource->SetupHeadSetInfo(clientInfo); - EXPECT_TRUE(ret == ERROR_NONE); + int ret = dashSource->SetupHeadSetInfo(clientInfo); + EXPECT_TRUE(ret == ERROR_NONE); - ret = dashSource->OpenMedia(url_live, cache, true); - EXPECT_TRUE(ret == ERROR_NONE); + PluginDef i360ScvpPlugin; + i360ScvpPlugin.pluginLibPath = NULL; + ret = dashSource->OpenMedia(url_live, cache, NULL, i360ScvpPlugin, true, true, pluginName, libPath); + EXPECT_TRUE(ret == ERROR_NONE); + dashSource->StartStreaming(); - sleep(15); - dashSource->CloseMedia(); + sleep(15); + dashSource->CloseMedia(); - // check the downloaded files number > 0 - int32_t cnt = GetFileCntUnderCache(); + // check the downloaded files number > 0 + int32_t cnt = GetFileCntUnderCache(); - EXPECT_TRUE(cnt > 1); + EXPECT_TRUE(cnt > 1); - delete dashSource; + delete dashSource; } -TEST_F(MediaSourceTest, OpenMedia_static_changeViewport) -{ - string command = "rm -rf " + cache + "/*"; - system(command.c_str()); +TEST_F(MediaSourceTest, OpenMedia_static_changeViewport) { + string command = "rm -rf " + cache + "/*"; + system(command.c_str()); - OmafMediaSource* dashSource = new OmafDashSource(); - EXPECT_TRUE(dashSource != NULL); + OmafMediaSource* dashSource = new OmafDashSource(); + EXPECT_TRUE(dashSource != NULL); - int ret = dashSource->SetupHeadSetInfo(clientInfo); - EXPECT_TRUE(ret == ERROR_NONE); + int ret = dashSource->SetupHeadSetInfo(clientInfo); + EXPECT_TRUE(ret == ERROR_NONE); - ret = dashSource->OpenMedia(url_static, cache); - EXPECT_TRUE(ret == ERROR_NONE); + PluginDef i360ScvpPlugin; + i360ScvpPlugin.pluginLibPath = NULL; + ret = dashSource->OpenMedia(url_static, cache, NULL, i360ScvpPlugin, true, false, "", ""); + EXPECT_TRUE(ret == ERROR_NONE); + dashSource->StartStreaming(); - int16_t vpcnt = 200; - while(vpcnt > 0) - { - pose->yaw += 10; - pose->yaw = pose->yaw > 180 ? pose->yaw - 360 : pose->yaw; - ret = dashSource->ChangeViewport(pose); - EXPECT_TRUE(ret == ERROR_NONE); - usleep(10000); - vpcnt--; - } + int16_t vpcnt = 200; + while (vpcnt > 0) { + pose->yaw += 10; + pose->yaw = pose->yaw > 180 ? 
pose->yaw - 360 : pose->yaw; + ret = dashSource->ChangeViewport(pose); + EXPECT_TRUE(ret == ERROR_NONE); + usleep(10000); + vpcnt--; + } - sleep(5); - dashSource->CloseMedia(); + sleep(5); + dashSource->CloseMedia(); - // check the downloaded files number > 0 - int32_t cnt = GetFileCntUnderCache(); + // check the downloaded files number > 0 + int32_t cnt = GetFileCntUnderCache(); - EXPECT_TRUE(cnt > 1); + EXPECT_TRUE(cnt > 1); - delete dashSource; + delete dashSource; } -TEST_F(MediaSourceTest, OpenMedia_live_changeViewport) -{ - const string command = "rm -rf " + cache + "/*"; - system(command.c_str()); +TEST_F(MediaSourceTest, OpenMedia_live_changeViewport) { + const string command = "rm -rf " + cache + "/*"; + system(command.c_str()); - OmafMediaSource* dashSource = new OmafDashSource(); - EXPECT_TRUE(dashSource != NULL); + OmafMediaSource* dashSource = new OmafDashSource(); + EXPECT_TRUE(dashSource != NULL); - int ret = dashSource->SetupHeadSetInfo(clientInfo); - EXPECT_TRUE(ret == ERROR_NONE); + int ret = dashSource->SetupHeadSetInfo(clientInfo); + EXPECT_TRUE(ret == ERROR_NONE); - ret = dashSource->OpenMedia(url_live, cache); - EXPECT_TRUE(ret == ERROR_NONE); + PluginDef i360ScvpPlugin; + i360ScvpPlugin.pluginLibPath = NULL; + ret = dashSource->OpenMedia(url_live, cache, NULL, i360ScvpPlugin, true, false, "", ""); + EXPECT_TRUE(ret == ERROR_NONE); + dashSource->StartStreaming(); - sleep(1); + sleep(1); - ret = dashSource->ChangeViewport(pose); - EXPECT_TRUE(ret == ERROR_NONE); + ret = dashSource->ChangeViewport(pose); + EXPECT_TRUE(ret == ERROR_NONE); - sleep(4); - dashSource->CloseMedia(); + sleep(4); + dashSource->CloseMedia(); - // check the downloaded files number > 0 - int32_t cnt = GetFileCntUnderCache(); + // check the downloaded files number > 0 + int32_t cnt = GetFileCntUnderCache(); - EXPECT_TRUE(cnt > 1); + EXPECT_TRUE(cnt > 1); - delete dashSource; + delete dashSource; } -} +} // namespace diff --git a/src/OmafDashAccess/test/testOmafReader.cpp b/src/OmafDashAccess/test/testOmafReader.cpp index c42a918b..40d4deef 100644 --- a/src/OmafDashAccess/test/testOmafReader.cpp +++ b/src/OmafDashAccess/test/testOmafReader.cpp @@ -36,369 +36,478 @@ #include "../OmafReader.h" #include "../OmafMP4VRReader.h" +#include + VCD_USE_VROMAF; VCD_USE_VRVIDEO; -namespace { -class OmafReaderTest : public testing::Test -{ -public: - virtual void SetUp() - { - std::string mpdUrl = "./segs_for_readertest/Test.mpd"; - - m_mpdParser = new OmafMPDParser(); - if (!m_mpdParser) - return; +// comments +// this is not a good unit-test cases :( - m_mpdParser->ParseMPD(mpdUrl, m_listStream); - - m_mpdParser->GetMPDInfo(); +namespace { +class OmafReaderTest : public testing::Test { + public: + virtual void SetUp() { + std::string mpdUrl = "./segs_for_readertest/Test.mpd"; + + m_mpdParser = new OmafMPDParser(); + if (!m_mpdParser) return; + m_mpdParser->SetExtractorEnabled(true); + + int ret = m_mpdParser->ParseMPD(mpdUrl, m_listStream); + EXPECT_TRUE(ret == ERROR_NONE); + // m_mpdParser->GetMPDInfo(); + + m_reader = new OmafMP4VRReader(); + if (!m_reader) return; + } + virtual void TearDown() { + SAFE_DELETE(m_mpdParser); + SAFE_DELETE(m_reader); + } + uint32_t buildDashTrackId(uint32_t id) noexcept { return id & static_cast(0xffff); } + uint32_t buildReaderTrackId(uint32_t trackId, uint32_t initSegId) noexcept { return (initSegId << 16) | trackId; } + std::shared_ptr findTrackInformation(OmafReader *reader, uint32_t tackId) noexcept { + try { + std::vector track_infos; + + OMAF_STATUS ret = 
reader->getTrackInformations(track_infos); + if (ERROR_NONE != ret) { + OMAF_LOG(LOG_ERROR, "Failed to get the trackinformation list from reader, code=%d\n", ret); + return nullptr; + } + + // get the required track information and release the old data + std::shared_ptr track_info; + for (auto &track : track_infos) { + if (track != nullptr) { + if (buildDashTrackId(track->trackId) == tackId) { + track_info = std::make_shared(); + *(track_info.get()) = *track; + } + delete track; + } - m_reader = new OmafMP4VRReader(); - if (!m_reader) - return; + track = nullptr; + } + track_infos.clear(); + return std::move(track_info); + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when find the track information! ex: %s\n", ex.what() ); + return nullptr; } - virtual void TearDown() - { - SAFE_DELETE(m_mpdParser); - SAFE_DELETE(m_reader); + } + + bool findSampleIndexRange(std::shared_ptr track_info, uint32_t segid, size_t &begin, + size_t &end) noexcept { + try { + if (track_info.get() == nullptr) { + return false; + } + bool found = false; + for (size_t index = 0; index < track_info->sampleProperties.size; index++) { + if (segid == track_info->sampleProperties[index].segmentId) { + end++; + if (!found) { + found = true; + begin = track_info->sampleProperties[index].sampleId; + } + } + } + return found; + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Exception when find the start index! ex: %s\n", ex.what() ); + return false; } + } - OmafMPDParser *m_mpdParser; - OMAFSTREAMS m_listStream; - OmafReader *m_reader; + OmafMPDParser *m_mpdParser; + OMAFSTREAMS m_listStream; + OmafReader *m_reader; }; -TEST_F(OmafReaderTest, ParseInitialSegment) -{ - int ret = ERROR_NONE; - uint32_t initSegID = 0; - std::vector trackInfos; - std::map mapSegCnt; - std::string cacheFileName; - char storedFileName[1024]; +TEST_F(OmafReaderTest, ParseInitialSegment) { + int ret = ERROR_NONE; + uint32_t initSegID = 0; + std::vector trackInfos; - for (auto it = m_listStream.begin(); it != m_listStream.end(); it++) - { - OmafMediaStream *stream = (OmafMediaStream*)(*it); - EXPECT_TRUE(stream != NULL); + std::string cacheFileName; + char storedFileName[1024]; - std::map normalAS = stream->GetMediaAdaptationSet(); - std::map extractorAS = stream->GetExtractors(); + for (auto it = m_listStream.begin(); it != m_listStream.end(); it++) { + OmafMediaStream *stream = (OmafMediaStream *)(*it); + EXPECT_TRUE(stream != NULL); - for (auto itAS = normalAS.begin(); itAS != normalAS.end(); itAS++) - { - OmafAdaptationSet *pAS = (OmafAdaptationSet*)(itAS->second); - EXPECT_TRUE(pAS != NULL); + std::map normalAS = stream->GetMediaAdaptationSet(); + std::map extractorAS = stream->GetExtractors(); - ret = pAS->LoadLocalInitSegment(); - EXPECT_TRUE(ret == ERROR_NONE); + for (auto itAS = normalAS.begin(); itAS != normalAS.end(); itAS++) { + OmafAdaptationSet *pAS = (OmafAdaptationSet *)(itAS->second); + EXPECT_TRUE(pAS != NULL); - OmafSegment *initSeg = pAS->GetInitSegment(); - EXPECT_TRUE(initSeg != NULL); + ret = pAS->LoadLocalInitSegment(); + EXPECT_TRUE(ret == ERROR_NONE); - memset(storedFileName, 0, 1024); - std::string repId = pAS->GetRepresentationId(); - snprintf(storedFileName, 1024, "./segs_for_readertest/%s.init.mp4", repId.c_str()); - cacheFileName = storedFileName; + OmafSegment::Ptr initSeg = pAS->GetInitSegment(); + EXPECT_TRUE(initSeg != NULL); - initSeg->SetSegmentCacheFile(cacheFileName); - initSeg->SetSegStored(); - ret = m_reader->parseInitializationSegment(initSeg, initSegID); - EXPECT_TRUE(ret 
== ERROR_NONE); + memset(storedFileName, 0, 1024); + std::string repId = pAS->GetRepresentationId(); + snprintf(storedFileName, 1024, "./segs_for_readertest/%s.init.mp4", repId.c_str()); - initSeg->SetInitSegID(initSegID); - initSeg->SetSegID(initSegID); + cacheFileName = storedFileName; - mapSegCnt[initSegID] = 0; + initSeg->SetSegmentCacheFile(cacheFileName); + initSeg->SetSegStored(); + ret = m_reader->parseInitializationSegment(initSeg.get(), initSegID); + EXPECT_TRUE(ret == ERROR_NONE); - initSegID++; - } - - for (auto itAS = extractorAS.begin(); itAS != extractorAS.end(); itAS++) - { - OmafExtractor *extractor = (OmafExtractor*)(itAS->second); - EXPECT_TRUE(extractor != NULL); + initSegID++; + } - ret = extractor->LoadLocalInitSegment(); - EXPECT_TRUE(ret == ERROR_NONE); + for (auto itAS = extractorAS.begin(); itAS != extractorAS.end(); itAS++) { + OmafExtractor *extractor = (OmafExtractor *)(itAS->second); + EXPECT_TRUE(extractor != NULL); - OmafSegment *initSeg = extractor->GetInitSegment(); - EXPECT_TRUE(initSeg != NULL); + ret = extractor->LoadLocalInitSegment(); + EXPECT_TRUE(ret == ERROR_NONE); - memset(storedFileName, 0, 1024); - std::string repId = extractor->GetRepresentationId(); - snprintf(storedFileName, 1024, "./segs_for_readertest/%s.init.mp4", repId.c_str()); - cacheFileName = storedFileName; + OmafSegment::Ptr initSeg = extractor->GetInitSegment(); + EXPECT_TRUE(initSeg != NULL); - initSeg->SetSegmentCacheFile(cacheFileName); - initSeg->SetSegStored(); - ret = m_reader->parseInitializationSegment(initSeg, initSegID); - EXPECT_TRUE(ret == ERROR_NONE); + memset(storedFileName, 0, 1024); + std::string repId = extractor->GetRepresentationId(); + snprintf(storedFileName, 1024, "./segs_for_readertest/%s.init.mp4", repId.c_str()); - initSeg->SetInitSegID(initSegID); - initSeg->SetSegID(initSegID); + cacheFileName = storedFileName; - mapSegCnt[initSegID] = 0; + initSeg->SetSegmentCacheFile(cacheFileName); + initSeg->SetSegStored(); + ret = m_reader->parseInitializationSegment(initSeg.get(), initSegID); + EXPECT_TRUE(ret == ERROR_NONE); - initSegID++; - } + initSegID++; } + } - EXPECT_TRUE(initSegID == 18); - - m_reader->getTrackInformations(trackInfos); - -/* - uint16_t idx = 0; - for ( auto it = trackInfos.begin(); it != trackInfos.end(); it++) - { - TrackInformation trackInfo = *it; - idx++; - - //EXPECT_TRUE(trackInfo.trackId == idx); - //EXPECT_TRUE(trackInfo.initSegId == (idx - 1)); - //EXPECT_TRUE(trackInfo.alternateGroupID == - //printf("trackInfo.alternateGroupID %d \n", trackInfo.alternateGroupId); - //printf("trackInfo.features %d \n", trackInfo.features); - //printf("trackInfo.vrFeatures %d \n", trackInfo.vrFeatures); - //printf("trackInfo.maxSampleSize %d \n", trackInfo.maxSampleSize); - //printf("trackInfo.timeScale %d \n", trackInfo.timeScale); - //printf("trackInfo.hasTypeInformation %d \n", trackInfo.hasTypeInformation); - //EXPECT_TRUE(trackInfo.frameRate.den == 1); - //EXPECT_TRUE(trackInfo.frameRate.num == 25); - - //EXPECT_TRUE(trackInfo.trackURI.size() != 0); - for (auto it1 = trackInfo.trackURI.begin(); - it1 != trackInfo.trackURI.end(); it1++) - { - //printf("The character in trackURI is %c \n", (*it1)); - } - - //EXPECT_TRUE(trackInfo.alternateTrackIds.size() != 0); - for (auto it1 = trackInfo.alternateTrackIds.begin(); - it1 != trackInfo.alternateTrackIds.end(); it1++) - { - //printf("The value in alternateTrackIds is %d \n", (*it1)); - } + EXPECT_TRUE(initSegID == 18); - //EXPECT_TRUE(trackInfo.referenceTrackIds.size() != 0); - for (auto it1 = 
trackInfo.referenceTrackIds.begin(); - it1 != trackInfo.referenceTrackIds.end(); it1++) - { - TypeToTrackIDs referenceTrackId = *it1; - //printf("referenceTrackId.type is %s \n", referenceTrackId.type); - //EXPECT_TRUE(referenceTrackId.trackIds.size() != 0); - for (auto it2 = referenceTrackId.trackIds.begin(); - it2 != referenceTrackId.trackIds.end(); it2++) - { - //printf("reference track id is %d \n", (*it2)); - } - } + try { + // 1. get the track information - //EXPECT_TRUE(trackInfo.trackGroupIds.size() != 0); - for (auto it1 = trackInfo.trackGroupIds.begin(); - it1 != trackInfo.trackGroupIds.end(); it1++) - { - TypeToTrackIDs trackGroupId = *it1; - //printf("trackGroupId.type is %s \n", trackGroupId.type); - //EXPECT_TRUE(trackGroupId.trackIds.size() != 0); - for (auto it2 = trackGroupId.trackIds.begin(); - it2 != trackGroupId.trackIds.end(); it2++) - { - //printf("track id in trackGroupIds is %d \n", (*it2)); - } - } - - //EXPECT_TRUE(trackInfo.sampleProperties.size() != 0); - for (auto it1 = trackInfo.sampleProperties.begin(); - it1 != trackInfo.sampleProperties.end(); it1++) - { - SampleInformation info = *it1; - //printf("sample info earliestTimestamp %ld \n", info.earliestTimestamp); - //printf("sample info earliestTimestampTS %ld \n", info.earliestTimestampTS); - //printf("sample info sampleDescriptionIndex %d \n", info.sampleDescriptionIndex); - //printf("sample info initSegmentId %d \n", info.initSegmentId); - //printf("sample info sampleDurationTS %ld \n", info.sampleDurationTS); - //printf("sample info sampleEntryType %s \n", info.sampleEntryType); - //printf("sample info sampleId %d \n", info.sampleId); - //printf("sample info segmentId %d \n", info.segmentId); - //printf("sample info sampleType %d \n", info.sampleType); - //printf("sample info sampleFlags flagsAsUInt %d \n", info.sampleFlags.flagsAsUInt); - //printf("sample info sampleFlags flags is_leading %d \n", info.sampleFlags.flags.is_leading); - //printf("sample info sampleFlags flags reserved %d \n", info.sampleFlags.flags.reserved); - //printf("sample info sampleFlags flags sample_degradation_priority %d \n", info.sampleFlags.flags.sample_degradation_priority); - //printf("sample info sampleFlags flags sample_has_redundancy %d \n", info.sampleFlags.flags.sample_has_redundancy); - //printf("sample info sampleFlags flags sample_depends_on %d \n", info.sampleFlags.flags.sample_depends_on); - //printf("sample info sampleFlags flags sample_is_non_sync_sample %d \n", info.sampleFlags.flags.sample_is_non_sync_sample); - //printf("sample info sampleFlags flags sample_padding_value %d \n", info.sampleFlags.flags.sample_padding_value); - } + m_reader->getTrackInformations(trackInfos); + // 2. go through the track information + for (auto track : trackInfos) { + if (track == nullptr) { + OMAF_LOG(LOG_ERROR, "Meet empty track!\n"); + continue; + } + + // FIXME there would has bug, if more than one stream. 
+ // or we need update the logic for more than one stream + for (auto &pStream : m_listStream) { + // 2.1.1 check the adaptation set + std::map pMediaAS = pStream->GetMediaAdaptationSet(); + for (auto as : pMediaAS) { + OmafAdaptationSet *pAS = (OmafAdaptationSet *)as.second; + // FIXME GetInitSegID or GetSegID + if (pAS->GetInitSegment()->GetInitSegID() == track->initSegmentId) { + auto dash_track_id = buildDashTrackId(track->trackId); + pAS->SetTrackNumber(static_cast(dash_track_id)); + pAS->GetInitSegment()->SetTrackId(dash_track_id); + + OMAF_LOG(LOG_INFO, "Initse id=%u, trackid=%u\n", track->initSegmentId, dash_track_id ); + break; + } + } // end for adaptation set loop + + // 2.1.3 check the extractors + std::map pExtratorAS = pStream->GetExtractors(); + for (auto &extractor : pExtratorAS) { + OmafExtractor *pExAS = extractor.second; + // FIXME GetInitSegID or GetSegID + if (pExAS->GetInitSegment()->GetInitSegID() == track->initSegmentId) { + auto dash_track_id = buildDashTrackId(track->trackId); + pExAS->SetTrackNumber(static_cast(dash_track_id)); + pExAS->GetInitSegment()->SetTrackId(dash_track_id); + + OMAF_LOG(LOG_INFO, "Initse id=%u, trackid=%u\n", track->initSegmentId, dash_track_id); + break; + } + } // end for extractors loop + } // end stream loop + } // end for track loop + + } catch (const std::exception &ex) { + OMAF_LOG(LOG_ERROR, "Failed to parse the init segment, ex: %s\n", ex.what()); + } + + /* + uint16_t idx = 0; + for ( auto it = trackInfos.begin(); it != trackInfos.end(); it++) + { + TrackInformation trackInfo = *it; + idx++; + + //EXPECT_TRUE(trackInfo.trackId == idx); + //EXPECT_TRUE(trackInfo.initSegId == (idx - 1)); + //EXPECT_TRUE(trackInfo.alternateGroupID == + //printf("trackInfo.alternateGroupID %d \n", trackInfo.alternateGroupId); + //printf("trackInfo.features %d \n", trackInfo.features); + //printf("trackInfo.vrFeatures %d \n", trackInfo.vrFeatures); + //printf("trackInfo.maxSampleSize %d \n", trackInfo.maxSampleSize); + //printf("trackInfo.timeScale %d \n", trackInfo.timeScale); + //printf("trackInfo.hasTypeInformation %d \n", trackInfo.hasTypeInformation); + //EXPECT_TRUE(trackInfo.frameRate.den == 1); + //EXPECT_TRUE(trackInfo.frameRate.num == 25); + + //EXPECT_TRUE(trackInfo.trackURI.size() != 0); + for (auto it1 = trackInfo.trackURI.begin(); + it1 != trackInfo.trackURI.end(); it1++) + { + //printf("The character in trackURI is %c \n", (*it1)); + } + + //EXPECT_TRUE(trackInfo.alternateTrackIds.size() != 0); + for (auto it1 = trackInfo.alternateTrackIds.begin(); + it1 != trackInfo.alternateTrackIds.end(); it1++) + { + //printf("The value in alternateTrackIds is %d \n", (*it1)); + } + + //EXPECT_TRUE(trackInfo.referenceTrackIds.size() != 0); + for (auto it1 = trackInfo.referenceTrackIds.begin(); + it1 != trackInfo.referenceTrackIds.end(); it1++) + { + TypeToTrackIDs referenceTrackId = *it1; + //printf("referenceTrackId.type is %s \n", referenceTrackId.type); + //EXPECT_TRUE(referenceTrackId.trackIds.size() != 0); + for (auto it2 = referenceTrackId.trackIds.begin(); + it2 != referenceTrackId.trackIds.end(); it2++) + { + //printf("reference track id is %d \n", (*it2)); + } + } + + //EXPECT_TRUE(trackInfo.trackGroupIds.size() != 0); + for (auto it1 = trackInfo.trackGroupIds.begin(); + it1 != trackInfo.trackGroupIds.end(); it1++) + { + TypeToTrackIDs trackGroupId = *it1; + //printf("trackGroupId.type is %s \n", trackGroupId.type); + //EXPECT_TRUE(trackGroupId.trackIds.size() != 0); + for (auto it2 = trackGroupId.trackIds.begin(); + it2 != 
trackGroupId.trackIds.end(); it2++) + { + //printf("track id in trackGroupIds is %d \n", (*it2)); + } + } + + //EXPECT_TRUE(trackInfo.sampleProperties.size() != 0); + for (auto it1 = trackInfo.sampleProperties.begin(); + it1 != trackInfo.sampleProperties.end(); it1++) + { + SampleInformation info = *it1; + //printf("sample info earliestTimestamp %ld \n", info.earliestTimestamp); + //printf("sample info earliestTimestampTS %ld \n", info.earliestTimestampTS); + //printf("sample info sampleDescriptionIndex %d \n", info.sampleDescriptionIndex); + //printf("sample info initSegmentId %d \n", info.initSegmentId); + //printf("sample info sampleDurationTS %ld \n", info.sampleDurationTS); + //printf("sample info sampleEntryType %s \n", info.sampleEntryType); + //printf("sample info sampleId %d \n", info.sampleId); + //printf("sample info segmentId %d \n", info.segmentId); + //printf("sample info sampleType %d \n", info.sampleType); + //printf("sample info sampleFlags flagsAsUInt %d \n", info.sampleFlags.flagsAsUInt); + //printf("sample info sampleFlags flags is_leading %d \n", info.sampleFlags.flags.is_leading); + //printf("sample info sampleFlags flags reserved %d \n", info.sampleFlags.flags.reserved); + //printf("sample info sampleFlags flags sample_degradation_priority %d \n", + info.sampleFlags.flags.sample_degradation_priority); + //printf("sample info sampleFlags flags sample_has_redundancy %d \n", + info.sampleFlags.flags.sample_has_redundancy); + //printf("sample info sampleFlags flags sample_depends_on %d \n", + info.sampleFlags.flags.sample_depends_on); + //printf("sample info sampleFlags flags sample_is_non_sync_sample %d \n", + info.sampleFlags.flags.sample_is_non_sync_sample); + //printf("sample info sampleFlags flags sample_padding_value %d \n", + info.sampleFlags.flags.sample_padding_value); + } + } + */ + std::list cached_segments; // dirty implmentation + + for (auto it = m_listStream.begin(); it != m_listStream.end(); it++) { + OmafMediaStream *stream = (OmafMediaStream *)(*it); + EXPECT_TRUE(stream != NULL); + + std::map normalAS = stream->GetMediaAdaptationSet(); + std::map extractorAS = stream->GetExtractors(); + + for (auto itAS = normalAS.begin(); itAS != normalAS.end(); itAS++) { + OmafAdaptationSet *pAS = (OmafAdaptationSet *)(itAS->second); + EXPECT_TRUE(pAS != NULL); + + pAS->Enable(true); + + ret = pAS->LoadLocalSegment(); + EXPECT_TRUE(ret == ERROR_NONE); + + OmafSegment::Ptr newSeg = pAS->GetLocalNextSegment(); + EXPECT_TRUE(newSeg != NULL); + + memset(storedFileName, 0, 1024); + std::string repId = pAS->GetRepresentationId(); + snprintf(storedFileName, 1024, "./segs_for_readertest/%s.1.mp4", repId.c_str()); + cacheFileName = storedFileName; + OMAF_LOG(LOG_INFO, "segment file=%s\n", cacheFileName.c_str()); + OMAF_LOG(LOG_INFO, "init seg=%u\n", newSeg->GetInitSegID()); + OMAF_LOG(LOG_INFO, "segid=%d\n", newSeg->GetSegID()); + newSeg->SetSegmentCacheFile(cacheFileName); + newSeg->SetSegStored(); + + ret = m_reader->parseSegment(newSeg.get(), newSeg->GetInitSegID(), newSeg->GetSegID()); + + EXPECT_TRUE(ret == ERROR_NONE); + cached_segments.push_back(std::move(newSeg)); + // ret = m_reader->getTrackInformations(trackInfos); + // EXPECT_TRUE(ret == ERROR_NONE); } -*/ - for (auto it = m_listStream.begin(); it != m_listStream.end(); it++) - { - OmafMediaStream *stream = (OmafMediaStream*)(*it); - EXPECT_TRUE(stream != NULL); - std::map normalAS = stream->GetMediaAdaptationSet(); - std::map extractorAS = stream->GetExtractors(); + for (auto itAS = extractorAS.begin(); itAS != 
extractorAS.end(); itAS++) { + OmafExtractor *extractor = (OmafExtractor *)(itAS->second); + EXPECT_TRUE(extractor != NULL); - for (auto itAS = normalAS.begin(); itAS != normalAS.end(); itAS++) - { - OmafAdaptationSet *pAS = (OmafAdaptationSet*)(itAS->second); - EXPECT_TRUE(pAS != NULL); + extractor->Enable(true); - pAS->Enable(true); + ret = extractor->LoadLocalSegment(); + EXPECT_TRUE(ret == ERROR_NONE); - ret = pAS->LoadLocalSegment(); - EXPECT_TRUE(ret == ERROR_NONE); + OmafSegment::Ptr newSeg = extractor->GetLocalNextSegment(); + EXPECT_TRUE(newSeg != NULL); - OmafSegment *newSeg = pAS->GetLocalNextSegment(); - EXPECT_TRUE(newSeg != NULL); + memset(storedFileName, 0, 1024); + std::string repId = extractor->GetRepresentationId(); + snprintf(storedFileName, 1024, "./segs_for_readertest/%s.1.mp4", repId.c_str()); - OmafSegment *initSeg = pAS->GetInitSegment(); - EXPECT_TRUE(initSeg != NULL); + cacheFileName = storedFileName; + OMAF_LOG(LOG_INFO, "segment file=%s\n", cacheFileName.c_str()); + OMAF_LOG(LOG_INFO, "init seg=%u\n", newSeg->GetInitSegID()); + OMAF_LOG(LOG_INFO, "segid=%d\n", newSeg->GetSegID()); + newSeg->SetSegmentCacheFile(cacheFileName); + newSeg->SetSegStored(); - memset(storedFileName, 0, 1024); - std::string repId = pAS->GetRepresentationId(); - snprintf(storedFileName, 1024, "./segs_for_readertest/%s.1.mp4", repId.c_str()); - cacheFileName = storedFileName; - - newSeg->SetSegmentCacheFile(cacheFileName); - newSeg->SetSegStored(); - uint32_t initSegID = initSeg->GetInitSegID(); - uint32_t segID = ++(mapSegCnt[initSegID]); - ret = m_reader->parseSegment(newSeg, initSegID, segID); - EXPECT_TRUE(ret == ERROR_NONE); - - ret = m_reader->getTrackInformations(trackInfos); - EXPECT_TRUE(ret == ERROR_NONE); + ret = m_reader->parseSegment(newSeg.get(), newSeg->GetInitSegID(), newSeg->GetSegID()); + EXPECT_TRUE(ret == ERROR_NONE); + cached_segments.push_back(std::move(newSeg)); + // only parse one extractor + break; + } + } + + ret = m_reader->getTrackInformations(trackInfos); + EXPECT_TRUE(ret == ERROR_NONE); + + FILE *fp = NULL; + char fileName[256]; + memset(fileName, 0, 256); + + uint8_t vps[256] = {0}; + uint8_t sps[256] = {0}; + uint8_t pps[256] = {0}; + uint8_t vpsLen = 0; + uint8_t spsLen = 0; + uint8_t ppsLen = 0; + for (auto it = m_listStream.begin(); it != m_listStream.end(); it++) { + OmafMediaStream *stream = (OmafMediaStream *)(*it); + std::map extractorAS = stream->GetExtractors(); + for (auto &it : stream->GetExtractors()) { + OmafExtractor *extractor = it.second; + // for (uint32_t initSegIndex = 10; initSegIndex < 18; initSegIndex++) { + // uint32_t correspondTrackIdx = initSegIndex + 990; + // uint32_t trackIdx = (initSegIndex << 16) | correspondTrackIdx; + + auto trackId = extractor->GetTrackNumber(); + auto initsegid = extractor->GetInitSegment()->GetInitSegID(); + auto trackIdx = buildReaderTrackId(trackId, initsegid); + OMAF_LOG(LOG_INFO, "The trackid=%d\n", trackId); + OMAF_LOG(LOG_INFO, "init segid=%d\n",initsegid); + OMAF_LOG(LOG_INFO, "reader trackid=%d\n", trackIdx); + + snprintf(fileName, 256, "Viewport%d.h265", initsegid - 999); + fp = fopen(fileName, "wb+"); + EXPECT_TRUE(fp != NULL); + if (!fp) continue; + + auto trackInf = findTrackInformation(m_reader, trackId); + size_t begin = 0; + size_t end = 0; + auto segId = 1; + if (findSampleIndexRange(trackInf, segId, begin, end)) { + OMAF_LOG(LOG_INFO, "The begin index=%lld, end=%lld\n", begin, end); + } + + uint32_t sampleIdx = begin; + for (; sampleIdx < end; sampleIdx++) { + uint32_t packetSize = 
((7680 * 3840 * 3) / 2) / 2; + MediaPacket *packet = new MediaPacket(); + if (NULL == packet) { + break; } + packet->ReAllocatePacket(packetSize); - for (auto itAS = extractorAS.begin(); itAS != extractorAS.end(); itAS++) - { - OmafExtractor *extractor = (OmafExtractor*)(itAS->second); - EXPECT_TRUE(extractor != NULL); - - extractor->Enable(true); - - ret = extractor->LoadLocalSegment(); - EXPECT_TRUE(ret == ERROR_NONE); - - OmafSegment *newSeg = extractor->GetLocalNextSegment(); - EXPECT_TRUE(newSeg != NULL); - - OmafSegment *initSeg = extractor->GetInitSegment(); - EXPECT_TRUE(initSeg != NULL); - - memset(storedFileName, 0, 1024); - std::string repId = extractor->GetRepresentationId(); - snprintf(storedFileName, 1024, "./segs_for_readertest/%s.1.mp4", repId.c_str()); - cacheFileName = storedFileName; - - newSeg->SetSegmentCacheFile(cacheFileName); - newSeg->SetSegStored(); - uint32_t initSegID = initSeg->GetInitSegID(); - uint32_t segID = ++(mapSegCnt[initSegID]); - ret = m_reader->parseSegment(newSeg, initSegID, segID); - EXPECT_TRUE(ret == ERROR_NONE); - - ret = m_reader->getTrackInformations(trackInfos); - EXPECT_TRUE(ret == ERROR_NONE); + if (!packet->Payload()) { + delete packet; + break; } - } - FILE *fp = NULL; - char fileName[256]; - memset(fileName, 0, 256); - - uint8_t vps[256] = { 0 }; - uint8_t sps[256] = { 0 }; - uint8_t pps[256] = { 0 }; - uint8_t vpsLen = 0; - uint8_t spsLen = 0; - uint8_t ppsLen = 0; - for (uint32_t initSegIndex = 10; initSegIndex < 18; initSegIndex++) - { - uint32_t correspondTrackIdx = initSegIndex + 990; - uint32_t trackIdx = ( initSegIndex << 16 ) | correspondTrackIdx; - - snprintf(fileName, 256, "Viewport%d.h265", correspondTrackIdx - 999); - fp = fopen(fileName, "wb+"); - EXPECT_TRUE(fp != NULL); - if(!fp) continue; - - uint32_t sampleIdx = 0; - for ( ; sampleIdx < 25; sampleIdx++) - { - uint32_t packetSize = ((7680 * 3840 * 3) / 2) / 2; - MediaPacket *packet = new MediaPacket(); - if (NULL == packet) - { - break; + ret = m_reader->getExtractorTrackSampleData(trackIdx, sampleIdx, (char *)(packet->Payload()), packetSize); + OMAF_LOG(LOG_INFO, "Extractor track sample data, ret=%d\n", ret); + EXPECT_TRUE(ret == ERROR_NONE); + + if (sampleIdx == 0) { + std::vector parameterSets; + ret = m_reader->getDecoderConfiguration(trackIdx, sampleIdx, parameterSets); + EXPECT_TRUE(ret == ERROR_NONE); + + for (auto const ¶meter : parameterSets) { + if (parameter.codecSpecInfoType == VCD::MP4::HEVC_VPS) { + vpsLen = parameter.codecSpecInfoBits.size; + for (uint32_t i = 0; i < parameter.codecSpecInfoBits.size; i++) { + vps[i] = parameter.codecSpecInfoBits[i]; + } } - packet->ReAllocatePacket(packetSize); - if(!packet->Payload()) - { - delete packet; - break; + if (parameter.codecSpecInfoType == VCD::MP4::HEVC_SPS) { + spsLen = parameter.codecSpecInfoBits.size; + for (uint32_t i = 0; i < parameter.codecSpecInfoBits.size; i++) { + sps[i] = parameter.codecSpecInfoBits[i]; + } } - ret = m_reader->getExtractorTrackSampleData(trackIdx, sampleIdx, (char*)(packet->Payload()), packetSize); - EXPECT_TRUE(ret == ERROR_NONE); - - if (sampleIdx == 0) - { - std::vector parameterSets; - ret = m_reader->getDecoderConfiguration(trackIdx, sampleIdx, parameterSets); - EXPECT_TRUE(ret == ERROR_NONE); - - for (auto const& parameter : parameterSets) - { - if (parameter.decodeSpecInfoType == VCD::OMAF::HEVC_VPS) - { - vpsLen = parameter.decodeSpecInfoData.size(); - for (uint32_t i = 0; i < parameter.decodeSpecInfoData.size(); i++) - { - vps[i] = parameter.decodeSpecInfoData[i]; - } - } - - 
if (parameter.decodeSpecInfoType == VCD::OMAF::HEVC_SPS) - { - spsLen = parameter.decodeSpecInfoData.size(); - for (uint32_t i = 0; i < parameter.decodeSpecInfoData.size(); i++) - { - sps[i] = parameter.decodeSpecInfoData[i]; - } - } - - if (parameter.decodeSpecInfoType == VCD::OMAF::HEVC_PPS) - { - ppsLen = parameter.decodeSpecInfoData.size(); - for (uint32_t i = 0; i < parameter.decodeSpecInfoData.size(); i++) - { - pps[i] = parameter.decodeSpecInfoData[i]; - } - } - } - - fwrite(vps, 1, vpsLen, fp); - fwrite(sps, 1, spsLen, fp); - fwrite(pps, 1, ppsLen, fp); + if (parameter.codecSpecInfoType == VCD::MP4::HEVC_PPS) { + ppsLen = parameter.codecSpecInfoBits.size; + for (uint32_t i = 0; i < parameter.codecSpecInfoBits.size; i++) { + pps[i] = parameter.codecSpecInfoBits[i]; + } } + } - fwrite((uint8_t*)(packet->Payload()), 1, packetSize, fp); - - delete packet; - packet = NULL; + fwrite(vps, 1, vpsLen, fp); + fwrite(sps, 1, spsLen, fp); + fwrite(pps, 1, ppsLen, fp); } - fclose(fp); - fp = NULL; + fwrite((uint8_t *)(packet->Payload()), 1, packetSize, fp); + + delete packet; + packet = NULL; + } + // } + break; } -} -} + + fclose(fp); + fp = NULL; + } +} // namespace +} // namespace diff --git a/src/OmafDashAccess/test/testOmafReaderManager.cpp b/src/OmafDashAccess/test/testOmafReaderManager.cpp index ab39e034..f7eaf8ee 100644 --- a/src/OmafDashAccess/test/testOmafReaderManager.cpp +++ b/src/OmafDashAccess/test/testOmafReaderManager.cpp @@ -31,224 +31,284 @@ //! Created on July 17, 2019, 6:04 AM //! -#include "gtest/gtest.h" +#include "../OmafDashSource.h" #include "../OmafReader.h" #include "../OmafReaderManager.h" +#include "gtest/gtest.h" VCD_USE_VROMAF; VCD_USE_VRVIDEO; namespace { -class OmafReaderManagerTest : public testing::Test -{ -public: - virtual void SetUp() - { - m_clientInfo = new HeadSetInfo; - m_clientInfo->input_geoType = 0; - m_clientInfo->output_geoType = E_SVIDEO_VIEWPORT; - m_clientInfo->pose = new HeadPose; - m_clientInfo->pose->yaw = -90; - m_clientInfo->pose->pitch = 0; - m_clientInfo->viewPort_hFOV = 80; - m_clientInfo->viewPort_vFOV = 90; - m_clientInfo->viewPort_Width = 1024; - m_clientInfo->viewPort_Height = 1024; - - m_pose = new HeadPose; - m_pose->yaw = 90; - m_pose->pitch = 0; - - m_source = new OmafDashSource(); - if (!m_source) - return; - - int ret = m_source->SetupHeadSetInfo(m_clientInfo); - if (ret) - return; - - std::string mpdUrl = "./segs_for_readertest/Test.mpd"; - - ret = m_source->OpenMedia(mpdUrl, "./cache", false); - if (ret) - { - printf("Failed to open media \n"); - return; - } - READERMANAGER::GetInstance()->StartThread(); +class OmafReaderManagerTest : public testing::Test { + public: + virtual void SetUp() { + m_clientInfo = new HeadSetInfo; + // m_clientInfo->input_geoType = 0; + // m_clientInfo->output_geoType = E_SVIDEO_VIEWPORT; + m_clientInfo->pose = new HeadPose; + m_clientInfo->pose->yaw = -90; + m_clientInfo->pose->pitch = 0; + m_clientInfo->viewPort_hFOV = 80; + m_clientInfo->viewPort_vFOV = 90; + m_clientInfo->viewPort_Width = 1024; + m_clientInfo->viewPort_Height = 1024; + + m_pose = new HeadPose; + m_pose->yaw = 90; + m_pose->pitch = 0; + + m_source = new OmafDashSource(); + if (!m_source) return; + + int ret = m_source->SetupHeadSetInfo(m_clientInfo); + if (ret) return; + + std::string mpdUrl = "./segs_for_readertest/Test.mpd"; + + PluginDef i360ScvpPlugin; + i360ScvpPlugin.pluginLibPath = NULL; + ret = m_source->OpenMedia(mpdUrl, "./cache", NULL, i360ScvpPlugin, true, false); + if (ret) { + printf("Failed to open media \n"); + 
return; } - virtual void TearDown() - { - delete (m_clientInfo->pose); - m_clientInfo->pose = NULL; + m_source->StartStreaming(); + OmafReaderManager::OmafReaderParams params; + params.duration_ = 1000; + params.mode_ = OmafDashMode::EXTRACTOR; + params.stream_type_ = DASH_STREAM_STATIC; + + m_readerMgr = std::make_shared(nullptr, params); + ret = m_readerMgr->Initialize(m_source); + EXPECT_TRUE(ret == ERROR_NONE); + } + + virtual void TearDown() { + delete (m_clientInfo->pose); + m_clientInfo->pose = NULL; + + delete m_clientInfo; + m_clientInfo = NULL; + + delete m_pose; + m_pose = NULL; + + m_source->CloseMedia(); + SAFE_DELETE(m_source); + } + + HeadSetInfo *m_clientInfo; + OmafMediaSource *m_source; + HeadPose *m_pose; + OmafReaderManager::Ptr m_readerMgr; +}; - delete m_clientInfo; - m_clientInfo = NULL; +TEST_F(OmafReaderManagerTest, ReaderTrackSegments) { + int ret = ERROR_NONE; + char storedFileName[1024]; + std::string cacheFileName; - delete m_pose; - m_pose = NULL; + EXPECT_TRUE(m_source->GetStreamCount() == 1); + OmafMediaStream *stream = m_source->GetStream(0); + EXPECT_TRUE(stream != NULL); - m_source->CloseMedia(); - SAFE_DELETE(m_source); - } + char genFileName[1024]; + memset(genFileName, 0, 1024); + uint32_t extractorTrackID = 1000; + for (; extractorTrackID < 1001; extractorTrackID++) { + memset(genFileName, 0, 1024); + snprintf(genFileName, 1024, "Viewport_Extractor%d.h265", (extractorTrackID - 999)); + FILE *fpGen = fopen(genFileName, "wb+"); + if (!fpGen) continue; - HeadSetInfo *m_clientInfo; - OmafMediaSource *m_source; - HeadPose *m_pose; -}; + ret = m_source->SelectSpecialSegments(extractorTrackID); + EXPECT_TRUE(ret == ERROR_NONE); -TEST_F(OmafReaderManagerTest, ReaderTrackSegments) -{ - int ret = ERROR_NONE; - char storedFileName[1024]; - std::string cacheFileName; + std::map normalAS = stream->GetMediaAdaptationSet(); + std::map extractorAS = stream->GetExtractors(); - EXPECT_TRUE(m_source->GetStreamCount() == 1); - OmafMediaStream *stream = m_source->GetStream(0); - EXPECT_TRUE(stream != NULL); + for (auto itAS = normalAS.begin(); itAS != normalAS.end(); itAS++) { + OmafAdaptationSet *pAS = (OmafAdaptationSet *)(itAS->second); + EXPECT_TRUE(pAS != NULL); - char genFileName[1024]; - memset(genFileName, 0, 1024); - uint32_t extractorTrackID = 1000; - for ( ; extractorTrackID < 1001; extractorTrackID++) - { - memset(genFileName, 0, 1024); - snprintf(genFileName, 1024, "Viewport_Extractor%d.h265", (extractorTrackID - 999)); - FILE *fpGen = fopen(genFileName, "wb+"); - if(!fpGen) continue; - - ret = m_source->SelectSpecialSegments(extractorTrackID); - EXPECT_TRUE(ret == ERROR_NONE); + memset(storedFileName, 0, 1024); + std::string repId = pAS->GetRepresentationId(); + snprintf(storedFileName, 1024, "./segs_for_readertest/%s.init.mp4", repId.c_str()); + cacheFileName = storedFileName; - std::map normalAS = stream->GetMediaAdaptationSet(); - std::map extractorAS = stream->GetExtractors(); - - for (auto itAS = normalAS.begin(); itAS != normalAS.end(); itAS++) - { - OmafAdaptationSet *pAS = (OmafAdaptationSet*)(itAS->second); - EXPECT_TRUE(pAS != NULL); + FILE *fp = fopen(storedFileName, "rb"); + if (!fp) { + fclose(fpGen); + fpGen = NULL; + return; + } + fseek(fp, 0L, SEEK_END); + uint64_t segSize = ftell(fp); + fseek(fp, 0L, SEEK_SET); + fclose(fp); + fp = NULL; + + ret = pAS->LoadAssignedInitSegment(cacheFileName); + EXPECT_TRUE(ret == ERROR_NONE); + + OmafSegment::Ptr initSeg = pAS->GetInitSegment(); + EXPECT_TRUE(initSeg != NULL); + + 
initSeg->SetSegSize(segSize); + OMAF_LOG(LOG_INFO, "To open local OpenLocalInitSegment--1!\n"); + ret = m_readerMgr->OpenLocalInitSegment(initSeg); + EXPECT_TRUE(ret == ERROR_NONE); + } - memset(storedFileName, 0, 1024); - std::string repId = pAS->GetRepresentationId(); - snprintf(storedFileName, 1024, "./segs_for_readertest/%s.init.mp4", repId.c_str()); - cacheFileName = storedFileName; + for (auto itAS = extractorAS.begin(); itAS != extractorAS.end(); itAS++) { + OmafExtractor *extractor = (OmafExtractor *)(itAS->second); + EXPECT_TRUE(extractor != NULL); - ret = pAS->LoadAssignedInitSegment(cacheFileName); - EXPECT_TRUE(ret == ERROR_NONE); + memset(storedFileName, 0, 1024); + std::string repId = extractor->GetRepresentationId(); + snprintf(storedFileName, 1024, "./segs_for_readertest/%s.init.mp4", repId.c_str()); + cacheFileName = storedFileName; - OmafSegment *initSeg = pAS->GetInitSegment(); - EXPECT_TRUE(initSeg != NULL); + FILE *fp = fopen(storedFileName, "rb"); + if (!fp) { + fclose(fpGen); + fpGen = NULL; + return; + } + fseek(fp, 0L, SEEK_END); + uint64_t segSize = ftell(fp); + fseek(fp, 0L, SEEK_SET); + fclose(fp); + fp = NULL; - uint32_t initSegID = 0; - ret = READERMANAGER::GetInstance()->AddInitSegment(initSeg, initSegID); - EXPECT_TRUE(ret == ERROR_NONE); - } + ret = extractor->LoadAssignedInitSegment(cacheFileName); + EXPECT_TRUE(ret == ERROR_NONE); - for (auto itAS = extractorAS.begin(); itAS != extractorAS.end(); itAS++) - { - OmafExtractor *extractor = (OmafExtractor*)(itAS->second); - EXPECT_TRUE(extractor != NULL); + OmafSegment::Ptr initSeg = extractor->GetInitSegment(); + EXPECT_TRUE(initSeg != NULL); - memset(storedFileName, 0, 1024); - std::string repId = extractor->GetRepresentationId(); - snprintf(storedFileName, 1024, "./segs_for_readertest/%s.init.mp4", repId.c_str()); - cacheFileName = storedFileName; + initSeg->SetSegSize(segSize); - ret = extractor->LoadAssignedInitSegment(cacheFileName); - EXPECT_TRUE(ret == ERROR_NONE); + OMAF_LOG(LOG_INFO, "To open local OpenLocalInitSegment--2!\n"); + ret = m_readerMgr->OpenLocalInitSegment(initSeg); + EXPECT_TRUE(ret == ERROR_NONE); + } - OmafSegment *initSeg = extractor->GetInitSegment(); - EXPECT_TRUE(initSeg != NULL); + usleep(1000000); + for (uint8_t segID = 1; segID < 5; segID++) { + for (auto itAS = normalAS.begin(); itAS != normalAS.end(); itAS++) { + OmafAdaptationSet *pAS = (OmafAdaptationSet *)(itAS->second); + EXPECT_TRUE(pAS != NULL); - uint32_t initSegID = 0; - ret = READERMANAGER::GetInstance()->AddInitSegment(initSeg, initSegID); - EXPECT_TRUE(ret == ERROR_NONE); + if (extractorTrackID == 1000 || extractorTrackID == 1004) { + if ((itAS->first == 4) || (itAS->first == 8)) { + continue; + } } - usleep(1000000); - for (uint8_t segID = 1; segID < 5; segID++) - { - for (auto itAS = normalAS.begin(); itAS != normalAS.end(); itAS++) - { - OmafAdaptationSet *pAS = (OmafAdaptationSet*)(itAS->second); - EXPECT_TRUE(pAS != NULL); - - if (extractorTrackID == 1000 || extractorTrackID == 1004) - { - if ((itAS->first == 4) || (itAS->first == 8)) - { - continue; - } - } - - pAS->Enable(true); - - memset(storedFileName, 0, 1024); - std::string repId = pAS->GetRepresentationId(); - snprintf(storedFileName, 1024, "./segs_for_readertest/%s.%d.mp4", repId.c_str(), segID); - cacheFileName = storedFileName; - - OmafSegment *newSeg = pAS->LoadAssignedSegment(cacheFileName); - EXPECT_TRUE(newSeg != NULL); - if(!newSeg) break; - - uint32_t newSegID = 0; - ret = READERMANAGER::GetInstance()->AddSegment(newSeg, 
newSeg->GetInitSegID(), newSegID); - EXPECT_TRUE(ret == ERROR_NONE); - } - - for (auto itAS = extractorAS.begin(); itAS != extractorAS.end(); itAS++) - { - OmafExtractor *extractor = (OmafExtractor*)(itAS->second); - EXPECT_TRUE(extractor != NULL); - - if ((itAS->first) != extractorTrackID) - { - continue; - } - - extractor->Enable(true); - - memset(storedFileName, 0, 1024); - std::string repId = extractor->GetRepresentationId(); - snprintf(storedFileName, 1024, "./segs_for_readertest/%s.%d.mp4", repId.c_str(), segID); - cacheFileName = storedFileName; - - OmafSegment *newSeg = extractor->LoadAssignedSegment(cacheFileName); - EXPECT_TRUE(newSeg != NULL); - if(!newSeg) break; - - uint32_t newSegID = 0; - ret = READERMANAGER::GetInstance()->AddSegment(newSeg, newSeg->GetInitSegID(), newSegID); - EXPECT_TRUE(ret == ERROR_NONE); - } + pAS->Enable(true); + + memset(storedFileName, 0, 1024); + std::string repId = pAS->GetRepresentationId(); + snprintf(storedFileName, 1024, "./segs_for_readertest/%s.%d.mp4", repId.c_str(), segID); + cacheFileName = storedFileName; + + FILE *fp = fopen(storedFileName, "rb"); + if (!fp) { + fclose(fpGen); + fpGen = NULL; + return; } + fseek(fp, 0L, SEEK_END); + uint64_t segSize = ftell(fp); + fseek(fp, 0L, SEEK_SET); + fclose(fp); + fp = NULL; + + OmafSegment::Ptr newSeg = pAS->LoadAssignedSegment(cacheFileName); + EXPECT_TRUE(newSeg != NULL); + if (!newSeg) break; + + newSeg->SetSegSize(segSize); + OMAF_LOG(LOG_INFO, "To open local segment--1!\n"); + ret = m_readerMgr->OpenLocalSegment(newSeg, pAS->IsExtractor()); + EXPECT_TRUE(ret == ERROR_NONE); + } - usleep(10000000); - std::list pkts; - bool clearBuf = false; - m_source->GetPacket(0, &pkts, true, clearBuf); + for (auto itAS = extractorAS.begin(); itAS != extractorAS.end(); itAS++) { + OmafExtractor *extractor = (OmafExtractor *)(itAS->second); + EXPECT_TRUE(extractor != NULL); - for (uint8_t frameIdx = 1; frameIdx < 100; frameIdx++) - { - m_source->GetPacket(0, &pkts, false, clearBuf); + if ((itAS->first) != extractorTrackID) { + continue; } - EXPECT_TRUE(pkts.size() == 100); + extractor->Enable(true); - for (auto itPacket = pkts.begin(); itPacket != pkts.end(); itPacket++) - { - uint32_t size = (*itPacket)->Size(); - char *data = (*itPacket)->Payload(); - if(data) fwrite(data, 1, size, fpGen); + memset(storedFileName, 0, 1024); + std::string repId = extractor->GetRepresentationId(); + snprintf(storedFileName, 1024, "./segs_for_readertest/%s.%d.mp4", repId.c_str(), segID); + cacheFileName = storedFileName; - delete (*itPacket); + FILE *fp = fopen(storedFileName, "rb"); + if (!fp) { + fclose(fpGen); + fpGen = NULL; + return; } + fseek(fp, 0L, SEEK_END); + uint64_t segSize = ftell(fp); + fseek(fp, 0L, SEEK_SET); + fclose(fp); + fp = NULL; + + OmafSegment::Ptr newSeg = extractor->LoadAssignedSegment(cacheFileName); + EXPECT_TRUE(newSeg != NULL); + if (!newSeg) break; + + newSeg->SetSegSize(segSize); + OMAF_LOG(LOG_INFO, "To open local segment--2!\n"); + ret = m_readerMgr->OpenLocalSegment(newSeg, extractor->IsExtractor()); + EXPECT_TRUE(ret == ERROR_NONE); + } + } - pkts.clear(); + usleep(10000000); + std::list pkts; + bool clearBuf = false; - fclose(fpGen); - fpGen = NULL; + MediaPacket *pPacket; + + m_readerMgr->GetNextPacket(1000, pPacket, true); + if (pPacket) { + pkts.push_back(pPacket); } + // m_source->GetPacket(0, &pkts, true, clearBuf); + + for (uint8_t frameIdx = 1; frameIdx < 100; frameIdx++) { + // m_source->GetPacket(0, &pkts, false, clearBuf); + m_readerMgr->GetNextPacket(1000, pPacket, false); + if 
(pPacket) { + pkts.push_back(pPacket); + } + } + + OMAF_LOG(LOG_INFO, "Packet size=%lld\n", pkts.size()); + EXPECT_TRUE(pkts.size() == 100); + + for (auto itPacket = pkts.begin(); itPacket != pkts.end(); itPacket++) { + uint32_t size = (*itPacket)->Size(); + char *data = (*itPacket)->Payload(); + if (data) fwrite(data, 1, size, fpGen); + + delete (*itPacket); + } + + pkts.clear(); + + fclose(fpGen); + fpGen = NULL; + } } -} +} // namespace diff --git a/src/OmafDashAccess/util_func.cpp b/src/OmafDashAccess/util_func.cpp index 343ade02..43068d48 100644 --- a/src/OmafDashAccess/util_func.cpp +++ b/src/OmafDashAccess/util_func.cpp @@ -77,63 +77,64 @@ uint64_t net_parse_date(const char *val) { uint64_t current_time; char szDay[50], szMonth[50]; - int32_t year, month, day, h, m, s, ms; + int32_t curr_year, curr_month, curr_day, curr_hour, curr_min, curr_sec, ms; int32_t oh, om; - float secs; + float seconds; bool neg_time_zone = false; struct tm t; memset(&t, 0, sizeof(struct tm)); szDay[0] = szMonth[0] = 0; - year = month = day = h = m = s = 0; + curr_year = curr_month = curr_day = curr_hour = curr_min = curr_sec = 0; oh = om = 0; - secs = 0; + seconds = 0; - if (sscanf(val, "%d-%d-%dT%d:%d:%gZ", &year, &month, &day, &h, &m, &secs) == 6) { + if (sscanf(val, "%d-%d-%dT%d:%d:%gZ", &curr_year, &curr_month, &curr_day, &curr_hour, &curr_min, &seconds) == 6) { } - else if (sscanf(val, "%d-%d-%dT%d:%d:%g-%d:%d", &year, &month, &day, &h, &m, &secs, &oh, &om) == 8) { + else if (sscanf(val, "%d-%d-%dT%d:%d:%g-%d:%d", &curr_year, &curr_month, &curr_day, &curr_hour, &curr_min, &seconds, &oh, &om) == 8) { neg_time_zone = true; } - else if (sscanf(val, "%d-%d-%dT%d:%d:%g+%d:%d", &year, &month, &day, &h, &m, &secs, &oh, &om) == 8) { + else if (sscanf(val, "%d-%d-%dT%d:%d:%g+%d:%d", &curr_year, &curr_month, &curr_day, &curr_hour, &curr_min, &seconds, &oh, &om) == 8) { } - else if (sscanf(val, "%3s, %d %3s %d %d:%d:%d", szDay, &day, szMonth, &year, &h, &m, &s)==7) { - secs = (float) s; + else if (sscanf(val, "%3s, %d %3s %d %d:%d:%d", szDay, &curr_day, szMonth, &curr_year, &curr_hour, &curr_min, &curr_sec)==7) { + seconds = (float) curr_sec; } - else if (sscanf(val, "%9s, %d-%3s-%d %02d:%02d:%02d GMT", szDay, &day, szMonth, &year, &h, &m, &s)==7) { - secs = (float) s; + else if (sscanf(val, "%9s, %d-%3s-%d %02d:%02d:%02d GMT", szDay, &curr_day, szMonth, &curr_year, &curr_hour, &curr_min, &curr_sec)==7) { + seconds = (float) curr_sec; } - else if (sscanf(val, "%3s %3s %d %02d:%02d:%02d %d", szDay, szMonth, &day, &year, &h, &m, &s)==7) { - secs = (float) s; + else if (sscanf(val, "%3s %3s %d %02d:%02d:%02d %d", szDay, szMonth, &curr_day, &curr_year, &curr_hour, &curr_min, &curr_sec)==7) { + seconds = (float) curr_sec; } else { - LOG(ERROR) << "[Core] Cannot parse date string" << val; + OMAF_LOG(LOG_ERROR, "[Core] Cannot parse date string %s\n", val); return 0; } - - if (month) { - month -= 1; + if (curr_month <= 12 && curr_month >= 0) { + if (curr_month) { + curr_month -= 1; } else { - if (!strcmp(szMonth, "Jan")) month = 0; - else if (!strcmp(szMonth, "Feb")) month = 1; - else if (!strcmp(szMonth, "Mar")) month = 2; - else if (!strcmp(szMonth, "Apr")) month = 3; - else if (!strcmp(szMonth, "May")) month = 4; - else if (!strcmp(szMonth, "Jun")) month = 5; - else if (!strcmp(szMonth, "Jul")) month = 6; - else if (!strcmp(szMonth, "Aug")) month = 7; - else if (!strcmp(szMonth, "Sep")) month = 8; - else if (!strcmp(szMonth, "Oct")) month = 9; - else if (!strcmp(szMonth, "Nov")) month = 10; - else if 
(!strcmp(szMonth, "Dec")) month = 11; - } - - t.tm_year = year>1000 ? year-1900 : year; - t.tm_mday = day; - t.tm_hour = h; - t.tm_min = m; - t.tm_sec = (uint32_t) secs; - t.tm_mon = month; + if (!strcmp(szMonth, "Jan")) curr_month = 0; + else if (!strcmp(szMonth, "Feb")) curr_month = 1; + else if (!strcmp(szMonth, "Mar")) curr_month = 2; + else if (!strcmp(szMonth, "Apr")) curr_month = 3; + else if (!strcmp(szMonth, "May")) curr_month = 4; + else if (!strcmp(szMonth, "Jun")) curr_month = 5; + else if (!strcmp(szMonth, "Jul")) curr_month = 6; + else if (!strcmp(szMonth, "Aug")) curr_month = 7; + else if (!strcmp(szMonth, "Sep")) curr_month = 8; + else if (!strcmp(szMonth, "Oct")) curr_month = 9; + else if (!strcmp(szMonth, "Nov")) curr_month = 10; + else if (!strcmp(szMonth, "Dec")) curr_month = 11; + } + } + if (curr_year > INT32_MAX - 1 || curr_year < 0) return 0; + t.tm_year = curr_year>1000 ? curr_year-1900 : curr_year; + t.tm_mday = curr_day; + t.tm_hour = curr_hour; + t.tm_min = curr_min; + t.tm_sec = (uint32_t) seconds; + t.tm_mon = curr_month; if (strlen(szDay) ) { if (!strcmp(szDay, "Mon") || !strcmp(szDay, "Monday")) t.tm_wday = 0; @@ -162,8 +163,26 @@ uint64_t net_parse_date(const char *val) current_time = current_time + diff; } current_time *= 1000; - ms = (uint32_t) ( (secs - (uint32_t) secs) * 1000); - return current_time + ms; + if (current_time > UINT64_MAX - 1) return 0; + if (seconds > 10000000000) return 0; + uint32_t currs = seconds - (uint32_t) seconds; + if (currs >= UINT32_MAX - 1) return 0; + uint32_t currms = currs * 1000; + if (currms < UINT32_MAX - 1) + { + ms = currms; + } + else + { + OMAF_LOG(LOG_ERROR, "invalid ms input!\n"); + return 0; + } + uint64_t ret_time = current_time + ms; + if (ret_time < UINT64_MAX - 1) + return ret_time; + else + return 0; + } uint64_t net_get_utc() @@ -196,7 +215,7 @@ uint64_t net_get_ntp_ts() net_get_ntp(&sec, &frac); res = sec; res<<= 32; - res |= frac; + res |= (uint64_t)frac; return res; } @@ -284,7 +303,7 @@ uint64_t parse_duration(const char * const duration) double s; const char *startT; if (!duration) { - LOG(ERROR) << "[MPD] Error parsing duration: no value indicated\n"; + OMAF_LOG(LOG_ERROR, "[MPD] Error parsing duration: no value indicated\n"); return ERROR_PARSE; } i = 0; @@ -296,18 +315,18 @@ uint64_t parse_duration(const char * const duration) } } if (duration[i] != 'P') { - LOG(ERROR) << "[MPD] Error parsing duration: no value indicated\n"; + OMAF_LOG(LOG_ERROR, "[MPD] Error parsing duration: no value indicated\n"); return ERROR_PARSE; } startT = strchr(duration+1, 'T'); if (duration[i+1] == 0) { - LOG(ERROR) << "[MPD] Error parsing duration: no value indicated\n"; + OMAF_LOG(LOG_ERROR, "[MPD] Error parsing duration: no value indicated\n"); return ERROR_PARSE; } if (! 
startT) { - LOG(ERROR) << "[MPD] Error parsing duration: no Time section found\n"; + OMAF_LOG(LOG_ERROR, "[MPD] Error parsing duration: no Time section found\n"); return ERROR_PARSE; } @@ -335,8 +354,22 @@ uint64_t parse_duration(const char * const duration) s = atof(sep2); *sep1 = 'S'; } - - return (uint64_t)((h*3600+m*60+s)*(uint64_t)1000); + if (h < 1000 && m < 1000) + { + uint64_t tmp_h = h * 3600 * 1000; + uint64_t tmp_m = m * 60 * 1000; + if (tmp_h > UINT64_MAX - 1 || tmp_m > UINT64_MAX - 1) return 0; + uint64_t tmp_time = tmp_h + tmp_m + s * 1000; + if (tmp_time < UINT64_MAX - 1) { + return tmp_time; + } + } + else + { + OMAF_LOG(LOG_ERROR, "[MPD] Error parsing duration: time overflow\n"); + return ERROR_PARSE; + } + return 0; } uint32_t mpd_parse_duration_u32(const char* const duration) @@ -345,7 +378,7 @@ uint32_t mpd_parse_duration_u32(const char* const duration) if (dur <= UINT_MAX) { return (uint32_t)dur; } else { - LOG(ERROR) << "[MPD] Parsed duration" << dur << "doesn't fit on 32 bits! Setting to the 32 bits max.\n"; + OMAF_LOG(LOG_ERROR, "[MPD] Parsed duration %ld doesn't fit on 32 bits! Setting to the 32 bits max.\n", dur); return UINT_MAX; } } diff --git a/src/README.md b/src/README.md index 8da96f64..fab14b84 100644 --- a/src/README.md +++ b/src/README.md @@ -3,10 +3,10 @@ [![Stable release](https://img.shields.io/badge/latest_release-v1.0.0-green.svg)](CHANGELOG.md) [![Contributions](https://img.shields.io/badge/contributions-welcome-blue.svg)](https://github.com/OpenVisualCloud/Immersive-Video-Sample/wiki) -The Immersive Video Delivery solution provides basic components for OMAF-Compliant Tiled 360 Video Delivery, including MCTS-based HEVC transcoding, OMAF-compliant streaming packing, client OMAF dash access library; and FFMPEG plugins for quick trial for these components to setup E2E OMAF-Compliant 360 video streaming. The project is a reference solution for those want to build up tiled-based 360 video delivery based on Intel Platform +The Immersive Video Delivery solution provides basic components for OMAF-Compliant Tiled 360 Video Delivery, including MCTS-based HEVC transcoding, OMAF-compliant streaming packing, a client OMAF DASH access library, and FFMPEG plugins for a quick trial of these components to set up E2E OMAF-compliant 360 video streaming. The project is a reference solution for those who want to build up tile-based 360 video delivery on Intel platforms. # License -The Immersive Video Delivery solution is licensed under the OSI-approved BSD 3-Clause license and LGPLv2. See [LICENSE](LICENSE) for details. +The Immersive Video Delivery solution is licensed under the BSD 3-Clause "New" or "Revised" License, except that "FFMPEG Plugins" is under the LGPLv2.0 license. See [LICENSE](LICENSE) for details. The MP4 base library used in the OMAF Packing Library and OMAF Dash Access Library is from Nokia/OMAF, so its use must comply with the Nokia/OMAF license.
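As an editor's aside on the hardened `parse_duration` change in `util_func.cpp` earlier in this patch: the sketch below is illustrative only and not part of the patch. The helper name `duration_to_ms` is hypothetical; it condenses the new bounds-checked conversion from already-parsed hours/minutes/seconds to milliseconds (the patch itself returns `ERROR_PARSE` when the hour/minute bound is violated).

```cpp
// Illustrative sketch (hypothetical helper, not from the patch): convert
// parsed ISO-8601 duration fields to milliseconds with the same sanity
// bound on hours and minutes that the patched parse_duration() applies.
#include <cstdint>
#include <cstdio>

static uint64_t duration_to_ms(uint32_t h, uint32_t m, double s)
{
    // Reject implausible values up front instead of risking wrap-around
    // in the multiplications below (the patch returns ERROR_PARSE here).
    if (h >= 1000 || m >= 1000)
        return 0;

    return static_cast<uint64_t>(h) * 3600u * 1000u
         + static_cast<uint64_t>(m) * 60u * 1000u
         + static_cast<uint64_t>(s * 1000.0);
}

int main()
{
    // "PT1H2M3.5S" parses to h=1, m=2, s=3.5 -> 3723500 ms.
    std::printf("%llu\n", static_cast<unsigned long long>(duration_to_ms(1, 2, 3.5)));
    return 0;
}
```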
# How to contribute @@ -23,7 +23,7 @@ The Immersive Video Delivery Reference solution contains below components: - [OMAF Dash Access Library](doc/Immersive_Video_Delivery_DashAccess.md) - [360 Video Processing Library](doc/Immersive_Video_Delivery_360SCVP.md) - [Reference OMAF Player](doc/Immersive_Video_Delivery_RefPlayer.md) -- FFMPEG Plugins +- [FFMPEG Plugins](doc/Immersive_Video_Delivery_FFmpeg_usage.md) - Documents # System requirements diff --git a/src/VROmafPacking/CMakeLists.txt b/src/VROmafPacking/CMakeLists.txt index 0ca69b45..6222e14a 100644 --- a/src/VROmafPacking/CMakeLists.txt +++ b/src/VROmafPacking/CMakeLists.txt @@ -1,34 +1,52 @@ -cmake_minimum_required(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +PROJECT(VROmafPacking) -project(VROmafPacking) +OPTION(USE_TRACE + "Use trace" + OFF +) AUX_SOURCE_DIRECTORY(. DIR_SRC) AUX_SOURCE_DIRECTORY(../utils/ UTIL_SRC) -ADD_DEFINITIONS("-g -c -fPIC -lglog -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 -z noexecstack -z relro -z now -fstack-protector-strong -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat -Wformat-security -Wl,-S -Wall -Werror") - -INCLUDE_DIRECTORIES(/usr/local/include ../utils ../360SCVP) -LINK_DIRECTORIES(/usr/local/lib) - -set(DIR_SRC +ADD_DEFINITIONS("-g -c -fPIC -lglog -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 + -z noexecstack -z relro -z now -fstack-protector-strong + -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat + -Wformat-security -Wl,-S -Wall -Werror") + +IF(USE_TRACE) + ADD_DEFINITIONS("-D_USE_TRACE_") + INCLUDE_DIRECTORIES(/usr/local/include ../utils ../360SCVP ../isolib ../trace ../plugins/StreamProcess_Plugin) + LINK_DIRECTORIES(/usr/local/lib ../isolib/dash_writer ../trace) +ELSE() + INCLUDE_DIRECTORIES(/usr/local/include ../utils ../360SCVP ../isolib ../plugins/StreamProcess_Plugin) + LINK_DIRECTORIES(/usr/local/lib ../isolib/dash_writer) +ENDIF() + +SET(DIR_SRC ${DIR_SRC} ${UTIL_SRC} - ) +) -ADD_LIBRARY(VROmafPacking SHARED ${DIR_SRC}) +ADD_LIBRARY(VROmafPacking SHARED ${DIR_SRC}) -TARGET_LINK_LIBRARIES(VROmafPacking streamsegmenter_static_fpic) -TARGET_LINK_LIBRARIES(VROmafPacking streamsegmenter_static) -TARGET_LINK_LIBRARIES(VROmafPacking mp4vr_static_fpic) -TARGET_LINK_LIBRARIES(VROmafPacking mp4vr_static) +TARGET_LINK_LIBRARIES(VROmafPacking dashwriter) TARGET_LINK_LIBRARIES(VROmafPacking glog) - -install(TARGETS VROmafPacking +TARGET_LINK_LIBRARIES(VROmafPacking dl) +TARGET_LINK_LIBRARIES(VROmafPacking safestring_shared) +IF(USE_TRACE) + TARGET_LINK_LIBRARIES(VROmafPacking trace) + TARGET_LINK_LIBRARIES(VROmafPacking lttng-ust) +ENDIF() + +INSTALL(TARGETS VROmafPacking RUNTIME DESTINATION bin LIBRARY DESTINATION lib ARCHIVE DESTINATION lib/static) -install(FILES ${PROJECT_SOURCE_DIR}/../utils/error.h DESTINATION include) -install(FILES ${PROJECT_SOURCE_DIR}/VROmafPacking_data.h DESTINATION include) -install(FILES ${PROJECT_SOURCE_DIR}/VROmafPackingAPI.h DESTINATION include) -install(FILES ${PROJECT_SOURCE_DIR}/VROmafPacking.pc DESTINATION lib/pkgconfig) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/error.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/common_data.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/Log.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/../utils/VROmafPacking_data.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/VROmafPackingAPI.h DESTINATION include) +INSTALL(FILES ${PROJECT_SOURCE_DIR}/VROmafPacking.pc DESTINATION lib/pkgconfig) diff --git a/src/VROmafPacking/DashInitSegmenter.cpp 
b/src/VROmafPacking/DashInitSegmenter.cpp index 5c6695ac..d5b1deb2 100644 --- a/src/VROmafPacking/DashInitSegmenter.cpp +++ b/src/VROmafPacking/DashInitSegmenter.cpp @@ -37,6 +37,8 @@ #include "DashSegmenter.h" +using namespace std; + VCD_NS_BEGIN @@ -51,111 +53,151 @@ DashInitSegmenter::DashInitSegmenter(InitSegConfig *aConfig) DashInitSegmenter::~DashInitSegmenter() = default; -StreamSegmenter::Segmenter::InitSegment DashInitSegmenter::MakeInitSegment(bool flagFrag) +VCD::MP4::InitialSegment DashInitSegmenter::MakeInitSegment(bool isFraged) { - StreamSegmenter::Segmenter::MovieDescription des_of_mov; - des_of_mov.creationTime = 0; - des_of_mov.modificationTime = 0; - des_of_mov.matrix = { 1, 0, 0, 0, - 0, 1, 0, 0, - 0, 0, 1, 0, - 0, 0, 0, 1 }; - StreamSegmenter::BrandSpec typeOfFile = { std::string("isom"), 512, { "isom", "iso6" } }; //Should be iso9 ? + VCD::MP4::MovieDescription moovDes; + moovDes.creationTime = 0; + moovDes.modificationTime = 0; + vector tempVec(16, 0); + moovDes.matrix = tempVec; + moovDes.matrix[0] = 1; + moovDes.matrix[5] = 1; + moovDes.matrix[10] = 1; + moovDes.matrix[15] = 1; + + VCD::MP4::BrandSpec brandSpec = { string("isom"), 512, { "isom", "iso6" } }; //Should be iso9 ? if (!m_omafVideoTrackBrand.empty()) { - typeOfFile.compatibleBrands.push_back(m_omafVideoTrackBrand); + brandSpec.compatibleBrands.push_back(m_omafVideoTrackBrand); } if (!m_omafAudioTrackBrand.empty()) { - typeOfFile.compatibleBrands.push_back(m_omafAudioTrackBrand); + brandSpec.compatibleBrands.push_back(m_omafAudioTrackBrand); } - des_of_mov.fileType = typeOfFile; - auto segInit = StreamSegmenter::Segmenter::makeInitSegment(m_trackDescriptions, des_of_mov, flagFrag); - return segInit; + moovDes.fileType = brandSpec; + VCD::MP4::InitialSegment initialSeg = VCD::MP4::GenInitSegment(m_trackDescriptions, moovDes, isFraged); + return initialSeg; } -void DashInitSegmenter::AddH264VideoTrack(TrackId indexTracked, CodedMeta& metaData) +void DashInitSegmenter::AddH264VideoTrack(VCD::MP4::TrackId trackId, CodedMeta& inMetaData) { - std::vector sps = metaData.decoderConfig.at(ConfigType::SPS); - std::vector pps = metaData.decoderConfig.at(ConfigType::PPS); - StreamSegmenter::TrackMeta trackMeta = m_config.tracks.at(indexTracked).meta; - StreamSegmenter::Segmenter::MediaDescription mediaDescription; - //memset(&(mediaDescription), 0, sizeof(StreamSegmenter::Segmenter::MediaDescription)); - mediaDescription.creationTime = 0; - mediaDescription.modificationTime = 0; + vector avcSPS = inMetaData.decoderConfig.at(ConfigType::SPS); + vector avcPPS = inMetaData.decoderConfig.at(ConfigType::PPS); + VCD::MP4::TrackMeta trackMeta = m_config.tracks.at(trackId).meta; + VCD::MP4::FileInfo trackFileInfo; + trackFileInfo.creationTime = 0; + trackFileInfo.modificationTime = 0; + VCD::MP4::AvcVideoSampleEntry avcEntry; - StreamSegmenter::Segmenter::AvcVideoSampleEntry avcVidEnter; + avcEntry.width = inMetaData.width; + avcEntry.height = inMetaData.height; - avcVidEnter.width = metaData.width; - avcVidEnter.height = metaData.height; + avcEntry.sps = avcSPS; + avcEntry.pps = avcPPS; - avcVidEnter.sps = sps; - avcVidEnter.pps = pps; + m_trackDescriptions.insert(make_pair(trackId, VCD::MP4::TrackDescription(trackMeta, trackFileInfo, avcEntry))); +} - m_trackDescriptions.insert(std::make_pair(indexTracked, StreamSegmenter::Segmenter::TrackDescription(trackMeta, mediaDescription, avcVidEnter))); +void DashInitSegmenter::AddH265VideoTrack(VCD::MP4::TrackId trackId, CodedMeta& inMetaData) +{ + vector hevcSPS = 
inMetaData.decoderConfig.at(ConfigType::SPS); + vector hevcPPS = inMetaData.decoderConfig.at(ConfigType::PPS); + vector hevcVPS = inMetaData.decoderConfig.at(ConfigType::VPS); + VCD::MP4::TrackMeta trackMeta = m_config.tracks.at(trackId).meta; + VCD::MP4::FileInfo trackFileInfo; + trackFileInfo.creationTime = 0; + trackFileInfo.modificationTime = 0; + VCD::MP4::HevcVideoSampleEntry hevcEntry{}; + + hevcEntry.width = inMetaData.width; + hevcEntry.height = inMetaData.height; + hevcEntry.frameRate = inMetaData.duration.per1().asDouble(); + + hevcEntry.sps = hevcSPS; + hevcEntry.pps = hevcPPS; + hevcEntry.vps = hevcVPS; + + FillOmafStructures(trackId, inMetaData, hevcEntry, trackMeta); + + m_trackDescriptions.insert(make_pair(trackId, VCD::MP4::TrackDescription(trackMeta, trackFileInfo, hevcEntry))); } -void DashInitSegmenter::AddH265VideoTrack(TrackId indexTracked, CodedMeta& metaData) +void DashInitSegmenter::AddH265ExtractorTrack(VCD::MP4::TrackId trackId, CodedMeta& inMetaData) { - std::vector vector_sps = metaData.decoderConfig.at(ConfigType::SPS); - std::vector vector_pps = metaData.decoderConfig.at(ConfigType::PPS); - std::vector vector_vps = metaData.decoderConfig.at(ConfigType::VPS); - StreamSegmenter::TrackMeta metaDataTracked = m_config.tracks.at(indexTracked).meta; - StreamSegmenter::Segmenter::MediaDescription mediaDescription; - mediaDescription.creationTime = 0; - mediaDescription.modificationTime = 0; - StreamSegmenter::Segmenter::HevcVideoSampleEntry hevcVidEnter{}; - - hevcVidEnter.width = metaData.width; - hevcVidEnter.height = metaData.height; - hevcVidEnter.framerate = metaData.duration.per1().asDouble(); - - hevcVidEnter.sps = vector_sps; - hevcVidEnter.pps = vector_pps; - hevcVidEnter.vps = vector_vps; - - FillOmafStructures(indexTracked, metaData, hevcVidEnter, metaDataTracked); - - m_trackDescriptions.insert(std::make_pair(indexTracked, StreamSegmenter::Segmenter::TrackDescription(metaDataTracked, mediaDescription, hevcVidEnter))); + vector hevcSPS = inMetaData.decoderConfig.at(ConfigType::SPS); + vector hevcPPS = inMetaData.decoderConfig.at(ConfigType::PPS); + vector hevcVPS = inMetaData.decoderConfig.at(ConfigType::VPS); + VCD::MP4::TrackMeta trackMeta = m_config.tracks.at(trackId).meta; + VCD::MP4::FileInfo trackFileInfo; + trackFileInfo.creationTime = 0; + trackFileInfo.modificationTime = 0; + VCD::MP4::HevcVideoSampleEntry hevcEntry{}; + + hevcEntry.width = inMetaData.width; + hevcEntry.height = inMetaData.height; + hevcEntry.frameRate = inMetaData.duration.per1().asDouble(); + hevcEntry.sampleEntryType = "hvc2"; + + hevcEntry.sps = hevcSPS; + hevcEntry.pps = hevcPPS; + hevcEntry.vps = hevcVPS; + + FillOmafStructures(trackId, inMetaData, hevcEntry, trackMeta); + + using TrackId = VCD::MP4::TrackId; + map::const_iterator iter = m_config.tracks.find(trackId); + if (iter == m_config.tracks.end()) + { + OMAF_LOG(LOG_ERROR, "Can't find specified track index !\n"); + return; + } + TrackConfig trackCfg = iter->second; + VCD::MP4::TrackDescription trackDes = VCD::MP4::TrackDescription(trackMeta, trackFileInfo, hevcEntry); + trackDes.trackReferences = trackCfg.trackReferences; + m_trackDescriptions.insert(make_pair(trackId, move(trackDes))); } -void DashInitSegmenter::AddH265ExtractorTrack(TrackId indexTracked, CodedMeta& metaData) +void DashInitSegmenter::AddAACTrack(VCD::MP4::TrackId trackId, CodedMeta& inMetaData) { - std::vector vector_sps = metaData.decoderConfig.at(ConfigType::SPS); - std::vector vector_pps = metaData.decoderConfig.at(ConfigType::PPS); - std::vector 
vector_vps = metaData.decoderConfig.at(ConfigType::VPS); - StreamSegmenter::TrackMeta trackMeta = m_config.tracks.at(indexTracked).meta; - StreamSegmenter::Segmenter::MediaDescription infoMedia; - infoMedia.creationTime = 0; - infoMedia.modificationTime = 0; - StreamSegmenter::Segmenter::HevcVideoSampleEntry hevcVidEnter{}; - - hevcVidEnter.width = metaData.width; - hevcVidEnter.height = metaData.height; - hevcVidEnter.framerate = metaData.duration.per1().asDouble(); - hevcVidEnter.sampleEntryType = "hvc2"; - - hevcVidEnter.sps = vector_sps; - hevcVidEnter.pps = vector_pps; - hevcVidEnter.vps = vector_vps; - - FillOmafStructures(indexTracked, metaData, hevcVidEnter, trackMeta); - - auto& track = m_config.tracks.at(indexTracked); - auto trackDescription = StreamSegmenter::Segmenter::TrackDescription(trackMeta, infoMedia, hevcVidEnter); - trackDescription.trackReferences = track.trackReferences; - m_trackDescriptions.insert(std::make_pair(indexTracked, std::move(trackDescription))); + std::vector audioSpecInfo = inMetaData.decoderConfig.at(ConfigType::AudioSpecificConfig); + VCD::MP4::TrackMeta trackMeta = m_config.tracks.at(trackId).meta; + VCD::MP4::FileInfo trackFileInfo; + trackFileInfo.creationTime = 0; + trackFileInfo.modificationTime = 0; + VCD::MP4::MP4AudioSampleEntry sampleEntry; + + sampleEntry.sizeOfSample = 16; + sampleEntry.cntOfChannels = inMetaData.channelCfg; + sampleEntry.rateOfSample = inMetaData.samplingFreq; + sampleEntry.idOfES = 1; + sampleEntry.esIdOfDepends = 0; + sampleEntry.sizeOfBuf = 0; + sampleEntry.maxBitrate = inMetaData.bitrate.maxBitrate; + sampleEntry.avgBitrate = inMetaData.bitrate.avgBitrate; + for (auto byte : audioSpecInfo) + { + sampleEntry.decSpecificInfo.push_back(static_cast(byte)); + } + + if (m_config.mode == OperatingMode::OMAF) + { + m_omafAudioTrackBrand = "oa2d"; + } + + m_trackDescriptions.insert(make_pair(trackId, VCD::MP4::TrackDescription(trackMeta, trackFileInfo, sampleEntry))); + OMAF_LOG(LOG_INFO, "Done adding AAC track !\n"); } int32_t DashInitSegmenter::GenerateInitSegment( TrackSegmentCtx *trackSegCtx, - std::map tileTrackSegCtxs) + map tileTrackSegCtxs) { - TrackId trackId = trackSegCtx->trackIdx; - + VCD::MP4::TrackId trackId = trackSegCtx->trackIdx; + OMAF_LOG(LOG_INFO, "Generate initial segment for track %d!\n", trackId.GetIndex()); bool hadFirstFramesRemaining = m_firstFrameRemaining.size(); - bool endOfStream = trackSegCtx->isEOS; - Optional codedMeta; + bool endOfStream = trackSegCtx->codedMeta.isEOS; + VCD::MP4::DataItem codedMeta; + //OMAF_LOG(LOG_INFO, "Is audio %d\n", trackSegCtx->isAudio); if (!(trackSegCtx->isExtractorTrack)) { if (!endOfStream && m_firstFrameRemaining.count(trackId)) @@ -172,7 +214,9 @@ int32_t DashInitSegmenter::GenerateInitSegment( AddH265VideoTrack(trackId, *codedMeta); break; case CodedFormat::AAC: - return OMAF_ERROR_UNDEFINED_OPERATION; + //OMAF_LOG(LOG_INFO, "To add AAC track !\n"); + AddAACTrack(trackId, *codedMeta); + break; case CodedFormat::TimedMetadata: return OMAF_ERROR_UNDEFINED_OPERATION; case CodedFormat::H265Extractor: @@ -187,7 +231,7 @@ int32_t DashInitSegmenter::GenerateInitSegment( { for (auto& normalTrack : m_config.tracks) { - std::map::iterator itTrack; + map::iterator itTrack; itTrack = tileTrackSegCtxs.find(normalTrack.first); if (itTrack != tileTrackSegCtxs.end()) { @@ -208,7 +252,8 @@ int32_t DashInitSegmenter::GenerateInitSegment( AddH265VideoTrack(normalTrack.first, *codedMeta); break; case CodedFormat::AAC: - return OMAF_ERROR_UNDEFINED_OPERATION; + 
AddAACTrack(normalTrack.first, *codedMeta); + break; case CodedFormat::TimedMetadata: return OMAF_ERROR_UNDEFINED_OPERATION; case CodedFormat::H265Extractor: @@ -222,7 +267,7 @@ int32_t DashInitSegmenter::GenerateInitSegment( } else { - if (trackId.get() != normalTrack.first.get()) + if (trackId.GetIndex() != normalTrack.first.GetIndex()) return OMAF_ERROR_INVALID_TRACKSEG_CTX; if (!endOfStream && m_firstFrameRemaining.count(trackId)) @@ -239,7 +284,8 @@ int32_t DashInitSegmenter::GenerateInitSegment( AddH265VideoTrack(trackId, *codedMeta); break; case CodedFormat::AAC: - return OMAF_ERROR_UNDEFINED_OPERATION; + AddAACTrack(trackId, *codedMeta); + break; case CodedFormat::TimedMetadata: return OMAF_ERROR_UNDEFINED_OPERATION; case CodedFormat::H265Extractor: @@ -260,15 +306,21 @@ int32_t DashInitSegmenter::GenerateInitSegment( { if (m_config.writeToBitstream) { + //OMAF_LOG(LOG_INFO, "WRITE TO BS FOR track %d\n", trackId.GetIndex()); if (!endOfStream) { - std::ostringstream frameStream; - StreamSegmenter::Segmenter::writeInitSegment(frameStream, MakeInitSegment(m_config.fragmented)); - std::string frameString(frameStream.str()); + ostringstream frameStream; + VCD::MP4::WriteInitSegment(frameStream, MakeInitSegment(m_config.fragmented)); + string frameString(frameStream.str()); FILE *fp = fopen(trackSegCtx->dashInitCfg.initSegName, "wb+"); if (!fp) + { + OMAF_LOG(LOG_ERROR, "Failed to open %s\n", trackSegCtx->dashInitCfg.initSegName); return OMAF_ERROR_NULL_PTR; + } + + m_initSegSize = frameString.size(); fwrite(frameString.c_str(), 1, frameString.size(), fp); fclose(fp); fp = NULL; @@ -280,108 +332,111 @@ int32_t DashInitSegmenter::GenerateInitSegment( } void DashInitSegmenter::FillOmafStructures( - TrackId indexTracked, - CodedMeta& metaData, - StreamSegmenter::Segmenter::HevcVideoSampleEntry& hevcVidEnter, - StreamSegmenter::TrackMeta& metaTracked) + VCD::MP4::TrackId inTrackId, + CodedMeta& inMetaData, + VCD::MP4::HevcVideoSampleEntry& hevcEntry, + VCD::MP4::TrackMeta& inMeta) { if (m_config.mode == OperatingMode::OMAF) { - if (metaData.projection == OmafProjectionType::EQUIRECTANGULAR) + if (inMetaData.projection == OmafProjectionType::EQUIRECTANGULAR) { - hevcVidEnter.projectionFormat = StreamSegmenter::Segmenter::ProjectionFormat::Equirectangular; + hevcEntry.projFmt = VCD::MP4::OmniProjFormat::OMNI_ERP; } - else + else if (inMetaData.projection == OmafProjectionType::CUBEMAP) + { + hevcEntry.projFmt = VCD::MP4::OmniProjFormat::OMNI_Cubemap; + } + else if (inMetaData.projection == OmafProjectionType::PLANAR) { - hevcVidEnter.projectionFormat = StreamSegmenter::Segmenter::ProjectionFormat::Cubemap; + hevcEntry.projFmt = VCD::MP4::OmniProjFormat::OMNI_Planar; } + if (m_config.packedSubPictures) { // viewport dependent - StreamSegmenter::BrandSpec infoBrandSpec = { std::string("hevd"), 0,{ "hevd" } }; - metaTracked.trackType = infoBrandSpec; - hevcVidEnter.compatibleSchemes.push_back({ "podv", 0, "" }); - hevcVidEnter.compatibleSchemes.push_back({ "ercm", 0, "" }); - m_omafVideoTrackBrand = "hevd"; + VCD::MP4::BrandSpec trackType = { string("'hevd'"), 0,{ "'hevd'" } }; + inMeta.trackType = trackType; + hevcEntry.compatibleSchemes.push_back({ "podv", 0, "" }); + hevcEntry.compatibleSchemes.push_back({ "ercm", 0, "" }); + m_omafVideoTrackBrand = "'hevd'"; } else { - StreamSegmenter::BrandSpec infoBrandSpec = { std::string("hevi"), 0,{ "hevi" } }; - metaTracked.trackType = infoBrandSpec; - hevcVidEnter.compatibleSchemes.push_back({ "podv", 0, "" }); - 
hevcVidEnter.compatibleSchemes.push_back({ "erpv", 0, "" }); + VCD::MP4::BrandSpec trackType = { string("hevi"), 0,{ "hevi" } }; + inMeta.trackType = trackType; + hevcEntry.compatibleSchemes.push_back({ "podv", 0, "" }); + hevcEntry.compatibleSchemes.push_back({ "erpv", 0, "" }); m_omafVideoTrackBrand = "hevi"; } - if (m_config.tracks.at(indexTracked).pipelineOutput == DataInputFormat::VideoTopBottom) + if (m_config.tracks.at(inTrackId).pipelineOutput == DataInputFormat::VideoTopBottom) { - hevcVidEnter.stvi = StreamSegmenter::Segmenter::PodvStereoVideoInfo::TopBottomPacking; + hevcEntry.stvi = VCD::MP4::VideoFramePackingType::OMNI_TOPBOTTOM; } - else if (m_config.tracks.at(indexTracked).pipelineOutput == DataInputFormat::VideoSideBySide) + else if (m_config.tracks.at(inTrackId).pipelineOutput == DataInputFormat::VideoSideBySide) { - hevcVidEnter.stvi = StreamSegmenter::Segmenter::PodvStereoVideoInfo::SideBySidePacking; + hevcEntry.stvi = VCD::MP4::VideoFramePackingType::OMNI_SIDEBYSIDE; } - // temporal interleaving not supported - - if (metaData.sphericalCoverage) + if (inMetaData.sphericalCoverage) { - hevcVidEnter.covi = StreamSegmenter::Segmenter::CoverageInformation(); - if (metaData.projection == OmafProjectionType::EQUIRECTANGULAR) + hevcEntry.covi = VCD::MP4::CoverageInformation(); + if (inMetaData.projection == OmafProjectionType::EQUIRECTANGULAR) { - hevcVidEnter.covi->coverageShape = StreamSegmenter::Segmenter::CoverageInformationShapeType::TwoAzimuthAndTwoElevationCircles; + hevcEntry.covi->coverageShape = VCD::MP4::COVIShapeType::TWO_AZIMUTH_AND_TWO_ELEVATION_CIRCLES; } else { - hevcVidEnter.covi->coverageShape = StreamSegmenter::Segmenter::CoverageInformationShapeType::FourGreatCircles; + hevcEntry.covi->coverageShape = VCD::MP4::COVIShapeType::FOUR_GREAT_CIRCLES; } - if (hevcVidEnter.stvi) + if (hevcEntry.stvi) { - hevcVidEnter.covi->defaultViewIdc = StreamSegmenter::Segmenter::ViewIdc::LEFT_AND_RIGHT; + hevcEntry.covi->defaultViewIdc = VCD::MP4::OmniViewIdc::OMNI_LEFT_AND_RIGHT; } else { - hevcVidEnter.covi->defaultViewIdc = StreamSegmenter::Segmenter::ViewIdc::MONOSCOPIC; + hevcEntry.covi->defaultViewIdc = VCD::MP4::OmniViewIdc::OMNI_MONOSCOPIC; } - hevcVidEnter.covi->viewIdcPresenceFlag = false; - - auto outputRegion = std::unique_ptr(new StreamSegmenter::Segmenter::CoverageInformationRegion()); - auto& covOfSph = metaData.sphericalCoverage.get(); - outputRegion->centreAzimuth = covOfSph.cAzimuth; - outputRegion->centreElevation = covOfSph.cElevation; - outputRegion->centreTilt = covOfSph.cTilt; - outputRegion->azimuthRange = covOfSph.rAzimuth; - outputRegion->elevationRange = covOfSph.rElevation; - outputRegion->interpolate = false; - hevcVidEnter.covi->regions.push_back(std::move(outputRegion)); + hevcEntry.covi->viewIdcPresenceFlag = false; + + auto coviReg = unique_ptr(new VCD::MP4::COVIRegion()); + auto& sphericalCoverage = inMetaData.sphericalCoverage.get(); + coviReg->centAzimuth = sphericalCoverage.cAzimuth; + coviReg->centElevation = sphericalCoverage.cElevation; + coviReg->centTilt = sphericalCoverage.cTilt; + coviReg->azimuthRange = sphericalCoverage.rAzimuth; + coviReg->elevationRange = sphericalCoverage.rElevation; + coviReg->interpolate = false; + hevcEntry.covi->sphereRegions.push_back(move(coviReg)); } - if (metaData.regionPacking) + if (inMetaData.regionPacking) { - hevcVidEnter.rwpk = StreamSegmenter::Segmenter::RegionWisePacking(); - - auto& packArea = metaData.regionPacking.get(); - hevcVidEnter.rwpk->constituenPictureMatchingFlag = 
packArea.constituentPictMatching; - hevcVidEnter.rwpk->projPictureHeight = packArea.projPictureHeight; - hevcVidEnter.rwpk->projPictureWidth = packArea.projPictureWidth; - hevcVidEnter.rwpk->packedPictureHeight = packArea.packedPictureHeight; - hevcVidEnter.rwpk->packedPictureWidth = packArea.packedPictureWidth; - for (auto& iutputRegion : packArea.regions) + hevcEntry.rwpk = VCD::MP4::RegionWisePacking(); + + auto& regionPacking = inMetaData.regionPacking.get(); + hevcEntry.rwpk->constituenPicMatching = regionPacking.constituentPictMatching; + hevcEntry.rwpk->projPicHeight = regionPacking.projPictureHeight; + hevcEntry.rwpk->projPicWidth = regionPacking.projPictureWidth; + hevcEntry.rwpk->packedPicHeight = regionPacking.packedPictureHeight; + hevcEntry.rwpk->packedPicWidth = regionPacking.packedPictureWidth; + for (auto& regionIn : regionPacking.regions) { - auto outputRegion = std::unique_ptr(new StreamSegmenter::Segmenter::RwpkRectRegion()); - outputRegion->packedTop = iutputRegion.packedTop; - outputRegion->packedLeft = iutputRegion.packedLeft; - outputRegion->packedWidth = iutputRegion.packedWidth; - outputRegion->packedHeight = iutputRegion.packedHeight; + auto rwpkReg = unique_ptr(new VCD::MP4::RwpkRectRegion()); + rwpkReg->packedRegTop = regionIn.packedTop; + rwpkReg->packedRegLeft = regionIn.packedLeft; + rwpkReg->packedRegWidth = regionIn.packedWidth; + rwpkReg->packedRegHeight = regionIn.packedHeight; - outputRegion->projTop = iutputRegion.projTop; - outputRegion->projLeft = iutputRegion.projLeft; - outputRegion->projWidth = iutputRegion.projWidth; - outputRegion->projHeight = iutputRegion.projHeight; + rwpkReg->projRegTop = regionIn.projTop; + rwpkReg->projRegLeft = regionIn.projLeft; + rwpkReg->projRegWidth = regionIn.projWidth; + rwpkReg->projRegHeight = regionIn.projHeight; - outputRegion->transformType = iutputRegion.transform; + rwpkReg->transformType = regionIn.transform; - hevcVidEnter.rwpk->regions.push_back(std::move(outputRegion)); + hevcEntry.rwpk->regions.push_back(std::move(rwpkReg)); } } } diff --git a/src/VROmafPacking/DashSegmenter.cpp b/src/VROmafPacking/DashSegmenter.cpp index 5f6c9371..99890669 100644 --- a/src/VROmafPacking/DashSegmenter.cpp +++ b/src/VROmafPacking/DashSegmenter.cpp @@ -35,7 +35,7 @@ #include #include "DashSegmenter.h" -#include "streamsegmenter/segmenterapi.hpp" +#include "../isolib/dash_writer/SegmentWriter.h" VCD_NS_BEGIN @@ -50,29 +50,29 @@ AcquireVideoFrameData::~AcquireVideoFrameData() } -StreamSegmenter::FrameData AcquireVideoFrameData::get() const +VCD::MP4::FrameBuf AcquireVideoFrameData::Get() const { - StreamSegmenter::FrameData frameData( + VCD::MP4::FrameBuf frameData( static_cast(m_data), static_cast(m_data) + m_dataSize); return frameData; } -size_t AcquireVideoFrameData::getSize() const +size_t AcquireVideoFrameData::GetDataSize() const { return (size_t)(m_dataSize); } -AcquireVideoFrameData* AcquireVideoFrameData::clone() const +AcquireVideoFrameData* AcquireVideoFrameData::Clone() const { return new AcquireVideoFrameData(m_data, m_dataSize); } -StreamSegmenter::AutoSegmenterConfig MakeAutoSegmenterConfig( +VCD::MP4::SegmentWriterCfg MakeSegmentWriterConfig( GeneralSegConfig *dashConfig) { - StreamSegmenter::AutoSegmenterConfig config {}; + VCD::MP4::SegmentWriterCfg config {}; config.segmentDuration = dashConfig->sgtDuration; config.subsegmentDuration = dashConfig->subsgtDuration; config.checkIDR = dashConfig->needCheckIDR; @@ -81,21 +81,21 @@ StreamSegmenter::AutoSegmenterConfig MakeAutoSegmenterConfig( 
DashSegmenter::DashSegmenter(GeneralSegConfig *dashConfig, bool createWriter) : m_config(*dashConfig) - , m_autoSegmenter(MakeAutoSegmenterConfig(dashConfig)) + , m_segWriter(MakeSegmentWriterConfig(dashConfig)) { if (createWriter) { - m_segmentWriter.reset(StreamSegmenter::Writer::create()); + //m_segmentWriter.reset(VCD::MP4::Writer::create()); if (m_config.useSeparatedSidx) { - m_segmentWriter->setWriteSegmentHeader(false); + m_segWriter.SetWriteSegmentHeader(false); } } for (auto trackIdMeta : m_config.tracks) { - m_autoSegmenter.addTrack(trackIdMeta.first, trackIdMeta.second); + m_segWriter.AddTrack(trackIdMeta.first, trackIdMeta.second); } } @@ -111,19 +111,21 @@ bool DashSegmenter::DetectNonRefFrame(uint8_t *frameData) } void DashSegmenter::Feed( - TrackId trackId, + VCD::MP4::TrackId trackId, CodedMeta codedFrameMeta, Nalu *dataNalu, - StreamSegmenter::FrameCts compositionTime) + VCD::MP4::FrameCts compositionTime) { CodedMeta frameMeta = codedFrameMeta; - std::unique_ptr dataFrameAcquire( + std::unique_ptr dataFrameAcquire( new AcquireVideoFrameData(dataNalu->data, dataNalu->dataSize)); - StreamSegmenter::FrameInfo infoPerFrame; + + VCD::MP4::FrameInfo infoPerFrame; infoPerFrame.cts = compositionTime; infoPerFrame.duration = frameMeta.duration; infoPerFrame.isIDR = frameMeta.isIDR(); - //LOG(INFO)<<"Feed one sample for track "<data) : false; infoPerFrame.sampleFlags.flags.reserved = 0; @@ -135,41 +137,54 @@ void DashSegmenter::Feed( infoPerFrame.sampleFlags.flags.sample_is_non_sync_sample = !infoPerFrame.isIDR; infoPerFrame.sampleFlags.flags.sample_degradation_priority = 0; - StreamSegmenter::FrameProxy ssFrame(std::move(dataFrameAcquire), infoPerFrame); - m_autoSegmenter.feed(trackId, ssFrame); + VCD::MP4::FrameWrapper ssFrame(std::move(dataFrameAcquire), infoPerFrame); + m_segWriter.FeedOneFrame(trackId, ssFrame); } int32_t DashSegmenter::SegmentData(TrackSegmentCtx *trackSegCtx) { - if (!trackSegCtx->isEOS && trackSegCtx->isExtractorTrack) + if (!trackSegCtx->codedMeta.isEOS) { + if (trackSegCtx->isAudio) + { + return SegmentOneTrack(&(trackSegCtx->audioNalu), trackSegCtx->codedMeta, trackSegCtx->dashCfg.trackSegBaseName); + } + else + { + if (trackSegCtx->isExtractorTrack) + { - if (!(trackSegCtx->extractors)) - return OMAF_ERROR_NULL_PTR; + if (!(trackSegCtx->extractors)) + return OMAF_ERROR_NULL_PTR; - PackExtractors(trackSegCtx->extractors, trackSegCtx->refTrackIdxs, &(trackSegCtx->extractorTrackNalu)); - return SegmentOneTrack(&(trackSegCtx->extractorTrackNalu), trackSegCtx->codedMeta, trackSegCtx->dashCfg.tileSegBaseName); + PackExtractors(trackSegCtx->extractors, trackSegCtx->refTrackIdxs, &(trackSegCtx->extractorTrackNalu)); + return SegmentOneTrack(&(trackSegCtx->extractorTrackNalu), trackSegCtx->codedMeta, trackSegCtx->dashCfg.trackSegBaseName); + } + else + { + return SegmentOneTrack(trackSegCtx->tileInfo->tileNalu, trackSegCtx->codedMeta, trackSegCtx->dashCfg.trackSegBaseName); + } + } } else { - return SegmentOneTrack(trackSegCtx->tileInfo->tileNalu, trackSegCtx->codedMeta, trackSegCtx->dashCfg.tileSegBaseName); + return SegmentOneTrack(NULL, trackSegCtx->codedMeta, trackSegCtx->dashCfg.trackSegBaseName); } } int32_t DashSegmenter::SegmentOneTrack(Nalu *dataNalu, CodedMeta codedMeta, char *outBaseName) { - TrackId trackId = 1; + VCD::MP4::TrackId trackId = 1; trackId = codedMeta.trackId; TrackInfo& trackInfo = m_trackInfo[trackId]; - if (codedMeta.isEOS) { if (!trackInfo.endOfStream) { if (!trackInfo.isFirstFrame) { - m_autoSegmenter.feedEnd(trackId); + 
m_segWriter.FeedEOS(trackId); } trackInfo.endOfStream = true; } @@ -177,15 +192,15 @@ int32_t DashSegmenter::SegmentOneTrack(Nalu *dataNalu, CodedMeta codedMeta, char else if (codedMeta.inCodingOrder) { CodedMeta frameMeta = codedMeta; - StreamSegmenter::FrameCts frameCts; - if (frameMeta.presTime == FrameTime(0, 1)) + VCD::MP4::FrameCts frameCts; + if (frameMeta.presTime == VCD::MP4::FrameTime(0, 1)) { if (trackInfo.isFirstFrame) { - trackInfo.nextCodingTime = FrameTime(frameMeta.codingIndex, 1) * frameMeta.duration.cast(); + trackInfo.nextCodingTime = VCD::MP4::FrameTime(frameMeta.codingIndex, 1) * frameMeta.duration.cast(); } frameCts = { trackInfo.nextCodingTime }; - trackInfo.nextCodingTime += frameMeta.duration.cast(); + trackInfo.nextCodingTime += frameMeta.duration.cast(); } else { @@ -203,15 +218,13 @@ int32_t DashSegmenter::SegmentOneTrack(Nalu *dataNalu, CodedMeta codedMeta, char return OMAF_ERROR_UNDEFINED_OPERATION; } - std::list segments = m_autoSegmenter.extractSegmentsWithSubsegments(); - + std::list segments = m_segWriter.ExtractSubSegments(); if (segments.size()) { for (auto& segment : segments) { m_segNum++; snprintf(m_segName, 1024, "%s.%ld.mp4", outBaseName, m_segNum); - WriteSegment(segment); } } @@ -219,29 +232,12 @@ int32_t DashSegmenter::SegmentOneTrack(Nalu *dataNalu, CodedMeta codedMeta, char return ERROR_NONE; } -int32_t DashSegmenter::WriteSegment(StreamSegmenter::Segmenter::Segments& aSegment) +int32_t DashSegmenter::WriteSegment(VCD::MP4::SegmentList& aSegment) { std::ostringstream frameStream; std::unique_ptr sidxStream; - if (m_config.useSeparatedSidx) - { - sidxStream.reset(new std::ostringstream); - if (!m_sidxWriter) - { - m_sidxWriter = m_segmentWriter->newSidxWriter(1000); - } - if (m_sidxWriter) - { - m_sidxWriter->setOutput(sidxStream.get()); - } - } - else - { - //mSidxWriter = mSegmentWriter.newSidxWriter(); - } - - m_segmentWriter->writeSubsegments(frameStream, aSegment); + m_segWriter.WriteSubSegments(frameStream, aSegment); std::string frameString(frameStream.str()); @@ -249,6 +245,7 @@ int32_t DashSegmenter::WriteSegment(StreamSegmenter::Segmenter::Segments& aSegme if (!m_file) return OMAF_ERROR_NULL_PTR; + m_segSize = frameString.size(); fwrite(frameString.c_str(), 1, frameString.size(), m_file); fclose(m_file); @@ -259,7 +256,7 @@ int32_t DashSegmenter::WriteSegment(StreamSegmenter::Segmenter::Segments& aSegme int32_t DashSegmenter::PackExtractors( std::map* extractorsMap, - std::list refTrackIdxs, + std::list refTrackIdxs, Nalu *extractorsNalu) { if (!extractorsMap) @@ -268,7 +265,7 @@ int32_t DashSegmenter::PackExtractors( std::vector extractorNALUs; std::map::iterator it; - std::list::iterator itRefTrack = refTrackIdxs.begin(); + std::list::iterator itRefTrack = refTrackIdxs.begin(); if (itRefTrack == refTrackIdxs.end()) return OMAF_ERROR_INVALID_REF_TRACK; for (it = extractorsMap->begin(); it != extractorsMap->end(); it++, itRefTrack++) @@ -278,37 +275,37 @@ int32_t DashSegmenter::PackExtractors( return OMAF_ERROR_NULL_PTR; - StreamSegmenter::Segmenter::HevcExtractorTrackFrameData hevcExFrame; - hevcExFrame.nuhTemporalIdPlus1 = DEFAULT_HEVC_TEMPORALIDPLUS1;//extractor.nuhTemporalIdPlus1; - std::list::iterator createSam; - std::list::iterator createLine; + VCD::MP4::HevcExtractorTrackPackedData extractorData; + extractorData.nuhTemporalIdPlus1 = DEFAULT_HEVC_TEMPORALIDPLUS1;//extractor.nuhTemporalIdPlus1; + std::list::iterator sampleConstruct; + std::list::iterator inlineConstruct; - StreamSegmenter::Segmenter::HevcExtractor hevcExOutput; 
- for (createSam = extractor->sampleConstructor.begin(), - createLine = extractor->inlineConstructor.begin(); - createSam != extractor->sampleConstructor.end() || - createLine != extractor->inlineConstructor.end();) + VCD::MP4::HevcExtractor outputExtractor; + for (sampleConstruct = extractor->sampleConstructor.begin(), + inlineConstruct = extractor->inlineConstructor.begin(); + sampleConstruct != extractor->sampleConstructor.end() || + inlineConstruct != extractor->inlineConstructor.end();) { - if (createLine != extractor->inlineConstructor.end()) + if (inlineConstruct != extractor->inlineConstructor.end()) { - hevcExOutput.inlineConstructor = StreamSegmenter::Segmenter::HevcExtractorInlineConstructor{}; - std::vector necessaryInput( - static_cast((*createLine)->inlineData), - static_cast((*createLine)->inlineData + (*createLine)->length)); - hevcExOutput.inlineConstructor->inlineData = std::move(necessaryInput); - createLine++; + outputExtractor.inlineConstructor = VCD::MP4::HevcExtractorInlineConstructor{}; + std::vector neededData( + static_cast((*inlineConstruct)->inlineData), + static_cast((*inlineConstruct)->inlineData + (*inlineConstruct)->length)); + outputExtractor.inlineConstructor->inlineData = std::move(neededData); + inlineConstruct++; } - else if (createSam != extractor->sampleConstructor.end()) + else if (sampleConstruct != extractor->sampleConstructor.end()) { - hevcExOutput.sampleConstructor = StreamSegmenter::Segmenter::HevcExtractorSampleConstructor{}; - hevcExOutput.sampleConstructor->sampleOffset = 0; - hevcExOutput.sampleConstructor->dataOffset = (*createSam)->dataOffset; - hevcExOutput.sampleConstructor->dataLength = (*createSam)->dataLength; - hevcExOutput.sampleConstructor->trackId = (*itRefTrack).get();//(*createSam)->trackRefIndex; // Note: this refers to the index in the track references. It works if trackIds are 1-based and contiguous, as the spec expects the index is 1-based. - createSam++; + outputExtractor.sampleConstructor = VCD::MP4::HevcExtractorSampleConstructor{}; + outputExtractor.sampleConstructor->sampleOffset = 0; + outputExtractor.sampleConstructor->dataOffset = (*sampleConstruct)->dataOffset; + outputExtractor.sampleConstructor->dataLength = (*sampleConstruct)->dataLength; + outputExtractor.sampleConstructor->trackId = (*itRefTrack).GetIndex();//(*sampleConstruct)->trackRefIndex; // Note: this refers to the index in the track references. It works if trackIds are 1-based and contiguous, as the spec expects the index is 1-based. 
+ sampleConstruct++; // now we have a full extractor: either just a sample constructor, or inline+sample constructor pair - hevcExFrame.samples.push_back(hevcExOutput); - const StreamSegmenter::FrameData& nal = hevcExFrame.toFrameData(); + extractorData.samples.push_back(outputExtractor); + const VCD::MP4::FrameBuf& nal = extractorData.GenFrameData(); extractorNALUs.insert(extractorNALUs.end(), make_move_iterator(nal.begin()), make_move_iterator(nal.end())); } } diff --git a/src/VROmafPacking/DashSegmenter.h b/src/VROmafPacking/DashSegmenter.h index f0ee8a48..320bfd4f 100644 --- a/src/VROmafPacking/DashSegmenter.h +++ b/src/VROmafPacking/DashSegmenter.h @@ -40,12 +40,11 @@ #include #include -#include "streamsegmenter/autosegmenter.hpp" -#include "streamsegmenter/segmenterapi.hpp" -#include "streamsegmenter/track.hpp" -#include "streamsegmenter/optional.hpp" +#include "../isolib/dash_writer/SegmentWriter.h" +#include "../isolib/dash_writer/AcquireTrackData.h" +#include "../isolib/dash_writer/DataItem.h" -#include "definitions.h" +#include "VROmafPacking_def.h" #include "OmafPackingCommon.h" #include "MediaStream.h" #include "ExtractorTrack.h" @@ -58,6 +57,7 @@ VCD_NS_BEGIN #define DEFAULT_HEVC_TEMPORALIDPLUS1 1 #define DEFAULT_EXTRACTORTRACK_TRACKIDBASE 1000 +#define DEFAULT_AUDIOTRACK_TRACKIDBASE 2000 #define DEFAULT_QUALITY_RANK 1 #define MAINSTREAM_QUALITY_RANK 1 @@ -66,14 +66,6 @@ VCD_NS_BEGIN class DashInitSegmenter; class DashSegmenter; -template -using Optional = StreamSegmenter::Utils::Optional; - -using TrackId = StreamSegmenter::TrackId; -using FrameTime = StreamSegmenter::FrameTime; -using FrameDuration = StreamSegmenter::FrameDuration; -using FrameRate = StreamSegmenter::FrameRate; - typedef uint32_t StreamId; //! @@ -132,7 +124,8 @@ enum OmafProjectionType { NoneProjection, EQUIRECTANGULAR, - CUBEMAP + CUBEMAP, + PLANAR }; //! @@ -157,8 +150,8 @@ enum DataInputFormat //! struct TrackConfig { - StreamSegmenter::TrackMeta meta; - std::map> trackReferences; + VCD::MP4::TrackMeta meta; + std::map> trackReferences; DataInputFormat pipelineOutput; }; @@ -169,7 +162,7 @@ struct TrackConfig //! struct InitSegConfig { - std::map tracks; + std::map tracks; bool fragmented = true; @@ -191,15 +184,15 @@ struct InitSegConfig //! struct GeneralSegConfig { - StreamSegmenter::Segmenter::Duration sgtDuration; + VCD::MP4::FractU64 sgtDuration; - Optional subsgtDuration; + VCD::MP4::DataItem subsgtDuration; bool needCheckIDR = true; - std::map tracks; + std::map tracks; - StreamSegmenter::Segmenter::SequenceId baseSequenceIdx; + VCD::MP4::SequenceId baseSequenceIdx; bool useSeparatedSidx; @@ -207,7 +200,7 @@ struct GeneralSegConfig //std::shared_ptr log; - char tileSegBaseName[1024]; + char trackSegBaseName[1024]; }; //! @@ -217,13 +210,13 @@ struct GeneralSegConfig //! struct TrackInfo { - StreamSegmenter::FrameTime nextFramePresTime; // used for constructing frame presentation time + VCD::MP4::FrameTime nextFramePresTime; // used for constructing frame presentation time bool isFirstFrame = true; - int64_t lastPresIndex; + int64_t lastPresIndex = 0; - StreamSegmenter::FrameTime nextCodingTime; + VCD::MP4::FrameTime nextCodingTime; size_t numConsecutiveFrames = 0; @@ -247,7 +240,7 @@ struct Bitrate struct SegmenterMeta { // the duration of the produced segment - FrameDuration segmentDuration; + VCD::MP4::FrameDuration segmentDuration; }; //! 
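For reference, the reworked PackExtractors() earlier in this patch walks the inline-constructor and sample-constructor lists in lockstep: an inline constructor (when present) is attached first, the next sample constructor completes the extractor, and GenFrameData() then serializes the accumulated samples into NAL bytes. Below is a minimal, self-contained sketch of that pairing pattern; the Inline, Sample, and Extractor types are simplified stand-ins, not the real VCD::MP4 classes.

```cpp
#include <cstdint>
#include <iostream>
#include <list>
#include <vector>

// Simplified stand-ins for the inline/sample constructor records.
struct Inline { std::vector<uint8_t> data; };
struct Sample { int64_t dataOffset; int64_t dataLength; uint32_t trackId; };

// One packed extractor: optional inline part plus one sample part.
struct Extractor { std::vector<uint8_t> inlineData; Sample sample{}; };

// Pair each pending inline constructor with the sample constructor that
// follows it, echoing the loop structure in PackExtractors().
std::vector<Extractor> PairConstructors(const std::list<Inline>& inlines,
                                        const std::list<Sample>& samples)
{
    std::vector<Extractor> out;
    auto itIn = inlines.begin();
    auto itSm = samples.begin();
    Extractor cur;
    while (itIn != inlines.end() || itSm != samples.end())
    {
        if (itIn != inlines.end())
        {
            cur.inlineData = itIn->data;   // attach the inline payload first
            ++itIn;
        }
        else if (itSm != samples.end())
        {
            cur.sample = *itSm;            // the sample constructor completes it
            ++itSm;
            out.push_back(cur);            // a full extractor is ready
            cur = Extractor{};
        }
    }
    return out;
}

int main()
{
    std::list<Inline> inlines{{{0x00, 0x01}}};
    std::list<Sample> samples{{0, 256, 1}, {0, 512, 2}};
    std::cout << "packed extractors: " << PairConstructors(inlines, samples).size()
              << std::endl;   // 2: one inline+sample pair, one sample-only
    return 0;
}
```

As in the patch, an extractor is emitted only when a sample constructor arrives, so a trailing inline constructor without a matching sample constructor is never flushed.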
@@ -309,7 +302,7 @@ struct QualityInfo uint8_t qualityRank; uint16_t origWidth = 0; // used only with multi-res cases uint16_t origHeight = 0; // used only with multi-res cases - Optional sphere; // not used with remaining area info + VCD::MP4::DataItem sphere; // not used with remaining area info }; //! @@ -333,11 +326,11 @@ struct CodedMeta { int64_t presIndex; // presentation index (as in RawFormatMeta) int64_t codingIndex; // coding index - FrameTime codingTime; - FrameTime presTime; - FrameDuration duration; + VCD::MP4::FrameTime codingTime; + VCD::MP4::FrameTime presTime; + VCD::MP4::FrameDuration duration; - TrackId trackId; + VCD::MP4::TrackId trackId; bool inCodingOrder; @@ -348,6 +341,8 @@ struct CodedMeta uint32_t width = 0; uint32_t height = 0; + uint8_t channelCfg = 0; + uint32_t samplingFreq = 0; Bitrate bitrate = {}; // bitrate information FrameType type = FrameType::NA; @@ -356,9 +351,9 @@ struct CodedMeta // applicable in OMAF OmafProjectionType projection = OmafProjectionType::EQUIRECTANGULAR; - Optional regionPacking; - Optional sphericalCoverage; - Optional qualityRankCoverage; + VCD::MP4::DataItem regionPacking; + VCD::MP4::DataItem sphericalCoverage; + VCD::MP4::DataItem qualityRankCoverage; bool isEOS = false; @@ -368,19 +363,6 @@ struct CodedMeta } }; -//! -//! \struct: SegmentWriterDestructor -//! \brief: define the destruction operation for the -//! segment writer -//! -struct SegmentWriterDestructor -{ - void operator()(StreamSegmenter::Writer* aWriter) - { - StreamSegmenter::Writer::destruct(aWriter); - } -}; - //! //! \struct: TrackSegmentCtx //! \brief: define the context of segmentation for @@ -389,17 +371,20 @@ struct SegmentWriterDestructor //! struct TrackSegmentCtx { + bool isAudio; bool isExtractorTrack; TileInfo *tileInfo; uint16_t tileIdx; - uint8_t extractorTrackIdx; + uint16_t extractorTrackIdx; std::map* extractors; Nalu extractorTrackNalu; - std::list refTrackIdxs; + std::list refTrackIdxs; - TrackId trackIdx; + Nalu audioNalu; + + VCD::MP4::TrackId trackIdx; InitSegConfig dashInitCfg; GeneralSegConfig dashCfg; DashInitSegmenter *initSegmenter; @@ -449,17 +434,26 @@ class DashInitSegmenter //! int32_t GenerateInitSegment( TrackSegmentCtx *trackSegCtx, - std::map trackSegCtxs); + std::map trackSegCtxs); + + //! + //! \brief Get initial segment size + //! + //! \return uint64_t + //! initial segment size + //! + uint64_t GetInitSegmentSize() { return m_initSegSize; }; private: - StreamSegmenter::Segmenter::TrackDescriptions m_trackDescriptions; //!< track description information + VCD::MP4::TrackDescriptionsMap m_trackDescriptions; //!< track description information - std::set m_firstFrameRemaining; //!< the remaining track index for which initial segment is not generated for the first frame + std::set m_firstFrameRemaining; //!< the remaining track index for which initial segment is not generated for the first frame const InitSegConfig m_config; //!< the configuration for the initial segment std::string m_omafVideoTrackBrand = ""; //!< video track OMAF brand information std::string m_omafAudioTrackBrand = ""; //!< audio track OMAF brand information + uint64_t m_initSegSize = 0; private: @@ -474,7 +468,7 @@ class DashInitSegmenter //! //! \return void //! - void AddH264VideoTrack(TrackId aTrackId, CodedMeta& aMeta); + void AddH264VideoTrack(VCD::MP4::TrackId aTrackId, CodedMeta& aMeta); //! //! \brief Add the specified track of HEVC coded data into sample @@ -487,7 +481,7 @@ class DashInitSegmenter //! //! \return void //! 
- void AddH265VideoTrack(TrackId aTrackId, CodedMeta& aMeta); + void AddH265VideoTrack(VCD::MP4::TrackId aTrackId, CodedMeta& aMeta); //! //! \brief Add the specified track of HEVC extractor track data @@ -500,18 +494,19 @@ class DashInitSegmenter //! //! \return void //! - void AddH265ExtractorTrack(TrackId aTrackId, CodedMeta& aMeta); + void AddH265ExtractorTrack(VCD::MP4::TrackId aTrackId, CodedMeta& aMeta); + void AddAACTrack(VCD::MP4::TrackId aTrackId, CodedMeta& aMeta); //! //! \brief Make initial segment for the track //! //! \param [in]aFragmented //! indicate whether the initial segment is fragmented //! - //! \return StreamSegmenter::Segmenter::InitSegment + //! \return InitialSegment //! the initial segment //! - StreamSegmenter::Segmenter::InitSegment MakeInitSegment(bool aFragmented); + VCD::MP4::InitialSegment MakeInitSegment(bool aFragmented); //! //! \brief Fill the OMAF compliant sample entry @@ -529,10 +524,10 @@ class DashInitSegmenter //! \return void //! void FillOmafStructures( - TrackId aTrackId, + VCD::MP4::TrackId aTrackId, CodedMeta& aMeta, - StreamSegmenter::Segmenter::HevcVideoSampleEntry& aSampleEntry, - StreamSegmenter::TrackMeta& aTrackMeta); + VCD::MP4::HevcVideoSampleEntry& aSampleEntry, + VCD::MP4::TrackMeta& aTrackMeta); }; //! @@ -540,7 +535,7 @@ class DashInitSegmenter //! \brief Define the operation of acquiring video coded data //! -class AcquireVideoFrameData : public StreamSegmenter::AcquireFrameData +class AcquireVideoFrameData : public VCD::MP4::GetDataOfFrame { public: @@ -562,10 +557,10 @@ class AcquireVideoFrameData : public StreamSegmenter::AcquireFrameData //! //! \brief Get the coded data //! - //! \return StreamSegmenter::FrameData - //! the FrameData which includes the coded data + //! \return FrameBuf + //! the FrameBuf which includes the coded data //! - StreamSegmenter::FrameData get() const override; + VCD::MP4::FrameBuf Get() const override; //! //! \brief Get the size of coded data @@ -573,7 +568,7 @@ class AcquireVideoFrameData : public StreamSegmenter::AcquireFrameData //! \return size_t //! the size of coded data //! - size_t getSize() const override; + size_t GetDataSize() const override; //! //! \brief Clone one AcquireVideoFrameData object @@ -581,7 +576,7 @@ class AcquireVideoFrameData : public StreamSegmenter::AcquireFrameData //! \return AcquireVideoFrameData* //! the pointer to the cloned AcquireVideoFrameData object //! - AcquireVideoFrameData* clone() const override; + AcquireVideoFrameData* Clone() const override; private: //Nalu *m_tileNalu; //!< the pointer to the nalu information of coded data @@ -625,8 +620,22 @@ class DashSegmenter //! int32_t SegmentData(TrackSegmentCtx *trackSegCtx); + //! + //! \brief Get totally generated segments number + //! + //! \return uint64_t + //! totally generated segments number + //! uint64_t GetSegmentsNum() { return m_segNum; }; + //! + //! \brief Get current segment size + //! + //! \return uint64_t + //! current segment size + //! + uint64_t GetSegmentSize() { return m_segSize; }; + protected: //! @@ -664,16 +673,17 @@ class DashSegmenter //! \return void //! 
void Feed( - TrackId trackId, + VCD::MP4::TrackId trackId, CodedMeta codedFrameMeta, Nalu *dataNalu, - StreamSegmenter::FrameCts compositionTime); + VCD::MP4::FrameCts compositionTime); - std::map m_trackInfo; //!< track information of all tracks + std::map m_trackInfo; //!< track information of all tracks const GeneralSegConfig m_config; //!< the configuration of data segment of the track - StreamSegmenter::AutoSegmenter m_autoSegmenter; //!< the low level segmenter to write segment + //VCD::MP4::AutoSegmenter m_autoSegmenter; //!< the low level segmenter to write segment + VCD::MP4::SegmentWriter m_segWriter; bool m_waitForInitSegment = false; //!< whether to wait for initial segment @@ -697,7 +707,7 @@ class DashSegmenter //! \return int32_t //! ERROR_NONE if success, else failed reason //! - int32_t WriteSegment(StreamSegmenter::Segmenter::Segments& aSegments); + int32_t WriteSegment(VCD::MP4::SegmentList& aSegments); //! //! \brief Pack all extractors data into bitstream @@ -716,18 +726,15 @@ class DashSegmenter //! int32_t PackExtractors( std::map* extractorsMap, - std::list refTrackIdxs, + std::list refTrackIdxs, Nalu *extractorsNalu); private: - std::unique_ptr m_segmentWriter; //!< the low level segment writer - - StreamSegmenter::SidxWriter *m_sidxWriter = NULL; //!< the low level sidx writer - uint64_t m_segNum = 0; //!< current segments number FILE *m_file = NULL; //!< file pointer to write segments char m_segName[1024]; //!< segment file name string + uint64_t m_segSize = 0; }; VCD_NS_END; diff --git a/src/VROmafPacking/DefaultSegmentation.cpp b/src/VROmafPacking/DefaultSegmentation.cpp index 2ad9ab94..11c7bade 100644 --- a/src/VROmafPacking/DefaultSegmentation.cpp +++ b/src/VROmafPacking/DefaultSegmentation.cpp @@ -31,14 +31,21 @@ //! Created on April 30, 2019, 6:04 AM //! 
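Taken together, the DashSegmenter changes above swap the external StreamSegmenter auto-segmenter for the in-tree VCD::MP4::SegmentWriter: Feed() hands each NAL payload over with its composition time, ExtractSubSegments() is polled for completed (sub)segments, and WriteSubSegments() serializes them into numbered segment files, with m_segSize recording the bytes written. The sketch below condenses that per-frame flow; ISegmentWriter and MockWriter are hypothetical stand-ins for illustration only, not the real isolib API.

```cpp
#include <cstdint>
#include <cstdio>
#include <list>
#include <string>
#include <vector>

// Hypothetical, minimal stand-in for a segment-writer interface.
struct ISegmentWriter {
    virtual ~ISegmentWriter() = default;
    virtual void Feed(uint32_t trackId, const std::vector<uint8_t>& nalu,
                      int64_t ctsNum, int64_t ctsDen) = 0;
    virtual std::list<std::string> ExtractSubSegments() = 0;  // completed segments
};

// Toy writer: emits one "segment" every N fed frames.
struct MockWriter : ISegmentWriter {
    explicit MockWriter(size_t framesPerSeg) : m_framesPerSeg(framesPerSeg) {}
    void Feed(uint32_t, const std::vector<uint8_t>& nalu, int64_t, int64_t) override {
        m_buf.append(nalu.begin(), nalu.end());
        if (++m_fed % m_framesPerSeg == 0) { m_done.push_back(m_buf); m_buf.clear(); }
    }
    std::list<std::string> ExtractSubSegments() override {
        std::list<std::string> out;
        out.swap(m_done);
        return out;
    }
    size_t m_framesPerSeg; size_t m_fed = 0;
    std::string m_buf; std::list<std::string> m_done;
};

int main() {
    MockWriter writer(25);                       // e.g. 25 frames per segment
    uint64_t segNum = 0;
    for (int frame = 0; frame < 100; frame++) {
        std::vector<uint8_t> nalu(128, 0x42);    // fake coded frame
        writer.Feed(/*trackId=*/1, nalu, /*cts=*/frame, /*timescale=*/30);
        for (const std::string& seg : writer.ExtractSubSegments()) {
            char name[64];
            snprintf(name, sizeof(name), "track1.%llu.mp4",
                     static_cast<unsigned long long>(++segNum));
            // DashSegmenter::WriteSegment() would fwrite() 'seg' here.
            std::printf("%s: %zu bytes\n", name, seg.size());
        }
    }
    return 0;
}
```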
+#include "VideoStreamPluginAPI.h" +#include "AudioStreamPluginAPI.h" #include "DefaultSegmentation.h" -#include "streamsegmenter/rational.hpp" +#include "../isolib/dash_writer/Fraction.h" #include #include #include #include +#ifdef _USE_TRACE_ +#include "../trace/Bandwidth_tp.h" +#include "../trace/E2E_latency_tp.h" +#endif + VCD_NS_BEGIN DefaultSegmentation::~DefaultSegmentation() @@ -50,44 +57,72 @@ DefaultSegmentation::~DefaultSegmentation() { TrackSegmentCtx *trackSegCtxs = itTrackCtx->second; MediaStream *stream = itTrackCtx->first; - VideoStream *vs = (VideoStream*)stream; - uint32_t tilesNum = vs->GetTileInRow() * vs->GetTileInCol(); - for (uint32_t i = 0; i < tilesNum; i++) + if (stream && (stream->GetMediaType() == VIDEOTYPE)) { - DELETE_MEMORY(trackSegCtxs[i].initSegmenter); - DELETE_MEMORY(trackSegCtxs[i].dashSegmenter); + VideoStream *vs = (VideoStream*)stream; + uint32_t tilesNum = vs->GetTileInRow() * vs->GetTileInCol(); + for (uint32_t i = 0; i < tilesNum; i++) + { + DELETE_MEMORY(trackSegCtxs[i].initSegmenter); + DELETE_MEMORY(trackSegCtxs[i].dashSegmenter); + } + + delete[] trackSegCtxs; + trackSegCtxs = NULL; } + else if (stream && (stream->GetMediaType() == AUDIOTYPE)) + { + if (trackSegCtxs) + { + DELETE_MEMORY(trackSegCtxs->initSegmenter); + DELETE_MEMORY(trackSegCtxs->dashSegmenter); - delete[] trackSegCtxs; - trackSegCtxs = NULL; + delete trackSegCtxs; + trackSegCtxs = NULL; + } + } } m_streamSegCtx.clear(); - std::map::iterator itExtractorCtx; - for (itExtractorCtx = m_extractorSegCtx.begin(); - itExtractorCtx != m_extractorSegCtx.end(); - itExtractorCtx++) + if (m_extractorSegCtx.size()) { - TrackSegmentCtx *trackSegCtx = itExtractorCtx->second; - if (trackSegCtx->extractorTrackNalu.data) + std::map::iterator itExtractorCtx; + for (itExtractorCtx = m_extractorSegCtx.begin(); + itExtractorCtx != m_extractorSegCtx.end(); + itExtractorCtx++) { - free(trackSegCtx->extractorTrackNalu.data); - trackSegCtx->extractorTrackNalu.data = NULL; - } + TrackSegmentCtx *trackSegCtx = itExtractorCtx->second; + if (trackSegCtx->extractorTrackNalu.data) + { + free(trackSegCtx->extractorTrackNalu.data); + trackSegCtx->extractorTrackNalu.data = NULL; + } - DELETE_MEMORY(trackSegCtx->initSegmenter); - DELETE_MEMORY(trackSegCtx->dashSegmenter); + DELETE_MEMORY(trackSegCtx->initSegmenter); + DELETE_MEMORY(trackSegCtx->dashSegmenter); + + DELETE_MEMORY(trackSegCtx); + } - DELETE_MEMORY(trackSegCtx); + m_extractorSegCtx.clear(); } - m_extractorSegCtx.clear(); - int32_t ret = pthread_mutex_destroy(&m_mutex); - if (ret) + if (m_extractorThreadIds.size()) { - LOG(ERROR) << "Failed to destroy mutex of default segmentation !" << std::endl; - return; + std::map::iterator itThreadId; + for (itThreadId = m_extractorThreadIds.begin(); itThreadId != m_extractorThreadIds.end(); ) + { + pthread_t oneThread = itThreadId->first; + if (oneThread) + { + pthread_join(oneThread, NULL); + } + m_extractorThreadIds.erase(itThreadId++); + } + m_extractorThreadIds.clear(); } + + DELETE_ARRAY(m_videosBitrate); } int32_t ConvertRwpk(RegionWisePacking *rwpk, CodedMeta *codedMeta) @@ -183,22 +218,40 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() MediaStream *stream = it->second; if (stream->GetMediaType() == VIDEOTYPE) { + m_videosNum++; VideoStream *vs = (VideoStream*)stream; uint64_t bitRate = vs->GetBitRate(); bitRateRanking.insert(bitRate); } } + if (m_videosNum != bitRateRanking.size()) + { + //LOG(ERROR) << "Invalid video streams number !" 
<< std::endl; + OMAF_LOG(LOG_ERROR, "Invalid video streams number !\n"); + return OMAF_ERROR_VIDEO_NUM; + } + m_videosBitrate = new uint64_t[m_videosNum]; + if (!m_videosBitrate) + return OMAF_ERROR_NULL_PTR; + + memset_s(m_videosBitrate, m_videosNum * sizeof(uint64_t), 0); + std::set::reverse_iterator rateIter = bitRateRanking.rbegin(); + uint32_t index = 0; + for ( ; rateIter != bitRateRanking.rend(); rateIter++) + { + m_videosBitrate[index] = *rateIter; + index++; + } + for (it = m_streamMap->begin(); it != m_streamMap->end(); it++) { MediaStream *stream = it->second; if (stream->GetMediaType() == VIDEOTYPE) { VideoStream *vs = (VideoStream*)stream; - //TrackSegmentCtx *trackSegCtxs = vs->GetAllTrackSegCtxs(); TileInfo *tilesInfo = vs->GetAllTilesInfo(); Rational frameRate = vs->GetFrameRate(); - m_frameRate = frameRate; uint64_t bitRate = vs->GetBitRate(); uint8_t qualityLevel = bitRateRanking.size(); std::set::iterator itBitRate; @@ -210,6 +263,7 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() break; } m_projType = (VCD::OMAF::ProjectionFormat)vs->GetProjType(); + OMAF_LOG(LOG_INFO, "Get video source projection type %d\n", m_projType); m_videoSegInfo = vs->GetVideoSegInfo(); Nalu *vpsNalu = vs->GetVPSNalu(); if (!vpsNalu || !(vpsNalu->data) || !(vpsNalu->dataSize)) @@ -242,9 +296,10 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() TrackSegmentCtx *trackSegCtxs = new TrackSegmentCtx[tilesNum]; if (!trackSegCtxs) return OMAF_ERROR_NULL_PTR; - std::map tilesTrackIndex; + std::map tilesTrackIndex; for (uint32_t i = 0; i < tilesNum; i++) { + trackSegCtxs[i].isAudio = false; trackSegCtxs[i].isExtractorTrack = false; trackSegCtxs[i].tileInfo = &(tilesInfo[i]); trackSegCtxs[i].tileIdx = i; @@ -253,8 +308,8 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() //set InitSegConfig TrackConfig trackConfig{}; trackConfig.meta.trackId = m_trackIdStarter + i; - trackConfig.meta.timescale = StreamSegmenter::RatU64(frameRate.den, frameRate.num * 1000); //? - trackConfig.meta.type = StreamSegmenter::MediaType::Video; + trackConfig.meta.timescale = VCD::MP4::FractU64(frameRate.den, frameRate.num * 1000); //? + trackConfig.meta.type = VCD::MP4::TypeOfMedia::Video; trackConfig.pipelineOutput = DataInputFormat::VideoMono; trackSegCtxs[i].dashInitCfg.tracks.insert(std::make_pair(trackSegCtxs[i].trackIdx, trackConfig)); m_allTileTracks.insert(std::make_pair(trackSegCtxs[i].trackIdx, trackConfig)); @@ -262,23 +317,23 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() trackSegCtxs[i].dashInitCfg.writeToBitstream = true; trackSegCtxs[i].dashInitCfg.packedSubPictures = true; trackSegCtxs[i].dashInitCfg.mode = OperatingMode::OMAF; - trackSegCtxs[i].dashInitCfg.streamIds.push_back(trackConfig.meta.trackId.get()); + trackSegCtxs[i].dashInitCfg.streamIds.push_back(trackConfig.meta.trackId.GetIndex()); snprintf(trackSegCtxs[i].dashInitCfg.initSegName, 1024, "%s%s_track%ld.init.mp4", m_segInfo->dirName, m_segInfo->outName, m_trackIdStarter + i); //set GeneralSegConfig - trackSegCtxs[i].dashCfg.sgtDuration = StreamSegmenter::RatU64(m_videoSegInfo->segDur, 1); //? - trackSegCtxs[i].dashCfg.subsgtDuration = trackSegCtxs[i].dashCfg.sgtDuration / FrameDuration{ 1, 1}; //? + trackSegCtxs[i].dashCfg.sgtDuration = VCD::MP4::FractU64(m_videoSegInfo->segDur, 1); //? + trackSegCtxs[i].dashCfg.subsgtDuration = trackSegCtxs[i].dashCfg.sgtDuration / VCD::MP4::FrameDuration{ 1, 1}; //? 
trackSegCtxs[i].dashCfg.needCheckIDR = true; - StreamSegmenter::TrackMeta trackMeta{}; + VCD::MP4::TrackMeta trackMeta{}; trackMeta.trackId = trackSegCtxs[i].trackIdx; - trackMeta.timescale = StreamSegmenter::RatU64(frameRate.den, frameRate.num * 1000); //? - trackMeta.type = StreamSegmenter::MediaType::Video; + trackMeta.timescale = VCD::MP4::FractU64(frameRate.den, frameRate.num * 1000); //? + trackMeta.type = VCD::MP4::TypeOfMedia::Video; trackSegCtxs[i].dashCfg.tracks.insert(std::make_pair(trackSegCtxs[i].trackIdx, trackMeta)); trackSegCtxs[i].dashCfg.useSeparatedSidx = false; trackSegCtxs[i].dashCfg.streamsIdx.push_back(it->first); - snprintf(trackSegCtxs[i].dashCfg.tileSegBaseName, 1024, "%s%s_track%ld", m_segInfo->dirName, m_segInfo->outName, m_trackIdStarter + i); + snprintf(trackSegCtxs[i].dashCfg.trackSegBaseName, 1024, "%s%s_track%ld", m_segInfo->dirName, m_segInfo->outName, m_trackIdStarter + i); //setup DashInitSegmenter trackSegCtxs[i].initSegmenter = new DashInitSegmenter(&(trackSegCtxs[i].dashInitCfg)); @@ -313,9 +368,9 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() //setup CodedMeta trackSegCtxs[i].codedMeta.presIndex = 0; trackSegCtxs[i].codedMeta.codingIndex = 0; - trackSegCtxs[i].codedMeta.codingTime = FrameTime{ 0, 1 }; - trackSegCtxs[i].codedMeta.presTime = FrameTime{ 0, 1000 }; - trackSegCtxs[i].codedMeta.duration = FrameDuration{ frameRate.den * 1000, frameRate.num * 1000}; + trackSegCtxs[i].codedMeta.codingTime = VCD::MP4::FrameTime{ 0, 1 }; + trackSegCtxs[i].codedMeta.presTime = VCD::MP4::FrameTime{ 0, 1000 }; + trackSegCtxs[i].codedMeta.duration = VCD::MP4::FrameDuration{ frameRate.den * 1000, frameRate.num * 1000}; trackSegCtxs[i].codedMeta.trackId = trackSegCtxs[i].trackIdx; trackSegCtxs[i].codedMeta.inCodingOrder = true; trackSegCtxs[i].codedMeta.format = CodedFormat::H265; @@ -327,7 +382,7 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() trackSegCtxs[i].codedMeta.bitrate.avgBitrate = tileBitRate; trackSegCtxs[i].codedMeta.bitrate.maxBitrate = 0; trackSegCtxs[i].codedMeta.type = FrameType::IDR; - trackSegCtxs[i].codedMeta.segmenterMeta.segmentDuration = FrameDuration{ 0, 1 }; //? + trackSegCtxs[i].codedMeta.segmenterMeta.segmentDuration = VCD::MP4::FrameDuration{ 0, 1 }; //? 
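The per-tile configuration above derives its media timescale and per-frame duration from the stream's rational frame rate: timescale = FractU64(frameRate.den, frameRate.num * 1000) and duration = FrameDuration{frameRate.den * 1000, frameRate.num * 1000}. Assuming FractU64 and FrameDuration hold a plain numerator/denominator pair in seconds, the arithmetic works out to 1000 timescale ticks per frame, as this small stand-alone check shows (Fract is a stand-in, not the isolib type):

```cpp
#include <cstdint>
#include <iostream>

// Stand-in for a numerator/denominator pair expressed in seconds.
struct Fract {
    uint64_t num;
    uint64_t den;
    double Seconds() const { return double(num) / double(den); }
};

int main()
{
    // Rational frame rate as carried by the video stream, e.g. 30000/1001 (29.97 fps).
    uint64_t frNum = 30000, frDen = 1001;

    // Matches: trackConfig.meta.timescale = FractU64(frameRate.den, frameRate.num * 1000)
    Fract timescale{frDen, frNum * 1000};              // 1001 / 30000000 s per tick
    // Matches: codedMeta.duration = FrameDuration{frameRate.den * 1000, frameRate.num * 1000}
    Fract frameDuration{frDen * 1000, frNum * 1000};   // 1001000 / 30000000 s, i.e. 1/29.97 s

    std::cout << "tick length    : " << timescale.Seconds() << " s\n";
    std::cout << "frame duration : " << frameDuration.Seconds() << " s\n";   // ~0.0333667
    std::cout << "ticks per frame: "
              << frameDuration.Seconds() / timescale.Seconds() << "\n";      // 1000
    return 0;
}
```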
RegionWisePacking regionPacking; @@ -350,7 +405,8 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() return OMAF_ERROR_NULL_PTR; } - memcpy(&(regionPacking.rectRegionPacking[0]), &(rwpk->rectRegionPacking[i]), sizeof(RectangularRegionWisePacking)); + memcpy_s(&(regionPacking.rectRegionPacking[0]), sizeof(RectangularRegionWisePacking), + &(rwpk->rectRegionPacking[i]), sizeof(RectangularRegionWisePacking)); ConvertRwpk(&(regionPacking), &(trackSegCtxs[i].codedMeta)); DELETE_ARRAY(regionPacking.rectRegionPacking); @@ -363,6 +419,10 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() { trackSegCtxs[i].codedMeta.projection = OmafProjectionType::CUBEMAP; } + else if (m_projType == VCD::OMAF::ProjectionFormat::PF_PLANAR) + { + trackSegCtxs[i].codedMeta.projection = OmafProjectionType::PLANAR; + } else { for (uint32_t id = 0; id < (i + 1); id++) @@ -394,201 +454,348 @@ int32_t DefaultSegmentation::ConstructTileTrackSegCtx() int32_t DefaultSegmentation::ConstructExtractorTrackSegCtx() { - std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); - std::map::iterator it1; - for (it1 = extractorTracks->begin(); it1 != extractorTracks->end(); it1++) + std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); + if (extractorTracks->size()) { - ExtractorTrack *extractorTrack = it1->second; - Nalu *vpsNalu = extractorTrack->GetVPS(); - Nalu *spsNalu = extractorTrack->GetSPS(); - Nalu *ppsNalu = extractorTrack->GetPPS(); - - std::vector vpsData( - static_cast(vpsNalu->data), - static_cast(vpsNalu->data) + vpsNalu->dataSize); - std::vector spsData( - static_cast(spsNalu->data), - static_cast(spsNalu->data) + spsNalu->dataSize); - std::vector ppsData( - static_cast(ppsNalu->data), - static_cast(ppsNalu->data) + ppsNalu->dataSize); - - RegionWisePacking *rwpk = extractorTrack->GetRwpk(); - ContentCoverage *covi = extractorTrack->GetCovi(); - std::list *picResList = extractorTrack->GetPicRes(); - Nalu *projSEI = extractorTrack->GetProjectionSEI(); - Nalu *rwpkSEI = extractorTrack->GetRwpkSEI(); - - TrackSegmentCtx *trackSegCtx = new TrackSegmentCtx; - if (!trackSegCtx) - return OMAF_ERROR_NULL_PTR; - - trackSegCtx->isExtractorTrack = true; - trackSegCtx->extractorTrackIdx = it1->first; - trackSegCtx->extractors = extractorTrack->GetAllExtractors(); - memset(&(trackSegCtx->extractorTrackNalu), 0, sizeof(Nalu)); - trackSegCtx->extractorTrackNalu.dataSize = projSEI->dataSize + rwpkSEI->dataSize; - trackSegCtx->extractorTrackNalu.data = new uint8_t[trackSegCtx->extractorTrackNalu.dataSize]; - if (!(trackSegCtx->extractorTrackNalu.data)) + std::map::iterator it1; + for (it1 = extractorTracks->begin(); it1 != extractorTracks->end(); it1++) { - DELETE_MEMORY(trackSegCtx); - return OMAF_ERROR_NULL_PTR; - } + ExtractorTrack *extractorTrack = it1->second; + Nalu *vpsNalu = extractorTrack->GetVPS(); + Nalu *spsNalu = extractorTrack->GetSPS(); + Nalu *ppsNalu = extractorTrack->GetPPS(); + + std::vector vpsData( + static_cast(vpsNalu->data), + static_cast(vpsNalu->data) + vpsNalu->dataSize); + std::vector spsData( + static_cast(spsNalu->data), + static_cast(spsNalu->data) + spsNalu->dataSize); + std::vector ppsData( + static_cast(ppsNalu->data), + static_cast(ppsNalu->data) + ppsNalu->dataSize); - memcpy(trackSegCtx->extractorTrackNalu.data, projSEI->data, projSEI->dataSize); - memcpy(trackSegCtx->extractorTrackNalu.data + projSEI->dataSize, rwpkSEI->data, rwpkSEI->dataSize); + RegionWisePacking *rwpk = extractorTrack->GetRwpk(); + ContentCoverage *covi = 
extractorTrack->GetCovi(); + std::list *picResList = extractorTrack->GetPicRes(); + Nalu *projSEI = extractorTrack->GetProjectionSEI(); + Nalu *rwpkSEI = extractorTrack->GetRwpkSEI(); - TilesMergeDirectionInCol *tilesMergeDir = extractorTrack->GetTilesMergeDir(); - std::list::iterator itCol; - for (itCol = tilesMergeDir->tilesArrangeInCol.begin(); - itCol != tilesMergeDir->tilesArrangeInCol.end(); itCol++) - { - TilesInCol *tileCol = *itCol; - std::list::iterator itTile; - for (itTile = tileCol->begin(); itTile != tileCol->end(); itTile++) + TrackSegmentCtx *trackSegCtx = new TrackSegmentCtx; + if (!trackSegCtx) + return OMAF_ERROR_NULL_PTR; + + trackSegCtx->isAudio = false; + trackSegCtx->isExtractorTrack = true; + trackSegCtx->extractorTrackIdx = it1->first; + trackSegCtx->extractors = extractorTrack->GetAllExtractors(); + memset_s(&(trackSegCtx->extractorTrackNalu), sizeof(Nalu), 0); + trackSegCtx->extractorTrackNalu.dataSize = projSEI->dataSize + rwpkSEI->dataSize; + trackSegCtx->extractorTrackNalu.data = new uint8_t[trackSegCtx->extractorTrackNalu.dataSize]; + if (!(trackSegCtx->extractorTrackNalu.data)) { - SingleTile *tile = *itTile; - uint8_t vsIdx = tile->streamIdxInMedia; - uint8_t origTileIdx = tile->origTileIdx; + DELETE_MEMORY(trackSegCtx); + return OMAF_ERROR_NULL_PTR; + } - std::map>::iterator itTilesIdxs; - itTilesIdxs = m_tilesTrackIdxs.find(vsIdx); - if (itTilesIdxs == m_tilesTrackIdxs.end()) + memcpy_s(trackSegCtx->extractorTrackNalu.data, projSEI->dataSize, projSEI->data, projSEI->dataSize); + memcpy_s(trackSegCtx->extractorTrackNalu.data + projSEI->dataSize, rwpkSEI->dataSize, rwpkSEI->data, rwpkSEI->dataSize); + + TilesMergeDirectionInCol *tilesMergeDir = extractorTrack->GetTilesMergeDir(); + std::list::iterator itCol; + for (itCol = tilesMergeDir->tilesArrangeInCol.begin(); + itCol != tilesMergeDir->tilesArrangeInCol.end(); itCol++) + { + TilesInCol *tileCol = *itCol; + std::list::iterator itTile; + for (itTile = tileCol->begin(); itTile != tileCol->end(); itTile++) { - DELETE_ARRAY(trackSegCtx->extractorTrackNalu.data); - DELETE_MEMORY(trackSegCtx); - return OMAF_ERROR_STREAM_NOT_FOUND; + SingleTile *tile = *itTile; + uint8_t vsIdx = tile->streamIdxInMedia; + uint8_t origTileIdx = tile->origTileIdx; + + std::map>::iterator itTilesIdxs; + itTilesIdxs = m_tilesTrackIdxs.find(vsIdx); + if (itTilesIdxs == m_tilesTrackIdxs.end()) + { + DELETE_ARRAY(trackSegCtx->extractorTrackNalu.data); + DELETE_MEMORY(trackSegCtx); + return OMAF_ERROR_STREAM_NOT_FOUND; + } + std::map tilesIndex = itTilesIdxs->second; + VCD::MP4::TrackId foundTrackId = tilesIndex[origTileIdx]; + trackSegCtx->refTrackIdxs.push_back(foundTrackId); } - std::map tilesIndex = itTilesIdxs->second; - TrackId foundTrackId = tilesIndex[origTileIdx]; - trackSegCtx->refTrackIdxs.push_back(foundTrackId); } - } - trackSegCtx->trackIdx = DEFAULT_EXTRACTORTRACK_TRACKIDBASE + trackSegCtx->extractorTrackIdx; + trackSegCtx->trackIdx = DEFAULT_EXTRACTORTRACK_TRACKIDBASE + trackSegCtx->extractorTrackIdx; - //set up InitSegConfig - std::set allTrackIds; - std::map::iterator itTrack; - for (itTrack = m_allTileTracks.begin(); itTrack != m_allTileTracks.end(); itTrack++) - { - trackSegCtx->dashInitCfg.tracks.insert(std::make_pair(itTrack->first, itTrack->second)); - allTrackIds.insert(itTrack->first); - } + //set up InitSegConfig + std::set allTrackIds; + std::map::iterator itTrack; + for (itTrack = m_allTileTracks.begin(); itTrack != m_allTileTracks.end(); itTrack++) + { + 
trackSegCtx->dashInitCfg.tracks.insert(std::make_pair(itTrack->first, itTrack->second)); + allTrackIds.insert(itTrack->first); + } - TrackConfig trackConfig{}; - trackConfig.meta.trackId = trackSegCtx->trackIdx; - trackConfig.meta.timescale = StreamSegmenter::RatU64(m_frameRate.den, m_frameRate.num * 1000); //? - trackConfig.meta.type = StreamSegmenter::MediaType::Video; - trackConfig.trackReferences.insert(std::make_pair("scal", allTrackIds)); - trackConfig.pipelineOutput = DataInputFormat::VideoMono; - trackSegCtx->dashInitCfg.tracks.insert(std::make_pair(trackSegCtx->trackIdx, trackConfig)); - - trackSegCtx->dashInitCfg.fragmented = true; - trackSegCtx->dashInitCfg.writeToBitstream = true; - trackSegCtx->dashInitCfg.packedSubPictures = true; - trackSegCtx->dashInitCfg.mode = OperatingMode::OMAF; - trackSegCtx->dashInitCfg.streamIds.push_back(trackSegCtx->trackIdx.get()); - std::set::iterator itId; - for (itId = allTrackIds.begin(); itId != allTrackIds.end(); itId++) - { - trackSegCtx->dashInitCfg.streamIds.push_back((*itId).get()); - } - snprintf(trackSegCtx->dashInitCfg.initSegName, 1024, "%s%s_track%d.init.mp4", m_segInfo->dirName, m_segInfo->outName, trackSegCtx->trackIdx.get()); - - //set up GeneralSegConfig - trackSegCtx->dashCfg.sgtDuration = StreamSegmenter::RatU64(m_videoSegInfo->segDur, 1); //? - trackSegCtx->dashCfg.subsgtDuration = trackSegCtx->dashCfg.sgtDuration / FrameDuration{ 1, 1}; //? - trackSegCtx->dashCfg.needCheckIDR = true; - - StreamSegmenter::TrackMeta trackMeta{}; - trackMeta.trackId = trackSegCtx->trackIdx; - trackMeta.timescale = StreamSegmenter::RatU64(m_frameRate.den, m_frameRate.num * 1000); //? - trackMeta.type = StreamSegmenter::MediaType::Video; - trackSegCtx->dashCfg.tracks.insert(std::make_pair(trackSegCtx->trackIdx, trackMeta)); - - trackSegCtx->dashCfg.useSeparatedSidx = false; - trackSegCtx->dashCfg.streamsIdx.push_back(trackSegCtx->trackIdx.get()); - snprintf(trackSegCtx->dashCfg.tileSegBaseName, 1024, "%s%s_track%d", m_segInfo->dirName, m_segInfo->outName, trackSegCtx->trackIdx.get()); - - //set up DashInitSegmenter - trackSegCtx->initSegmenter = new DashInitSegmenter(&(trackSegCtx->dashInitCfg)); - if (!(trackSegCtx->initSegmenter)) - { - DELETE_ARRAY(trackSegCtx->extractorTrackNalu.data); - DELETE_MEMORY(trackSegCtx); - return OMAF_ERROR_NULL_PTR; - } + TrackConfig trackConfig{}; + trackConfig.meta.trackId = trackSegCtx->trackIdx; + trackConfig.meta.timescale = VCD::MP4::FractU64(m_frameRate.den, m_frameRate.num * 1000); //? + trackConfig.meta.type = VCD::MP4::TypeOfMedia::Video; + trackConfig.trackReferences.insert(std::make_pair("scal", allTrackIds)); + trackConfig.pipelineOutput = DataInputFormat::VideoMono; + trackSegCtx->dashInitCfg.tracks.insert(std::make_pair(trackSegCtx->trackIdx, trackConfig)); + + trackSegCtx->dashInitCfg.fragmented = true; + trackSegCtx->dashInitCfg.writeToBitstream = true; + trackSegCtx->dashInitCfg.packedSubPictures = true; + trackSegCtx->dashInitCfg.mode = OperatingMode::OMAF; + trackSegCtx->dashInitCfg.streamIds.push_back(trackSegCtx->trackIdx.GetIndex()); + std::set::iterator itId; + for (itId = allTrackIds.begin(); itId != allTrackIds.end(); itId++) + { + trackSegCtx->dashInitCfg.streamIds.push_back((*itId).GetIndex()); + } + snprintf(trackSegCtx->dashInitCfg.initSegName, 1024, "%s%s_track%d.init.mp4", m_segInfo->dirName, m_segInfo->outName, trackSegCtx->trackIdx.GetIndex()); + + //set up GeneralSegConfig + trackSegCtx->dashCfg.sgtDuration = VCD::MP4::FractU64(m_videoSegInfo->segDur, 1); //? 
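Each extractor track's init-segment configuration above references every tile track through a 'scal' track reference and lists its own ID plus all tile-track IDs as stream IDs, with the track ID offset by DEFAULT_EXTRACTORTRACK_TRACKIDBASE (1000). A reduced sketch of how that reference set is assembled; plain integers stand in for VCD::MP4::TrackId, and the tile-track IDs here are illustrative rather than the real m_trackIdStarter-based values.

```cpp
#include <cstdint>
#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

int main()
{
    const uint32_t kExtractorTrackIdBase = 1000;   // DEFAULT_EXTRACTORTRACK_TRACKIDBASE
    const uint32_t kTileTracks = 4;                // e.g. a 2x2 tile split

    // Tile tracks created earlier by ConstructTileTrackSegCtx(); 1-based IDs for illustration.
    std::set<uint32_t> allTileTrackIds;
    for (uint32_t i = 0; i < kTileTracks; i++)
        allTileTrackIds.insert(1 + i);

    uint32_t extractorTrackIdx = 0;
    uint32_t extractorTrackId = kExtractorTrackIdBase + extractorTrackIdx;   // 1000

    // 'scal' reference: the extractor track depends on every tile track it extracts from.
    std::map<std::string, std::set<uint32_t>> trackReferences;
    trackReferences["scal"] = allTileTrackIds;

    // Stream IDs carried by the init-segment config: the extractor track plus all tiles.
    std::vector<uint32_t> streamIds;
    streamIds.push_back(extractorTrackId);
    streamIds.insert(streamIds.end(), allTileTrackIds.begin(), allTileTrackIds.end());

    std::cout << "extractor track " << extractorTrackId << " references "
              << trackReferences["scal"].size() << " tile tracks, "
              << streamIds.size() << " stream ids total\n";
    return 0;
}
```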
+ trackSegCtx->dashCfg.subsgtDuration = trackSegCtx->dashCfg.sgtDuration / VCD::MP4::FrameDuration{ 1, 1}; //? + trackSegCtx->dashCfg.needCheckIDR = true; + + VCD::MP4::TrackMeta trackMeta{}; + trackMeta.trackId = trackSegCtx->trackIdx; + trackMeta.timescale = VCD::MP4::FractU64(m_frameRate.den, m_frameRate.num * 1000); //? + trackMeta.type = VCD::MP4::TypeOfMedia::Video; + trackSegCtx->dashCfg.tracks.insert(std::make_pair(trackSegCtx->trackIdx, trackMeta)); + + trackSegCtx->dashCfg.useSeparatedSidx = false; + trackSegCtx->dashCfg.streamsIdx.push_back(trackSegCtx->trackIdx.GetIndex()); + snprintf(trackSegCtx->dashCfg.trackSegBaseName, 1024, "%s%s_track%d", m_segInfo->dirName, m_segInfo->outName, trackSegCtx->trackIdx.GetIndex()); + + //set up DashInitSegmenter + trackSegCtx->initSegmenter = new DashInitSegmenter(&(trackSegCtx->dashInitCfg)); + if (!(trackSegCtx->initSegmenter)) + { + DELETE_ARRAY(trackSegCtx->extractorTrackNalu.data); + DELETE_MEMORY(trackSegCtx); + return OMAF_ERROR_NULL_PTR; + } - //set up DashSegmenter - trackSegCtx->dashSegmenter = new DashSegmenter(&(trackSegCtx->dashCfg), true); - if (!(trackSegCtx->dashSegmenter)) - { - DELETE_ARRAY(trackSegCtx->extractorTrackNalu.data); - DELETE_MEMORY(trackSegCtx->initSegmenter); - DELETE_MEMORY(trackSegCtx); - return OMAF_ERROR_NULL_PTR; - } + //set up DashSegmenter + trackSegCtx->dashSegmenter = new DashSegmenter(&(trackSegCtx->dashCfg), true); + if (!(trackSegCtx->dashSegmenter)) + { + DELETE_ARRAY(trackSegCtx->extractorTrackNalu.data); + DELETE_MEMORY(trackSegCtx->initSegmenter); + DELETE_MEMORY(trackSegCtx); + return OMAF_ERROR_NULL_PTR; + } - //set up CodedMeta - trackSegCtx->codedMeta.presIndex = 0; - trackSegCtx->codedMeta.codingIndex = 0; - trackSegCtx->codedMeta.codingTime = FrameTime{ 0, 1 }; - trackSegCtx->codedMeta.presTime = FrameTime{ 0, 1000 }; - trackSegCtx->codedMeta.duration = FrameDuration{ m_frameRate.den * 1000, m_frameRate.num * 1000}; - trackSegCtx->codedMeta.trackId = trackSegCtx->trackIdx; - trackSegCtx->codedMeta.inCodingOrder = true; - trackSegCtx->codedMeta.format = CodedFormat::H265Extractor; - trackSegCtx->codedMeta.decoderConfig.insert(std::make_pair(ConfigType::VPS, vpsData)); - trackSegCtx->codedMeta.decoderConfig.insert(std::make_pair(ConfigType::SPS, spsData)); - trackSegCtx->codedMeta.decoderConfig.insert(std::make_pair(ConfigType::PPS, ppsData)); - trackSegCtx->codedMeta.width = rwpk->packedPicWidth;//tilesInfo[i].tileWidth; - trackSegCtx->codedMeta.height = rwpk->packedPicHeight;//tilesInfo[i].tileHeight; - trackSegCtx->codedMeta.bitrate.avgBitrate = 0; - trackSegCtx->codedMeta.bitrate.maxBitrate = 0; - trackSegCtx->codedMeta.type = FrameType::IDR; - trackSegCtx->codedMeta.segmenterMeta.segmentDuration = FrameDuration{ 0, 1 }; //? 
- ConvertRwpk(rwpk, &(trackSegCtx->codedMeta)); - ConvertCovi(covi->sphereRegions, &(trackSegCtx->codedMeta)); + //set up CodedMeta + trackSegCtx->codedMeta.presIndex = 0; + trackSegCtx->codedMeta.codingIndex = 0; + trackSegCtx->codedMeta.codingTime = VCD::MP4::FrameTime{ 0, 1 }; + trackSegCtx->codedMeta.presTime = VCD::MP4::FrameTime{ 0, 1000 }; + trackSegCtx->codedMeta.duration = VCD::MP4::FrameDuration{ m_frameRate.den * 1000, m_frameRate.num * 1000}; + trackSegCtx->codedMeta.trackId = trackSegCtx->trackIdx; + trackSegCtx->codedMeta.inCodingOrder = true; + trackSegCtx->codedMeta.format = CodedFormat::H265Extractor; + trackSegCtx->codedMeta.decoderConfig.insert(std::make_pair(ConfigType::VPS, vpsData)); + trackSegCtx->codedMeta.decoderConfig.insert(std::make_pair(ConfigType::SPS, spsData)); + trackSegCtx->codedMeta.decoderConfig.insert(std::make_pair(ConfigType::PPS, ppsData)); + trackSegCtx->codedMeta.width = rwpk->packedPicWidth;//tilesInfo[i].tileWidth; + trackSegCtx->codedMeta.height = rwpk->packedPicHeight;//tilesInfo[i].tileHeight; + trackSegCtx->codedMeta.bitrate.avgBitrate = 0; + trackSegCtx->codedMeta.bitrate.maxBitrate = 0; + trackSegCtx->codedMeta.type = FrameType::IDR; + trackSegCtx->codedMeta.segmenterMeta.segmentDuration = VCD::MP4::FrameDuration{ 0, 1 }; //? + ConvertRwpk(rwpk, &(trackSegCtx->codedMeta)); + ConvertCovi(covi->sphereRegions, &(trackSegCtx->codedMeta)); + + FillQualityRank(&(trackSegCtx->codedMeta), picResList); + + if (m_projType == VCD::OMAF::ProjectionFormat::PF_ERP) + { + trackSegCtx->codedMeta.projection = OmafProjectionType::EQUIRECTANGULAR; + } + else if (m_projType == VCD::OMAF::ProjectionFormat::PF_CUBEMAP) + { + trackSegCtx->codedMeta.projection = OmafProjectionType::CUBEMAP; + } + else if (m_projType == VCD::OMAF::ProjectionFormat::PF_PLANAR) + { + trackSegCtx->codedMeta.projection = OmafProjectionType::PLANAR; + } + else + { + DELETE_ARRAY(trackSegCtx->extractorTrackNalu.data); + DELETE_MEMORY(trackSegCtx->initSegmenter); + DELETE_MEMORY(trackSegCtx->dashSegmenter); + DELETE_MEMORY(trackSegCtx); + return OMAF_ERROR_INVALID_PROJECTIONTYPE; + } - FillQualityRank(&(trackSegCtx->codedMeta), picResList); + trackSegCtx->codedMeta.isEOS = false; - if (m_projType == VCD::OMAF::ProjectionFormat::PF_ERP) - { - trackSegCtx->codedMeta.projection = OmafProjectionType::EQUIRECTANGULAR; + m_extractorSegCtx.insert(std::make_pair(extractorTrack, trackSegCtx)); } - else if (m_projType == VCD::OMAF::ProjectionFormat::PF_CUBEMAP) - { - trackSegCtx->codedMeta.projection = OmafProjectionType::CUBEMAP; - } - else + } + + return ERROR_NONE; +} + +int32_t DefaultSegmentation::ConstructAudioTrackSegCtx() +{ + uint8_t audioId = 0; + std::map::iterator it; + for (it = m_streamMap->begin(); it != m_streamMap->end(); it++) + { + uint8_t strId = it->first; + MediaStream *stream = it->second; + if (stream && (stream->GetMediaType() == AUDIOTYPE)) { - DELETE_ARRAY(trackSegCtx->extractorTrackNalu.data); - DELETE_MEMORY(trackSegCtx->initSegmenter); - DELETE_MEMORY(trackSegCtx->dashSegmenter); - DELETE_MEMORY(trackSegCtx); - return OMAF_ERROR_INVALID_PROJECTIONTYPE; - } + //OMAF_LOG(LOG_INFO, "Begin to construct audio track segmentation context !\n"); + AudioStream *as = (AudioStream*)stream; + uint32_t frequency = as->GetSampleRate(); + uint8_t chlConfig = as->GetChannelNum(); + uint16_t bitRate = as->GetBitRate(); + std::vector packedAudioSpecCfg = as->GetPackedSpecCfg(); + OMAF_LOG(LOG_INFO, "Audio sample rate %d\n", frequency); + OMAF_LOG(LOG_INFO, "Audio channel number %d\n", 
chlConfig); + OMAF_LOG(LOG_INFO, "Audio bit rate %d\n", bitRate); + OMAF_LOG(LOG_INFO, "Audio specific configuration packed size %lld\n", packedAudioSpecCfg.size()); + + TrackSegmentCtx *trackSegCtx = new TrackSegmentCtx; + if (!trackSegCtx) + return OMAF_ERROR_NULL_PTR; - trackSegCtx->codedMeta.isEOS = false; + trackSegCtx->isAudio = true; + trackSegCtx->isExtractorTrack = false; + trackSegCtx->tileInfo = NULL; + trackSegCtx->tileIdx = 0; + trackSegCtx->extractorTrackIdx = 0; + trackSegCtx->extractors = NULL; + trackSegCtx->trackIdx = DEFAULT_AUDIOTRACK_TRACKIDBASE + (uint64_t)audioId; + + TrackConfig trackConfig{}; + trackConfig.meta.trackId = trackSegCtx->trackIdx; + trackConfig.meta.timescale = VCD::MP4::FractU64(1, frequency);//m_frameRate.den, m_frameRate.num * 1000); //maybe need to be changed later + trackConfig.meta.type = VCD::MP4::TypeOfMedia::Audio; + trackConfig.pipelineOutput = DataInputFormat::Audio; + trackSegCtx->dashInitCfg.tracks.insert(std::make_pair(trackSegCtx->trackIdx, trackConfig)); + trackSegCtx->dashInitCfg.fragmented = true; + trackSegCtx->dashInitCfg.writeToBitstream = true; + trackSegCtx->dashInitCfg.packedSubPictures = true; + trackSegCtx->dashInitCfg.mode = OperatingMode::OMAF; + trackSegCtx->dashInitCfg.streamIds.push_back(trackConfig.meta.trackId.GetIndex()); + snprintf(trackSegCtx->dashInitCfg.initSegName, 1024, "%s%s_track%ld.init.mp4", m_segInfo->dirName, m_segInfo->outName, (DEFAULT_AUDIOTRACK_TRACKIDBASE + (uint64_t)audioId)); + + //set GeneralSegConfig + trackSegCtx->dashCfg.sgtDuration = VCD::MP4::FractU64(m_segInfo->segDuration, 1); //? + trackSegCtx->dashCfg.subsgtDuration = trackSegCtx->dashCfg.sgtDuration / VCD::MP4::FrameDuration{ 1, 1}; //? + trackSegCtx->dashCfg.needCheckIDR = true; + + VCD::MP4::TrackMeta trackMeta{}; + trackMeta.trackId = trackSegCtx->trackIdx; + trackMeta.timescale = VCD::MP4::FractU64(1, frequency);//m_frameRate.den, m_frameRate.num * 1000); //? 
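The new audio path above assigns each AAC track an ID offset by DEFAULT_AUDIOTRACK_TRACKIDBASE (2000) and a timescale of one tick per audio sample, FractU64(1, frequency); the per-frame duration set just below is SAMPLES_NUM_IN_FRAME samples over that frequency. A quick check of what that means in wall-clock time, assuming the usual 1024 samples per AAC frame (the actual SAMPLES_NUM_IN_FRAME value is defined elsewhere in the tree):

```cpp
#include <cstdint>
#include <cstdio>

int main()
{
    const uint64_t kAudioTrackIdBase = 2000;   // DEFAULT_AUDIOTRACK_TRACKIDBASE in the patch
    const uint32_t samplingFreq = 48000;       // as reported by AudioStream::GetSampleRate()
    const uint32_t samplesPerFrame = 1024;     // assumed value of SAMPLES_NUM_IN_FRAME (AAC-LC)

    uint64_t trackId = kAudioTrackIdBase + 0;  // first audio stream -> track 2000

    // Matches: codedMeta.duration = FrameDuration{SAMPLES_NUM_IN_FRAME, frequency}
    double frameSeconds = double(samplesPerFrame) / double(samplingFreq);     // ~0.02133 s
    double framesPerSecond = double(samplingFreq) / double(samplesPerFrame);  // ~46.875

    std::printf("audio track id    : %llu\n", (unsigned long long)trackId);
    std::printf("AAC frame duration: %.5f s\n", frameSeconds);
    std::printf("AAC frames per sec: %.3f\n", framesPerSecond);
    return 0;
}
```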
+ trackMeta.type = VCD::MP4::TypeOfMedia::Audio; + trackSegCtx->dashCfg.tracks.insert(std::make_pair(trackSegCtx->trackIdx, trackMeta)); + + trackSegCtx->dashCfg.useSeparatedSidx = false; + trackSegCtx->dashCfg.streamsIdx.push_back(strId); + snprintf(trackSegCtx->dashCfg.trackSegBaseName, 1024, "%s%s_track%ld", m_segInfo->dirName, m_segInfo->outName, (DEFAULT_AUDIOTRACK_TRACKIDBASE + (uint64_t)audioId)); + + //setup DashInitSegmenter + trackSegCtx->initSegmenter = new DashInitSegmenter(&(trackSegCtx->dashInitCfg)); + if (!(trackSegCtx->initSegmenter)) + { + DELETE_MEMORY(trackSegCtx); + return OMAF_ERROR_NULL_PTR; + } - m_extractorSegCtx.insert(std::make_pair(extractorTrack, trackSegCtx)); + //setup DashSegmenter + trackSegCtx->dashSegmenter = new DashSegmenter(&(trackSegCtx->dashCfg), true); + if (!(trackSegCtx->dashSegmenter)) + { + DELETE_MEMORY(trackSegCtx->initSegmenter); + DELETE_MEMORY(trackSegCtx); + return OMAF_ERROR_NULL_PTR; + } + + trackSegCtx->qualityRanking = DEFAULT_QUALITY_RANK; + + //setup CodedMeta + trackSegCtx->codedMeta.presIndex = 0; + trackSegCtx->codedMeta.codingIndex = 0; + trackSegCtx->codedMeta.codingTime = VCD::MP4::FrameTime{ 0, 1 }; + trackSegCtx->codedMeta.presTime = VCD::MP4::FrameTime{ 0, 1000 }; + trackSegCtx->codedMeta.trackId = trackSegCtx->trackIdx; + trackSegCtx->codedMeta.inCodingOrder = true; + trackSegCtx->codedMeta.format = CodedFormat::AAC; + trackSegCtx->codedMeta.duration = VCD::MP4::FrameDuration{SAMPLES_NUM_IN_FRAME, frequency}; + trackSegCtx->codedMeta.channelCfg = chlConfig; + trackSegCtx->codedMeta.samplingFreq = frequency; + trackSegCtx->codedMeta.type = FrameType::IDR; + trackSegCtx->codedMeta.bitrate.avgBitrate = bitRate; + trackSegCtx->codedMeta.bitrate.maxBitrate = 0;//? + trackSegCtx->codedMeta.decoderConfig.insert(std::make_pair(ConfigType::AudioSpecificConfig, packedAudioSpecCfg)); + trackSegCtx->codedMeta.isEOS = false; + + trackSegCtx->isEOS = false; + + m_streamSegCtx.insert(std::make_pair(stream, trackSegCtx)); + //m_streamsIsKey.insert(std::make_pair(stream, true)); + //m_streamsIsEOS.insert(std::make_pair(stream, false)); + } } + m_audioSegCtxsConsted = true; + //OMAF_LOG(LOG_INFO, "Complete audio segmentation context construction !\n"); return ERROR_NONE; } int32_t DefaultSegmentation::VideoEndSegmentation() { - std::map::iterator it = m_streamMap->begin(); - for ( ; it != m_streamMap->end(); it++) + if (m_streamMap->size()) { - MediaStream *stream = it->second; - if (stream->GetMediaType() == VIDEOTYPE) + std::map::iterator it = m_streamMap->begin(); + for ( ; it != m_streamMap->end(); it++) { - int32_t ret = EndEachVideo(stream); - if (ret) - return ret; + MediaStream *stream = it->second; + if (stream) + { + if (stream->GetMediaType() == VIDEOTYPE) + { + int32_t ret = EndEachVideo(stream); + if (ret) + return ret; + } + } + } + } + + return ERROR_NONE; +} + +int32_t DefaultSegmentation::AudioEndSegmentation() +{ + if (m_streamMap->size()) + { + std::map::iterator it = m_streamMap->begin(); + for ( ; it != m_streamMap->end(); it++) + { + MediaStream *stream = it->second; + if (stream) + { + if (stream->GetMediaType() == AUDIOTYPE) + { + int32_t ret = EndEachAudio(stream); + if (ret) + return ret; + } + } } } @@ -629,15 +836,87 @@ int32_t DefaultSegmentation::WriteSegmentForEachVideo(MediaStream *stream, bool trackSegCtxs[tileIdx].codedMeta.presIndex++; trackSegCtxs[tileIdx].codedMeta.codingIndex++; - trackSegCtxs[tileIdx].codedMeta.presTime.num += 1000 / (m_frameRate.num / m_frameRate.den); - 
trackSegCtxs[tileIdx].codedMeta.presTime.den = 1000; + trackSegCtxs[tileIdx].codedMeta.presTime.m_num += 1000 / (m_frameRate.num / m_frameRate.den); + trackSegCtxs[tileIdx].codedMeta.presTime.m_den = 1000; m_segNum = dashSegmenter->GetSegmentsNum(); + +#ifdef _USE_TRACE_ + //trace + if (m_segNum == (m_prevSegNum + 1)) + { + uint64_t segSize = dashSegmenter->GetSegmentSize(); + uint32_t trackIndex = trackSegCtxs[tileIdx].trackIdx.GetIndex(); + const char *trackType = "tile_track"; + char tileRes[128] = { 0 }; + snprintf(tileRes, 128, "%d x %d", (trackSegCtxs[tileIdx].tileInfo)->tileWidth, (trackSegCtxs[tileIdx].tileInfo)->tileHeight); + + tracepoint(bandwidth_tp_provider, packed_segment_size, trackIndex, trackType, tileRes, m_segNum, segSize); + } +#endif } return ERROR_NONE; } +int32_t DefaultSegmentation::WriteSegmentForEachAudio(MediaStream *stream, FrameBSInfo *frameData, bool isKeyFrame, bool isEOS) +{ + if (!stream) + return OMAF_ERROR_NULL_PTR; + + AudioStream *as = (AudioStream*)stream; + + uint8_t hdrSize = as->GetHeaderDataSize(); + + std::map::iterator itStreamTrack; + itStreamTrack = m_streamSegCtx.find(stream); + if (itStreamTrack == m_streamSegCtx.end()) + return OMAF_ERROR_STREAM_NOT_FOUND; + + TrackSegmentCtx *trackSegCtx = itStreamTrack->second; + if (!trackSegCtx) + return OMAF_ERROR_NULL_PTR; + + if (isKeyFrame) + trackSegCtx->codedMeta.type = FrameType::IDR; + else + trackSegCtx->codedMeta.type = FrameType::NONIDR; + + trackSegCtx->codedMeta.isEOS = isEOS; + trackSegCtx->isEOS = isEOS; + + if (frameData && frameData->data && frameData->dataSize) + { + trackSegCtx->audioNalu.data = frameData->data + hdrSize; + trackSegCtx->audioNalu.dataSize = frameData->dataSize - hdrSize; + } + else + { + trackSegCtx->audioNalu.data = NULL; + trackSegCtx->audioNalu.dataSize = 0; + } + + DashSegmenter *dashSegmenter = trackSegCtx->dashSegmenter; + if (!dashSegmenter) + return OMAF_ERROR_NULL_PTR; + + //OMAF_LOG(LOG_INFO, "Write audio track segment !\n"); + int32_t ret = dashSegmenter->SegmentData(trackSegCtx); + if (ret) + return ret; + + trackSegCtx->codedMeta.presIndex++; + trackSegCtx->codedMeta.codingIndex++; + trackSegCtx->codedMeta.presTime.m_num += 1000 / (m_frameRate.num / m_frameRate.den); + trackSegCtx->codedMeta.presTime.m_den = 1000; + + //OMAF_LOG(LOG_INFO, "EOS %d\n", trackSegCtx->isEOS); + m_audioSegNum = dashSegmenter->GetSegmentsNum(); + + //OMAF_LOG(LOG_INFO, "AUDIO seg num %ld\n", m_audioSegNum); + return ERROR_NONE; +} + int32_t DefaultSegmentation::WriteSegmentForEachExtractorTrack( ExtractorTrack *extractorTrack, bool isKeyFrame, @@ -672,8 +951,22 @@ int32_t DefaultSegmentation::WriteSegmentForEachExtractorTrack( trackSegCtx->codedMeta.presIndex++; trackSegCtx->codedMeta.codingIndex++; - trackSegCtx->codedMeta.presTime.num += 1000 / (m_frameRate.num / m_frameRate.den); - trackSegCtx->codedMeta.presTime.den = 1000; + trackSegCtx->codedMeta.presTime.m_num += 1000 / (m_frameRate.num / m_frameRate.den); + trackSegCtx->codedMeta.presTime.m_den = 1000; + +#ifdef _USE_TRACE_ + uint64_t currSegNum = dashSegmenter->GetSegmentsNum(); + if (currSegNum == (m_prevSegNum + 1)) + { + uint64_t segSize = dashSegmenter->GetSegmentSize(); + uint32_t trackIndex = trackSegCtx->trackIdx.GetIndex(); + const char *trackType = "extractor_track"; + char tileRes[128] = { 0 }; + snprintf(tileRes, 128, "%s", "none"); + + tracepoint(bandwidth_tp_provider, packed_segment_size, trackIndex, trackType, tileRes, currSegNum, segSize); + } +#endif return ERROR_NONE; } @@ -686,7 +979,7 @@ int32_t 
DefaultSegmentation::StartExtractorTrackSegmentation( if (ret) { - LOG(ERROR) << "Failed to create extractor track segmentation thread !" << std::endl; + OMAF_LOG(LOG_ERROR, "Failed to create extractor track segmentation thread !\n"); return OMAF_ERROR_CREATE_THREAD; } @@ -702,7 +995,7 @@ int32_t DefaultSegmentation::StartLastExtractorTrackSegmentation( if (ret) { - LOG(ERROR) << "Failed to create extractor track segmentation thread !" << std::endl; + OMAF_LOG(LOG_ERROR, "Failed to create extractor track segmentation thread !\n"); return OMAF_ERROR_CREATE_THREAD; } @@ -730,16 +1023,37 @@ void *DefaultSegmentation::LastExtractorTrackSegThread(void *pThis) int32_t DefaultSegmentation::ExtractorTrackSegmentation() { + bool isFrameReady = false; + ExtractorTrack *extractorTrack = NULL; + pthread_t threadId = pthread_self(); + if (threadId == 0) + { + OMAF_LOG(LOG_ERROR, "NULL thread id for extractor track segmentation !\n"); + return OMAF_ERROR_INVALID_THREAD; + } + + while(1) + { + std::map::iterator itETThread; + itETThread = m_extractorThreadIds.find(threadId); + if (itETThread != m_extractorThreadIds.end()) + { + extractorTrack = itETThread->second; + break; + } + usleep(50); + } + while(1) { - std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); - std::map::iterator itExtractorTrack; + std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); + std::map::iterator itExtractorTrack; - pthread_t threadId = pthread_self(); - ExtractorTrack *extractorTrack = m_extractorThreadIds[threadId]; - if (!extractorTrack) - return OMAF_ERROR_NULL_PTR; + //pthread_t threadId = pthread_self(); + //ExtractorTrack *extractorTrack = m_extractorThreadIds[threadId]; + //if (!extractorTrack) + // return OMAF_ERROR_NULL_PTR; for (itExtractorTrack = extractorTracks->begin(); itExtractorTrack != extractorTracks->end(); itExtractorTrack++) @@ -749,12 +1063,14 @@ int32_t DefaultSegmentation::ExtractorTrackSegmentation() } if (itExtractorTrack == extractorTracks->end()) { - LOG(ERROR) << "Can't find specified Extractor Track! " << std::endl; + OMAF_LOG(LOG_ERROR, "Can't find specified Extractor Track!\n"); return OMAF_ERROR_INVALID_DATA; } - while (!(extractorTrack->GetFramesReadyStatus())) + isFrameReady = ((m_currSegedFrmNum == (m_prevSegedFrmNum + 1)) && (extractorTrack->GetProcessedFrmNum() == m_currProcessedFrmNum) && (m_currSegedFrmNum == (m_currProcessedFrmNum + 1))); + while (!isFrameReady) { usleep(50); + isFrameReady = ((m_currSegedFrmNum == (m_prevSegedFrmNum + 1)) && (extractorTrack->GetProcessedFrmNum() == m_currProcessedFrmNum) && (m_currSegedFrmNum == (m_currProcessedFrmNum + 1))); } uint8_t etId = 0; @@ -763,7 +1079,7 @@ int32_t DefaultSegmentation::ExtractorTrackSegmentation() if (itExtractorTrack == extractorTracks->end()) { - LOG(ERROR) << "Can't find specified Extractor Track! " << std::endl; + OMAF_LOG(LOG_ERROR, "Can't find specified Extractor Track!\n"); return OMAF_ERROR_INVALID_DATA; } @@ -776,7 +1092,7 @@ int32_t DefaultSegmentation::ExtractorTrackSegmentation() itET = m_extractorSegCtx.find(extractorTrack1); if (itET == m_extractorSegCtx.end()) { - LOG(ERROR) << "Can't find segmentation context for specified extractor track !" 
<< std::endl; + OMAF_LOG(LOG_ERROR, "Can't find segmentation context for specified extractor track !\n"); return OMAF_ERROR_INVALID_DATA; } TrackSegmentCtx *trackSegCtx = itET->second; @@ -797,7 +1113,6 @@ int32_t DefaultSegmentation::ExtractorTrackSegmentation() itExtractorTrack++; } if (m_isEOS) - //return ERROR_NONE; break; } @@ -806,15 +1121,36 @@ int32_t DefaultSegmentation::ExtractorTrackSegmentation() int32_t DefaultSegmentation::LastExtractorTrackSegmentation() { + bool isFrameReady = false; + ExtractorTrack *extractorTrack = NULL; + pthread_t threadId = pthread_self(); + if (threadId == 0) + { + OMAF_LOG(LOG_ERROR, "NULL thread id for extractor track segmentation !\n"); + return OMAF_ERROR_INVALID_THREAD; + } + while(1) { - std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); - std::map::iterator itExtractorTrack; + std::map::iterator itETThread; + itETThread = m_extractorThreadIds.find(threadId); + if (itETThread != m_extractorThreadIds.end()) + { + extractorTrack = itETThread->second; + break; + } + usleep(50); + } - pthread_t threadId = pthread_self(); - ExtractorTrack *extractorTrack = m_extractorThreadIds[threadId]; - if (!extractorTrack) - return OMAF_ERROR_NULL_PTR; + while(1) + { + std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); + std::map::iterator itExtractorTrack; + + //pthread_t threadId = pthread_self(); + //ExtractorTrack *extractorTrack = m_extractorThreadIds[threadId]; + //if (!extractorTrack) + //return OMAF_ERROR_NULL_PTR; for (itExtractorTrack = extractorTracks->begin(); itExtractorTrack != extractorTracks->end(); itExtractorTrack++) @@ -824,13 +1160,15 @@ int32_t DefaultSegmentation::LastExtractorTrackSegmentation() } if (itExtractorTrack == extractorTracks->end()) { - LOG(ERROR) << "Can't find specified Extractor Track! " << std::endl; + OMAF_LOG(LOG_ERROR, "Can't find specified Extractor Track!\n"); return OMAF_ERROR_INVALID_DATA; } - while (!(extractorTrack->GetFramesReadyStatus())) + isFrameReady = ((m_currSegedFrmNum == (m_prevSegedFrmNum + 1)) && (extractorTrack->GetProcessedFrmNum() == m_currProcessedFrmNum) && (m_currSegedFrmNum == (m_currProcessedFrmNum + 1))); + while (!isFrameReady) { usleep(50); + isFrameReady = ((m_currSegedFrmNum == (m_prevSegedFrmNum + 1)) && (extractorTrack->GetProcessedFrmNum() == m_currProcessedFrmNum) && (m_currSegedFrmNum == (m_currProcessedFrmNum + 1))); } uint8_t etId = 0; @@ -839,12 +1177,11 @@ int32_t DefaultSegmentation::LastExtractorTrackSegmentation() if (itExtractorTrack == extractorTracks->end()) { - LOG(ERROR) << "Can't find specified Extractor Track! " << std::endl; + OMAF_LOG(LOG_ERROR, "Can't find specified Extractor Track!\n"); return OMAF_ERROR_INVALID_DATA; } ExtractorTrack *extractorTrack1 = itExtractorTrack->second; - extractorTrack1->ConstructExtractors(); WriteSegmentForEachExtractorTrack(extractorTrack1, m_nowKeyFrame, m_isEOS); @@ -852,7 +1189,7 @@ int32_t DefaultSegmentation::LastExtractorTrackSegmentation() itET = m_extractorSegCtx.find(extractorTrack1); if (itET == m_extractorSegCtx.end()) { - LOG(ERROR) << "Can't find segmentation context for specified extractor track !" 
<< std::endl; + OMAF_LOG(LOG_ERROR, "Can't find segmentation context for specified extractor track !\n"); return OMAF_ERROR_INVALID_DATA; } TrackSegmentCtx *trackSegCtx = itET->second; @@ -873,13 +1210,42 @@ int32_t DefaultSegmentation::LastExtractorTrackSegmentation() itExtractorTrack++; } if (m_isEOS) - //return ERROR_NONE; break; } return ERROR_NONE; } +bool DefaultSegmentation::HasAudio() +{ + std::map::iterator it; + for (it = m_streamMap->begin(); it != m_streamMap->end(); it++) + { + MediaStream *stream = it->second; + if (stream && (stream->GetMediaType() == AUDIOTYPE)) + { + return true; + } + } + + return false; +} + +bool DefaultSegmentation::OnlyAudio() +{ + std::map::iterator it; + for (it = m_streamMap->begin(); it != m_streamMap->end(); it++) + { + MediaStream *stream = it->second; + if (stream && (stream->GetMediaType() == VIDEOTYPE)) + { + return false; + } + } + + return true; +} + int32_t DefaultSegmentation::VideoSegmentation() { uint64_t currentT = 0; @@ -891,20 +1257,68 @@ int32_t DefaultSegmentation::VideoSegmentation() if (ret) return ret; - m_mpdGen = new MpdGenerator( - &m_streamSegCtx, - &m_extractorSegCtx, - m_segInfo, - m_projType, - m_frameRate); - if (!m_mpdGen) - return OMAF_ERROR_NULL_PTR; + bool hasAudio = HasAudio(); + if (hasAudio) + { + uint32_t waitTimes = 10000; + uint32_t currWaitTime = 0; + while (currWaitTime < waitTimes) + { + { + std::lock_guard lock(m_audioMutex); + if (m_audioSegCtxsConsted) + { + break; + } + } + usleep(50); + currWaitTime++; + } + if (currWaitTime >= waitTimes) + { + OMAF_LOG(LOG_ERROR, "Constructing segmentation context for audio stream takes too long time !\n"); + return OMAF_ERROR_TIMED_OUT; + } - ret = m_mpdGen->Initialize(); + { + std::lock_guard lock(m_audioMutex); + m_mpdGen = new MpdGenerator( + &m_streamSegCtx, + &m_extractorSegCtx, + m_segInfo, + m_projType, + m_frameRate, + m_videosNum); + if (!m_mpdGen) + return OMAF_ERROR_NULL_PTR; - if (ret) - return ret; + ret = m_mpdGen->Initialize(); + if (ret) + return ret; + + m_isMpdGenInit = true; + } + } + else + { + m_mpdGen = new MpdGenerator( + &m_streamSegCtx, + &m_extractorSegCtx, + m_segInfo, + m_projType, + m_frameRate, + m_videosNum); + if (!m_mpdGen) + return OMAF_ERROR_NULL_PTR; + + ret = m_mpdGen->Initialize(); + + if (ret) + return ret; + + m_isMpdGenInit = true; + } std::map::iterator itStreamTrack; for (itStreamTrack = m_streamSegCtx.begin(); itStreamTrack != m_streamSegCtx.end(); itStreamTrack++) @@ -926,50 +1340,105 @@ int32_t DefaultSegmentation::VideoSegmentation() if (ret) return ret; +#ifdef _USE_TRACE_ + //trace + uint64_t initSegSize = initSegmenter->GetInitSegmentSize(); + uint32_t trackIndex = trackSegCtxs[tileIdx].trackIdx.GetIndex(); + const char *trackType = "init_track"; + char tileRes[128] = { 0 }; + snprintf(tileRes, 128, "%s", "none"); + + tracepoint(bandwidth_tp_provider, packed_segment_size, + trackIndex, trackType, tileRes, 0, initSegSize); +#endif } } } - std::map::iterator itExtractorTrack; - for (itExtractorTrack = m_extractorSegCtx.begin(); - itExtractorTrack != m_extractorSegCtx.end(); - itExtractorTrack++) + if (m_extractorSegCtx.size()) { - TrackSegmentCtx *trackSegCtx = itExtractorTrack->second; + std::map::iterator itExtractorTrack; + for (itExtractorTrack = m_extractorSegCtx.begin(); + itExtractorTrack != m_extractorSegCtx.end(); + itExtractorTrack++) + { + TrackSegmentCtx *trackSegCtx = itExtractorTrack->second; - DashInitSegmenter *initSegmenter = trackSegCtx->initSegmenter; - if (!initSegmenter) - return OMAF_ERROR_NULL_PTR; 
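The extractor-track threads in this patch no longer wait on GetFramesReadyStatus(); they poll a three-part counter condition instead (the main loop has segmented exactly one more frame than before, the extractor track has processed the expected frame, and segmentation is exactly one frame ahead of processing), sleeping 50 µs between checks. Below is a compact sketch of that busy-wait handshake; the std::atomic counters are an assumption made so the example is self-contained, whereas the real code uses plain members.

```cpp
#include <atomic>
#include <chrono>
#include <cstdint>
#include <iostream>
#include <thread>

std::atomic<uint64_t> currSegedFrmNum{0}, prevSegedFrmNum{0}, currProcessedFrmNum{0};
std::atomic<uint64_t> trackProcessedFrmNum{0};   // stands in for ExtractorTrack::GetProcessedFrmNum()
std::atomic<bool> done{false};

// Mirrors the isFrameReady predicate polled in ExtractorTrackSegmentation().
bool FrameReady()
{
    return currSegedFrmNum.load() == prevSegedFrmNum.load() + 1 &&
           trackProcessedFrmNum.load() == currProcessedFrmNum.load() &&
           currSegedFrmNum.load() == currProcessedFrmNum.load() + 1;
}

void ExtractorThread()
{
    while (!done.load())
    {
        while (!FrameReady() && !done.load())
            std::this_thread::sleep_for(std::chrono::microseconds(50));  // the usleep(50) poll
        if (done.load()) break;
        std::cout << "segmenting extractor frame " << currSegedFrmNum.load() << "\n";
        prevSegedFrmNum.store(currSegedFrmNum.load());   // mark this frame as consumed
    }
}

int main()
{
    std::thread worker(ExtractorThread);
    for (uint64_t frame = 1; frame <= 3; frame++)
    {
        trackProcessedFrmNum.store(frame - 1);   // extractor track finished frame - 1
        currProcessedFrmNum.store(frame - 1);
        currSegedFrmNum.store(frame);            // main loop segmented one more frame
        while (prevSegedFrmNum.load() != frame)  // wait for the worker to catch up
            std::this_thread::sleep_for(std::chrono::microseconds(50));
    }
    done.store(true);
    worker.join();
    return 0;
}
```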
+ DashInitSegmenter *initSegmenter = trackSegCtx->initSegmenter; + if (!initSegmenter) + return OMAF_ERROR_NULL_PTR; - ret = initSegmenter->GenerateInitSegment(trackSegCtx, m_trackSegCtx); - if (ret) - return ret; - } + ret = initSegmenter->GenerateInitSegment(trackSegCtx, m_trackSegCtx); + if (ret) + return ret; +#ifdef _USE_TRACE_ + //trace + uint64_t initSegSize = initSegmenter->GetInitSegmentSize(); + uint32_t trackIndex = trackSegCtx->trackIdx.GetIndex(); + const char *trackType = "init_track"; + char tileRes[128] = { 0 }; + snprintf(tileRes, 128, "%s", "none"); + + tracepoint(bandwidth_tp_provider, packed_segment_size, + trackIndex, trackType, tileRes, 0, initSegSize); +#endif + } + } m_prevSegNum = m_segNum; uint16_t extractorTrackNum = m_extractorSegCtx.size(); - if (extractorTrackNum % m_segInfo->extractorTracksPerSegThread == 0) + if (extractorTrackNum) { - m_aveETPerSegThread = m_segInfo->extractorTracksPerSegThread; - m_lastETPerSegThread = m_segInfo->extractorTracksPerSegThread; - m_threadNumForET = extractorTrackNum / m_segInfo->extractorTracksPerSegThread; - } - else - { - m_aveETPerSegThread = m_segInfo->extractorTracksPerSegThread; - m_lastETPerSegThread = extractorTrackNum % m_segInfo->extractorTracksPerSegThread; - m_threadNumForET = extractorTrackNum / m_segInfo->extractorTracksPerSegThread + 1; + if (extractorTrackNum % m_segInfo->extractorTracksPerSegThread == 0) + { + m_aveETPerSegThread = m_segInfo->extractorTracksPerSegThread; + m_lastETPerSegThread = m_segInfo->extractorTracksPerSegThread; + m_threadNumForET = extractorTrackNum / m_segInfo->extractorTracksPerSegThread; + } + else + { + m_aveETPerSegThread = m_segInfo->extractorTracksPerSegThread; + m_lastETPerSegThread = extractorTrackNum % m_segInfo->extractorTracksPerSegThread; + m_threadNumForET = extractorTrackNum / m_segInfo->extractorTracksPerSegThread + 1; + } + + OMAF_LOG(LOG_INFO, "Lanuch %d threads for Extractor Track segmentation!\n", m_threadNumForET); + OMAF_LOG(LOG_INFO, "Average Extractor Track number per thread is %d\n", m_aveETPerSegThread); + OMAF_LOG(LOG_INFO, "The last thread involves %d Extractor Tracks !\n", m_lastETPerSegThread); } - LOG(INFO) << "Lanuch " << m_threadNumForET << " threads for Extractor Track segmentation!" << std::endl; - LOG(INFO) << "Average Extractor Track number per thread is " << m_aveETPerSegThread << std::endl; - LOG(INFO) << "The last thread involves " << m_lastETPerSegThread << " Extractor Tracks !" 
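// Illustrative sketch (not from this patch) of the thread-partitioning
// arithmetic above: extractorTracksPerSegThread tracks go to every thread,
// with one extra thread picking up the remainder. Struct and function names
// are hypothetical. Example: 10 tracks with 4 per thread -> 3 threads (4, 4, 2).
#include <cstdint>

struct ThreadPartition
{
    uint16_t threadNum;          // threads to launch
    uint16_t tracksPerThread;    // tracks handled by every thread but the last
    uint16_t tracksInLastThread; // tracks handled by the last thread
};

static ThreadPartition PartitionExtractorTracks(uint16_t trackNum, uint16_t perThread)
{
    ThreadPartition p = {0, 0, 0};
    if (trackNum == 0 || perThread == 0)
        return p;

    p.tracksPerThread = perThread;
    if (trackNum % perThread == 0)
    {
        p.threadNum          = trackNum / perThread;
        p.tracksInLastThread = perThread;
    }
    else
    {
        p.threadNum          = trackNum / perThread + 1;   // extra thread for the remainder
        p.tracksInLastThread = trackNum % perThread;
    }
    return p;
}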
<< std::endl; +#ifdef _USE_TRACE_ + int64_t trackIdxTag = 0; +#endif while (1) { if (m_segNum == 1) { + if (hasAudio) + { + uint32_t waitTimes = 50000; + uint32_t currWaitTime = 0; + while (currWaitTime < waitTimes) + { + { + std::lock_guard lock(m_audioMutex); + if (m_audioSegNum >= 1) + { + break; + } + } + usleep(50); + currWaitTime++; + } + if (currWaitTime >= waitTimes) + { + OMAF_LOG(LOG_ERROR, "It takes too much time to generate the first audio segment !\n"); + return OMAF_ERROR_TIMED_OUT; + } + } + if (m_segInfo->isLive) { m_mpdGen->UpdateMpd(m_segNum, m_framesNum); @@ -1000,6 +1469,16 @@ int32_t DefaultSegmentation::VideoSegmentation() m_framesIsKey[vs] = currFrame->isKeyFrame; m_streamsIsEOS[vs] = false; +#ifdef _USE_TRACE_ + //trace + char resolution[1024] = { 0 }; + snprintf(resolution, 1024, "%d x %d", vs->GetSrcWidth(), vs->GetSrcHeight()); + char tileSplit[1024] = { 0 }; + snprintf(tileSplit, 1024, "%d x %d", vs->GetTileInCol(), vs->GetTileInRow()); + tracepoint(bandwidth_tp_provider, encoded_frame_size, + &resolution[0], &tileSplit[0], m_framesNum, currFrame->dataSize); +#endif + vs->UpdateTilesNalu(); WriteSegmentForEachVideo(vs, currFrame->isKeyFrame, false); } @@ -1023,7 +1502,6 @@ int32_t DefaultSegmentation::VideoSegmentation() bool keyFrame = itKeyFrame->second; if (frameIsKey != keyFrame) return OMAF_ERROR_INVALID_DATA; - } m_nowKeyFrame = frameIsKey; @@ -1040,28 +1518,18 @@ int32_t DefaultSegmentation::VideoSegmentation() } m_isEOS = nowEOS; - std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); - std::map::iterator itExtractorTrack = extractorTracks->begin(); - for ( ; itExtractorTrack != extractorTracks->end(); /*itExtractorTrack++*/) + m_currSegedFrmNum++; + + std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); + if (extractorTracks->size()) { - ExtractorTrack *extractorTrack = itExtractorTrack->second; - extractorTrack->SetFramesReady(true); - if (m_extractorThreadIds.size() < m_threadNumForET) + std::map::iterator itExtractorTrack = extractorTracks->begin(); + for ( ; itExtractorTrack != extractorTracks->end(); /*itExtractorTrack++*/) { - if (m_aveETPerSegThread == m_lastETPerSegThread) + ExtractorTrack *extractorTrack = itExtractorTrack->second; + if (m_extractorThreadIds.size() < m_threadNumForET) { - int32_t retET = StartExtractorTrackSegmentation(extractorTrack); - if (retET) - return retET; - - for (uint16_t num = 0; num < m_aveETPerSegThread; num++) - { - itExtractorTrack++; - } - } - else - { - if ((uint16_t)(m_extractorThreadIds.size()) < (m_threadNumForET - 1)) + if (m_aveETPerSegThread == m_lastETPerSegThread) { int32_t retET = StartExtractorTrackSegmentation(extractorTrack); if (retET) @@ -1069,48 +1537,73 @@ int32_t DefaultSegmentation::VideoSegmentation() for (uint16_t num = 0; num < m_aveETPerSegThread; num++) { - itExtractorTrack++; + if (itExtractorTrack != extractorTracks->end()) + { + itExtractorTrack++; + } } } else { - int32_t retET = StartLastExtractorTrackSegmentation(extractorTrack); - if (retET) - return retET; - - for ( ; itExtractorTrack != extractorTracks->end(); ) + if ((uint16_t)(m_extractorThreadIds.size()) < (m_threadNumForET - 1)) + { + int32_t retET = StartExtractorTrackSegmentation(extractorTrack); + if (retET) + return retET; + + for (uint16_t num = 0; num < m_aveETPerSegThread; num++) + { + if (itExtractorTrack != extractorTracks->end()) + { + itExtractorTrack++; + } + } + } + else { - itExtractorTrack++; + int32_t retET = StartLastExtractorTrackSegmentation(extractorTrack); + if 
(retET) + return retET; + + for ( ; itExtractorTrack != extractorTracks->end(); ) + { + itExtractorTrack++; + } } } } + else + { + break; + } } - else + if (m_extractorThreadIds.size() != m_threadNumForET) { - itExtractorTrack++; + OMAF_LOG(LOG_ERROR, "Launched threads number %ld doesn't match calculated threads number %d\n", (m_extractorThreadIds.size()), m_threadNumForET); } - } - if (m_extractorThreadIds.size() != m_threadNumForET) - { - LOG(ERROR) << "Launched threads number " << (m_extractorThreadIds.size()) << " doesn't match calculated threads number " << m_threadNumForET << std::endl; - } - usleep(2000); + usleep(2000); - for (itExtractorTrack = extractorTracks->begin(); - itExtractorTrack != extractorTracks->end(); - itExtractorTrack++) - { - ExtractorTrack *extractorTrack = itExtractorTrack->second; - while (extractorTrack->GetProcessedFrmNum() == m_framesNum) + for (itExtractorTrack = extractorTracks->begin(); + itExtractorTrack != extractorTracks->end(); + itExtractorTrack++) { - usleep(1); + ExtractorTrack *extractorTrack = itExtractorTrack->second; + while (extractorTrack->GetProcessedFrmNum() == m_framesNum) + { + usleep(1); - if (extractorTrack->GetProcessedFrmNum() == (m_framesNum + 1)) - break; + if (extractorTrack->GetProcessedFrmNum() == (m_framesNum + 1)) + { + break; + } + } } } + m_prevSegedFrmNum++; + m_currProcessedFrmNum++; + for (itStream = m_streamMap->begin(); itStream != m_streamMap->end(); itStream++) { MediaStream *stream = itStream->second; @@ -1126,7 +1619,6 @@ int32_t DefaultSegmentation::VideoSegmentation() vs->AddFrameToSegment(); } } - //m_framesNum++; if (m_segNum == (m_prevSegNum + 1)) { @@ -1134,7 +1626,7 @@ int32_t DefaultSegmentation::VideoSegmentation() std::chrono::high_resolution_clock clock; uint64_t before = std::chrono::duration_cast(clock.now().time_since_epoch()).count(); - LOG(INFO) << "Complete one seg on " << (before - currentT) << " ms" << std::endl; + OMAF_LOG(LOG_INFO, "Complete one seg for video in %lld ms\n", (before - currentT)); currentT = before; } @@ -1145,26 +1637,34 @@ int32_t DefaultSegmentation::VideoSegmentation() int32_t removeCnt = m_segNum - m_segInfo->windowSize - m_segInfo->extraWindowSize; if (removeCnt > 0) { - std::map::iterator itOneTrack; + std::map::iterator itOneTrack; +#ifdef _USE_TRACE_ + auto itOneTrackTag = m_allTileTracks.begin(); + trackIdxTag = itOneTrackTag->first.GetIndex(); +#endif + for (itOneTrack = m_allTileTracks.begin(); itOneTrack != m_allTileTracks.end(); itOneTrack++) { - TrackId trackIndex = itOneTrack->first; + VCD::MP4::TrackId trackIndex = itOneTrack->first; char rmFile[1024]; - snprintf(rmFile, 1024, "%s%s_track%d.%d.mp4", m_segInfo->dirName, m_segInfo->outName, trackIndex.get(), removeCnt); + snprintf(rmFile, 1024, "%s%s_track%d.%d.mp4", m_segInfo->dirName, m_segInfo->outName, trackIndex.GetIndex(), removeCnt); remove(rmFile); } - std::map::iterator itOneExtractorTrack; - for (itOneExtractorTrack = m_extractorSegCtx.begin(); - itOneExtractorTrack != m_extractorSegCtx.end(); - itOneExtractorTrack++) + if (m_extractorSegCtx.size()) { - TrackSegmentCtx *trackSegCtx = itOneExtractorTrack->second; - TrackId trackIndex = trackSegCtx->trackIdx; - char rmFile[1024]; - snprintf(rmFile, 1024, "%s%s_track%d.%d.mp4", m_segInfo->dirName, m_segInfo->outName, trackIndex.get(), removeCnt); - remove(rmFile); + std::map::iterator itOneExtractorTrack; + for (itOneExtractorTrack = m_extractorSegCtx.begin(); + itOneExtractorTrack != m_extractorSegCtx.end(); + itOneExtractorTrack++) + { + TrackSegmentCtx 
*trackSegCtx = itOneExtractorTrack->second; + VCD::MP4::TrackId trackIndex = trackSegCtx->trackIdx; + char rmFile[1024]; + snprintf(rmFile, 1024, "%s%s_track%d.%d.mp4", m_segInfo->dirName, m_segInfo->outName, trackIndex.GetIndex(), removeCnt); + remove(rmFile); + } } } } @@ -1174,24 +1674,297 @@ int32_t DefaultSegmentation::VideoSegmentation() { if (m_segInfo->isLive) { + if (hasAudio) + { + uint32_t waitTimes = 10000; + uint32_t currWaitTime = 0; + while (currWaitTime < waitTimes) + { + { + std::lock_guard lock(m_audioMutex); + if (m_audioSegNum >= m_segNum) + { + break; + } + } + usleep(50); + currWaitTime++; + } + if (currWaitTime >= waitTimes) + { + OMAF_LOG(LOG_ERROR, "Audio still hasn't generated all segments !\n"); + OMAF_LOG(LOG_ERROR, "Video segments num %ld and audio segments num %ld\n", m_segNum, m_audioSegNum); + return OMAF_ERROR_TIMED_OUT; + } + } int32_t ret = m_mpdGen->UpdateMpd(m_segNum, m_framesNum); if (ret) return ret; } else { +#ifdef _USE_TRACE_ + //trace + const char *dashMode = "static"; + float currFrameRate = (float)(m_frameRate.num) / (float)(m_frameRate.den); + tracepoint(bandwidth_tp_provider, segmentation_info, + dashMode, m_segInfo->segDuration, currFrameRate, + m_videosNum, m_videosBitrate, + m_framesNum, m_segNum); +#endif + + if (hasAudio) + { + uint32_t waitTimes = 10000; + uint32_t currWaitTime = 0; + while (currWaitTime < waitTimes) + { + { + std::lock_guard lock(m_audioMutex); + if (m_audioSegNum >= m_segNum) + { + break; + } + } + usleep(50); + currWaitTime++; + } + if (currWaitTime >= waitTimes) + { + OMAF_LOG(LOG_ERROR, "Audio still hasn't generated all segments !\n"); + OMAF_LOG(LOG_ERROR, "Video segments num %ld and audio segments num %ld\n", m_segNum, m_audioSegNum); + return OMAF_ERROR_TIMED_OUT; + } + } + int32_t ret = m_mpdGen->WriteMpd(m_framesNum); if (ret) return ret; } - LOG(INFO) << "Total " << m_framesNum << " frames written into segments!" 
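// Illustrative sketch (not from this patch) of the live sliding-window cleanup
// above: only the newest (windowSize + extraWindowSize) segments are kept per
// track, and older ones are deleted by index using the
// <dirName><outName>_track<id>.<segIdx>.mp4 naming scheme. Helper name is
// hypothetical.
#include <cstdint>
#include <cstdio>

static void RemoveExpiredSegment(const char *dirName, const char *outName,
                                 int32_t trackIndex, int64_t segNum,
                                 int64_t windowSize, int64_t extraWindowSize)
{
    int64_t removeIdx = segNum - windowSize - extraWindowSize;
    if (removeIdx <= 0)
        return;                       // sliding window is not full yet

    char rmFile[1024];
    snprintf(rmFile, 1024, "%s%s_track%d.%d.mp4",
             dirName, outName, trackIndex, (int32_t)removeIdx);
    remove(rmFile);                   // failures are ignored, as in the patch
}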
<< std::endl; - //return ERROR_NONE; + OMAF_LOG(LOG_INFO, "Totally write %ld frames into video tracks!\n", m_framesNum); break; } +#ifdef _USE_TRACE_ + string tag = "trackIdx:" + to_string(trackIdxTag); + tracepoint(E2E_latency_tp_provider, + post_op_info, + m_framesNum, + tag.c_str()); +#endif m_framesNum++; } return ERROR_NONE; } +int32_t DefaultSegmentation::AudioSegmentation() +{ + OMAF_LOG(LOG_INFO, "Launch audio segmentation thread !\n"); + uint64_t currentT = 0; + int32_t ret = ConstructAudioTrackSegCtx(); + if (ret) + return ret; + OMAF_LOG(LOG_INFO, "Construction for audio track segmentation context DONE !\n"); + bool onlyAudio = OnlyAudio(); + if (onlyAudio) + { + m_mpdGen = new MpdGenerator( + &m_streamSegCtx, + &m_extractorSegCtx, + m_segInfo, + m_projType, + m_frameRate, + 0); + if (!m_mpdGen) + return OMAF_ERROR_NULL_PTR; + + ret = m_mpdGen->Initialize(); + + if (ret) + return ret; + + m_isMpdGenInit = true; + } + else + { + bool mpdGenInitialized = false; + while(!mpdGenInitialized) + { + { + std::lock_guard lock(m_audioMutex); + if (m_isMpdGenInit) + { + mpdGenInitialized = true; + break; + } + } + + usleep(50); + } + } + + std::map::iterator itStreamTrack; + for (itStreamTrack = m_streamSegCtx.begin(); itStreamTrack != m_streamSegCtx.end(); itStreamTrack++) + { + MediaStream *stream = itStreamTrack->first; + TrackSegmentCtx* trackSegCtx = itStreamTrack->second; + + if (stream->GetMediaType() == AUDIOTYPE) + { + DashInitSegmenter *initSegmenter = trackSegCtx->initSegmenter; + if (!initSegmenter) + return OMAF_ERROR_NULL_PTR; + + ret = initSegmenter->GenerateInitSegment(trackSegCtx, m_trackSegCtx); + if (ret) + return ret; + + } + } + + OMAF_LOG(LOG_INFO, "Done audio initial segment !\n"); + m_audioPrevSegNum = m_audioSegNum; + OMAF_LOG(LOG_INFO, "Initial audio segment num %ld\n", m_audioSegNum); + + bool nowEOS = false; + bool eosWritten = false; + uint64_t framesWritten = 0; + while(1) + { + if (onlyAudio) + { + if (m_audioSegNum == 1) + { + if (m_segInfo->isLive) + { + m_mpdGen->UpdateMpd(m_audioSegNum, m_framesNum); + } + } + } + + std::map::iterator itStream = m_streamMap->begin(); + for ( ; itStream != m_streamMap->end(); itStream++) + { + MediaStream *stream = itStream->second; + if (stream && (stream->GetMediaType() == AUDIOTYPE)) + { + AudioStream *as = (AudioStream*)stream; + as->SetCurrFrameInfo(); + FrameBSInfo *currFrame = as->GetCurrFrameInfo(); + + while (!currFrame) + { + usleep(50); + as->SetCurrFrameInfo(); + currFrame = as->GetCurrFrameInfo(); + if (!currFrame && (as->GetEOS())) + break; + } + nowEOS = as->GetEOS(); + if (currFrame) + { + WriteSegmentForEachAudio(as, currFrame, true, false); + framesWritten++; + } + else + { + eosWritten = true; + } + } + } + + for (itStream = m_streamMap->begin(); itStream != m_streamMap->end(); itStream++) + { + MediaStream *stream = itStream->second; + if (stream && (stream->GetMediaType() == AUDIOTYPE)) + { + AudioStream *as = (AudioStream*)stream; + + if (m_audioSegNum == (m_audioPrevSegNum + 1)) + { + as->DestroyCurrSegmentFrames(); + } + + as->AddFrameToSegment(); + } + } + + if (m_audioSegNum == (m_audioPrevSegNum + 1)) + { + m_audioPrevSegNum++; + + std::chrono::high_resolution_clock clock; + uint64_t before = std::chrono::duration_cast(clock.now().time_since_epoch()).count(); + OMAF_LOG(LOG_INFO, "Complete one seg for audio in %lld ms\n", (before - currentT)); + currentT = before; + } + + if (m_segInfo->isLive) + { + if (m_segInfo->windowSize && m_segInfo->extraWindowSize) + { + int32_t removeCnt = m_audioSegNum 
- m_segInfo->windowSize - m_segInfo->extraWindowSize; + if (removeCnt > 0) + { + for (itStream = m_streamMap->begin(); itStream != m_streamMap->end(); itStream++) + { + MediaStream *stream = itStream->second; + if (stream && (stream->GetMediaType() == AUDIOTYPE)) + { + TrackSegmentCtx* oneSegCtx = m_streamSegCtx[stream]; + if (!oneSegCtx) + return OMAF_ERROR_NULL_PTR; + + VCD::MP4::TrackId trackIndex = oneSegCtx->trackIdx; + char rmFile[1024]; + snprintf(rmFile, 1024, "%s%s_track%d.%d.mp4", m_segInfo->dirName, m_segInfo->outName, trackIndex.GetIndex(), removeCnt); + remove(rmFile); + } + } + } + } + } + + if (onlyAudio) + { + if (nowEOS && eosWritten) + { + if (m_segInfo->isLive) + { + int32_t ret = m_mpdGen->UpdateMpd(m_audioSegNum, m_framesNum); + if (ret) + return ret; + } else { + int32_t ret = m_mpdGen->WriteMpd(m_framesNum); + if (ret) + return ret; + } + OMAF_LOG(LOG_INFO, "Total %ld frames written into segments!\n", m_framesNum); + break; + } + m_framesNum++; + } + + //OMAF_LOG(LOG_INFO, "NOW eos %d \n", nowEOS); + if (nowEOS && eosWritten) + { + std::map::iterator itStr = m_streamMap->begin(); + for ( ; itStr != m_streamMap->end(); itStr++) + { + MediaStream *stream = itStr->second; + if (stream && (stream->GetMediaType() == AUDIOTYPE)) + { + AudioStream *as = (AudioStream*)stream; + WriteSegmentForEachAudio(as, NULL, false, true); + } + } + + break; + } + } + + OMAF_LOG(LOG_INFO, "Totally write %ld frames into audio track!\n", framesWritten); + return ERROR_NONE; +} + int32_t DefaultSegmentation::EndEachVideo(MediaStream *stream) { if (!stream) @@ -1203,4 +1976,15 @@ int32_t DefaultSegmentation::EndEachVideo(MediaStream *stream) return ERROR_NONE; } +int32_t DefaultSegmentation::EndEachAudio(MediaStream *stream) +{ + if (!stream) + return OMAF_ERROR_NULL_PTR; + + AudioStream *as = (AudioStream*)stream; + as->SetEOS(true); + + return ERROR_NONE; +} + VCD_NS_END diff --git a/src/VROmafPacking/DefaultSegmentation.h b/src/VROmafPacking/DefaultSegmentation.h index 12b312d7..6296aeb1 100644 --- a/src/VROmafPacking/DefaultSegmentation.h +++ b/src/VROmafPacking/DefaultSegmentation.h @@ -35,6 +35,7 @@ #ifndef _DEFAULTSEGMENTATION_H_ #define _DEFAULTSEGMENTATION_H_ +#include #include "Segmentation.h" #include "DashSegmenter.h" @@ -54,17 +55,25 @@ class DefaultSegmentation : public Segmentation DefaultSegmentation() { m_segNum = 0; + m_audioSegNum = 0; + m_audioPrevSegNum = 0; + m_audioSegCtxsConsted = false; m_framesNum = 0; m_videoSegInfo = NULL; m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; m_isEOS = false; m_nowKeyFrame = false; m_prevSegNum = 0; - pthread_mutex_init(&m_mutex, NULL); m_isFramesReady = false; m_aveETPerSegThread = 0; m_lastETPerSegThread = 0; m_threadNumForET = 0; + m_videosNum = 0; + m_videosBitrate = NULL; + m_prevSegedFrmNum = 0; + m_currSegedFrmNum = 0; + m_currProcessedFrmNum = 0; + m_isMpdGenInit = false; }; //! 
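// Illustrative sketch (not from this patch): condensed view of the frame-pull
// loop in AudioSegmentation() above. It polls the audio stream until a frame
// is available or EOS is reached; a NULL return with reachedEOS == true tells
// the caller to flush the final segment. FetchNextAudioFrame is a hypothetical
// helper; AudioStream and FrameBSInfo come from the project headers.
#include <unistd.h>

static FrameBSInfo* FetchNextAudioFrame(AudioStream *as, bool &reachedEOS)
{
    as->SetCurrFrameInfo();
    FrameBSInfo *frame = as->GetCurrFrameInfo();

    while (!frame)                     // no frame queued yet
    {
        usleep(50);                    // brief back-off, as in the patch
        as->SetCurrFrameInfo();
        frame = as->GetCurrFrameInfo();
        if (!frame && as->GetEOS())    // producer finished and the queue is drained
            break;
    }

    reachedEOS = as->GetEOS();
    return frame;                      // NULL here means EOS without a new frame
}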
@@ -82,17 +91,74 @@ class DefaultSegmentation : public Segmentation DefaultSegmentation(std::map *streams, ExtractorTrackManager *extractorTrackMan, InitialInfo *initInfo) : Segmentation(streams, extractorTrackMan, initInfo) { m_segNum = 0; + m_audioSegNum = 0; + m_audioPrevSegNum = 0; + m_audioSegCtxsConsted = false; m_framesNum = 0; m_videoSegInfo = NULL; m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; m_isEOS = false; m_nowKeyFrame = false; m_prevSegNum = 0; - pthread_mutex_init(&m_mutex, NULL); m_isFramesReady = false; m_aveETPerSegThread = 0; m_lastETPerSegThread = 0; m_threadNumForET = 0; + m_videosNum = 0; + m_videosBitrate = NULL; + m_prevSegedFrmNum = 0; + m_currSegedFrmNum = 0; + m_currProcessedFrmNum = 0; + m_isMpdGenInit = false; + }; + + DefaultSegmentation(const DefaultSegmentation& src) + { + m_segNum = src.m_segNum; + m_audioSegNum = src.m_audioSegNum; + m_audioPrevSegNum = src.m_audioPrevSegNum; + m_audioSegCtxsConsted = src.m_audioSegCtxsConsted; + m_framesNum = src.m_framesNum; + m_videoSegInfo = std::move(src.m_videoSegInfo); + m_projType = src.m_projType; + m_isEOS = src.m_isEOS; + m_nowKeyFrame = src.m_nowKeyFrame; + m_prevSegNum = src.m_prevSegNum; + m_isFramesReady = src.m_isFramesReady; + m_aveETPerSegThread = src.m_aveETPerSegThread; + m_lastETPerSegThread = src.m_lastETPerSegThread; + m_threadNumForET = src.m_threadNumForET; + m_videosNum = src.m_videosNum; + m_videosBitrate = std::move(src.m_videosBitrate); + m_prevSegedFrmNum = src.m_prevSegedFrmNum; + m_currSegedFrmNum = src.m_currSegedFrmNum; + m_currProcessedFrmNum = src.m_currProcessedFrmNum; + m_isMpdGenInit = src.m_isMpdGenInit; + }; + + DefaultSegmentation& operator=(DefaultSegmentation&& other) + { + m_segNum = other.m_segNum; + m_audioSegNum = other.m_audioSegNum; + m_audioPrevSegNum = other.m_audioPrevSegNum; + m_audioSegCtxsConsted = other.m_audioSegCtxsConsted; + m_framesNum = other.m_framesNum; + m_videoSegInfo = NULL; + m_projType = other.m_projType; + m_isEOS = other.m_isEOS; + m_nowKeyFrame = other.m_nowKeyFrame; + m_prevSegNum = other.m_prevSegNum; + m_isFramesReady = other.m_isFramesReady; + m_aveETPerSegThread = other.m_aveETPerSegThread; + m_lastETPerSegThread = other.m_lastETPerSegThread; + m_threadNumForET = other.m_threadNumForET; + m_videosNum = other.m_videosNum; + m_videosBitrate = NULL; + m_prevSegedFrmNum = other.m_prevSegedFrmNum; + m_currSegedFrmNum = other.m_currSegedFrmNum; + m_currProcessedFrmNum = other.m_currProcessedFrmNum; + m_isMpdGenInit = other.m_isMpdGenInit; + return *this; }; //! @@ -111,13 +177,31 @@ class DefaultSegmentation : public Segmentation //! //! \brief End the segmentation process for - //! all media streams + //! all video streams //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! virtual int32_t VideoEndSegmentation(); + //! + //! \brief Execute the segmentation process for + //! all audio streams + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + virtual int32_t AudioSegmentation(); + + //! + //! \brief End the segmentation process for + //! all audio streams + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + virtual int32_t AudioEndSegmentation(); + private: //! //! \brief Write povd box for segments, @@ -132,7 +216,7 @@ class DefaultSegmentation : public Segmentation //! //! \brief Construct track segmentation context - //! for all tracks and all media streams + //! for all video tile tracks //! //! \return int32_t //! 
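// Illustrative sketch (not from this patch; names and threading model are an
// assumption): with the audio entry points declared here, audio segmentation
// is expected to run concurrently with video segmentation, e.g. on a worker
// thread, since VideoSegmentation() waits on the audio-side MPD/segment state.
#include <thread>
#include <cstdint>

static int32_t RunAVSegmentation(DefaultSegmentation &seg)
{
    int32_t audioRet = 0;
    std::thread audioThread([&seg, &audioRet]() {
        audioRet = seg.AudioSegmentation();   // declared in this class
    });

    int32_t videoRet = seg.VideoSegmentation();
    audioThread.join();

    return videoRet ? videoRet : audioRet;    // first non-zero error wins
}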
ERROR_NONE if success, else failed reason @@ -148,6 +232,15 @@ class DefaultSegmentation : public Segmentation //! int32_t ConstructExtractorTrackSegCtx(); + //! + //! \brief Construct track segmentation context + //! for all audio tracks + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t ConstructAudioTrackSegCtx(); + //! //! \brief Write segments for specified video stream //! @@ -173,6 +266,17 @@ class DefaultSegmentation : public Segmentation bool isKeyFrame, bool isEOS); + //! + //! \brief Write segments for specified audio stream + //! + //! \param [in] stream + //! pointer to specified audio stream + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t WriteSegmentForEachAudio(MediaStream *stream, FrameBSInfo *frameData, bool isKeyFrame, bool isEOS); + //! //! \brief End segmentation process for specified video stream //! @@ -184,6 +288,17 @@ class DefaultSegmentation : public Segmentation //! int32_t EndEachVideo(MediaStream *stream); + //! + //! \brief End segmentation process for specified audio stream + //! + //! \param [in] stream + //! pointer to specified audio stream + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t EndEachAudio(MediaStream *stream); + //! //! \brief Start segmentation thread for specified extractor track //! @@ -257,32 +372,59 @@ class DefaultSegmentation : public Segmentation //! void SetFramesReadyStatus(bool isFramesReady) { - pthread_mutex_lock(&m_mutex); + std::lock_guard lock(m_mutex); m_isFramesReady = isFramesReady; - pthread_mutex_unlock(&m_mutex); }; + //! + //! \brief Check whether there are only audio streams + //! in input streams + //! + //! \return bool + //! true if there are only audio streams, else false + //! + bool OnlyAudio(); + + //! + //! \brief Check whether there is audio stream in input + //! streams + //! + //! \return bool + //! true if there is audio stream, else false + //! 
+ bool HasAudio(); + private: std::map m_streamSegCtx; //!< map of media stream and its track segmentation context std::map m_extractorSegCtx; //!< map of extractor track and its track segmentation context - std::map m_allTileTracks; //!< map of track and its track configuration + std::map m_allTileTracks; //!< map of track and its track configuration std::map m_framesIsKey; //!< map of media stream and its current frame status (IDR or not) std::map m_streamsIsEOS; //!< map of media stream and its current EOS status VCD::OMAF::ProjectionFormat m_projType; //!< picture projection type VideoSegmentInfo *m_videoSegInfo; //!< pointer to the video segment information - std::map> m_tilesTrackIdxs; //!< map of tile and its track index - std::map m_trackSegCtx; //!< map of tile track and its track segmentation context + std::map> m_tilesTrackIdxs; //!< map of tile and its track index + std::map m_trackSegCtx; //!< map of tile track and its track segmentation context uint64_t m_segNum; //!< current written segments number + std::mutex m_audioMutex; + uint64_t m_audioSegNum; + uint64_t m_audioPrevSegNum; + bool m_audioSegCtxsConsted; uint64_t m_framesNum; //!< current written frames number std::map m_extractorThreadIds; //!< map of thread ID for extractor track segmentation and corresponding extractor track bool m_isEOS; //!< whether EOS has been gotten for all media streams bool m_nowKeyFrame; //!< whether current frames are key frames for each corresponding media stream uint64_t m_prevSegNum; //!< previously written segments number - pthread_mutex_t m_mutex; //!< thread mutex for main segmentation thread + std::mutex m_mutex; //!< thread mutex for main segmentation thread bool m_isFramesReady; //!< whether frames are ready for extractor track uint16_t m_aveETPerSegThread; //!< average extractor tracks number in segmentation thread uint16_t m_lastETPerSegThread; //!< extractor tracks number in last segmentation thread uint16_t m_threadNumForET; //!< threads number for extractor track segmentation + uint32_t m_videosNum; //!< video streams number + uint64_t *m_videosBitrate; //!< video stream bitrate array + uint64_t m_prevSegedFrmNum; //!< previous number of frames which have been segmented for their tile tracks + uint64_t m_currSegedFrmNum; //!< newest number of frames which have been segmented for their tile tracks + uint64_t m_currProcessedFrmNum;//!< newest number of frames which have been segmented for both tiles tracks and extractor tracks + bool m_isMpdGenInit; //!< flag for whether MPD generator has been initialized }; VCD_NS_END; diff --git a/src/VROmafPacking/ExtractorTrack.cpp b/src/VROmafPacking/ExtractorTrack.cpp index dbefded6..cb66206b 100644 --- a/src/VROmafPacking/ExtractorTrack.cpp +++ b/src/VROmafPacking/ExtractorTrack.cpp @@ -33,8 +33,7 @@ #include "360SCVPAPI.h" #include "ExtractorTrack.h" -#include "VideoStream.h" -#include "../utils/OmafStructure.h" +#include "VideoStreamPluginAPI.h" VCD_NS_BEGIN @@ -42,7 +41,7 @@ ExtractorTrack::ExtractorTrack() { m_streams = NULL; m_viewportIdx = 0; - m_projType = 0; + m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; m_dstRwpk = NULL; m_dstCovi = NULL; @@ -54,8 +53,6 @@ ExtractorTrack::ExtractorTrack() m_rwpkSEI = NULL; m_processedFrmNum = 0; - m_isFramesReady = false; - //pthread_mutex_init(&m_mutex, NULL); m_360scvpParam = NULL; m_dstWidth = 0; m_dstHeight = 0; @@ -66,10 +63,12 @@ int32_t ExtractorTrack::Initialize() m_dstRwpk = new RegionWisePacking; if (!m_dstRwpk) return OMAF_ERROR_NULL_PTR; + memset_s(m_dstRwpk, 
sizeof(RegionWisePacking), 0); m_dstCovi = new ContentCoverage; if (!m_dstCovi) return OMAF_ERROR_NULL_PTR; + memset_s(m_dstCovi, sizeof(ContentCoverage), 0); m_tilesMergeDir = new TilesMergeDirectionInCol; if (!m_tilesMergeDir) @@ -78,39 +77,32 @@ int32_t ExtractorTrack::Initialize() m_vps = new Nalu; if (!m_vps) return OMAF_ERROR_NULL_PTR; - memset(m_vps, 0, sizeof(Nalu)); + memset_s(m_vps, sizeof(Nalu), 0); m_sps = new Nalu; if (!m_sps) return OMAF_ERROR_NULL_PTR; - memset(m_sps, 0, sizeof(Nalu)); + memset_s(m_sps, sizeof(Nalu), 0); m_pps = new Nalu; if (!m_pps) return OMAF_ERROR_NULL_PTR; - memset(m_pps, 0, sizeof(Nalu)); + memset_s(m_pps, sizeof(Nalu), 0); m_projSEI = new Nalu; if (!m_projSEI) return OMAF_ERROR_NULL_PTR; - memset(m_projSEI, 0, sizeof(Nalu)); + memset_s(m_projSEI, sizeof(Nalu), 0); m_rwpkSEI = new Nalu; if (!m_rwpkSEI) return OMAF_ERROR_NULL_PTR; - memset(m_rwpkSEI, 0, sizeof(Nalu)); + memset_s(m_rwpkSEI, sizeof(Nalu), 0); m_360scvpParam = new param_360SCVP; if (!m_360scvpParam) return OMAF_ERROR_NULL_PTR; - memset(m_360scvpParam, 0, sizeof(param_360SCVP)); - - int32_t ret = pthread_mutex_init(&m_mutex, NULL); - if (ret) - { - LOG(ERROR) << "Failed to initialize mutex for extractor track !" << std::endl; - return ret; - } + memset_s(m_360scvpParam, sizeof(param_360SCVP), 0); return ERROR_NONE; } @@ -119,7 +111,7 @@ ExtractorTrack::ExtractorTrack(uint8_t viewportIdx, std::mapinlineData = new uint8_t[256]; if (!inlineCtor->inlineData) @@ -329,22 +356,18 @@ int32_t ExtractorTrack::GenerateExtractors() DELETE_MEMORY(inlineCtor); return OMAF_ERROR_NULL_PTR; } - memset(inlineCtor->inlineData, 0, 256); + memset_s(inlineCtor->inlineData, 256, 0); - if (m_360scvpHandles.size() < m_streams->size()) + std::map::iterator itHdl; + itHdl = m_360scvpHandles.find((MediaStream*)video); + if (itHdl == m_360scvpHandles.end()) { void *handle = I360SCVP_New(video->Get360SCVPHandle()); m_360scvpHandles.insert(std::make_pair((MediaStream*)video, handle)); } - void *m_360scvpHandle = m_360scvpHandles[(MediaStream*)video]; - memcpy(m_360scvpParam, video->Get360SCVPParam(), sizeof(param_360SCVP)); + memcpy_s(m_360scvpParam, sizeof(param_360SCVP), video->Get360SCVPParam(), sizeof(param_360SCVP)); - if (tileIdx == 0) - { - m_dstWidth = m_360scvpParam->destWidth; - m_dstHeight = m_360scvpParam->destHeight; - } if (!m_dstWidth || !m_dstHeight) { DELETE_MEMORY(extractor); @@ -364,7 +387,7 @@ int32_t ExtractorTrack::GenerateExtractors() DELETE_MEMORY(inlineCtor); return OMAF_ERROR_NULL_PTR; } - memcpy(tempData, tileInfo->tileNalu->data, tileInfo->tileNalu->dataSize); + memcpy_s(tempData, tileInfo->tileNalu->dataSize, tileInfo->tileNalu->data, tileInfo->tileNalu->dataSize); tempData[0] = 0; tempData[1] = 0; @@ -387,7 +410,7 @@ int32_t ExtractorTrack::GenerateExtractors() inlineCtor->length = DASH_SAMPLELENFIELD_SIZE + m_360scvpParam->outputBitstreamLen - HEVC_STARTCODES_LEN; - memset(inlineCtor->inlineData, 0xff, DASH_SAMPLELENFIELD_SIZE); + memset_s(inlineCtor->inlineData, DASH_SAMPLELENFIELD_SIZE, 0xff); extractor->inlineConstructor.push_back(inlineCtor); @@ -417,7 +440,6 @@ int32_t ExtractorTrack::GenerateExtractors() DELETE_ARRAY(tempData); } } - m_isFramesReady = false; return ERROR_NONE; } @@ -456,7 +478,6 @@ int32_t ExtractorTrack::DestroyExtractors() } m_extractors.clear(); - m_isFramesReady = false; return ERROR_NONE; } @@ -525,10 +546,10 @@ int32_t ExtractorTrack::UpdateExtractors() if (!(inlineCtor->inlineData)) return OMAF_ERROR_NULL_PTR; - memset(inlineCtor->inlineData, 0, 256); + 
memset_s(inlineCtor->inlineData, 256, 0); void *m_360scvpHandle = m_360scvpHandles[(MediaStream*)video]; - memcpy(m_360scvpParam, video->Get360SCVPParam(), sizeof(param_360SCVP)); + memcpy_s(m_360scvpParam, sizeof(param_360SCVP), video->Get360SCVPParam(), sizeof(param_360SCVP)); m_360scvpParam->destWidth = m_dstWidth; m_360scvpParam->destHeight = m_dstHeight; @@ -537,7 +558,7 @@ int32_t ExtractorTrack::UpdateExtractors() if (!tempData) return OMAF_ERROR_NULL_PTR; - memcpy(tempData, tileInfo->tileNalu->data, tileInfo->tileNalu->dataSize); + memcpy_s(tempData, tileInfo->tileNalu->dataSize, tileInfo->tileNalu->data, tileInfo->tileNalu->dataSize); tempData[0] = 0; tempData[1] = 0; @@ -557,7 +578,7 @@ int32_t ExtractorTrack::UpdateExtractors() inlineCtor->length = DASH_SAMPLELENFIELD_SIZE + m_360scvpParam->outputBitstreamLen - HEVC_STARTCODES_LEN; - memset(inlineCtor->inlineData, 0xff, DASH_SAMPLELENFIELD_SIZE); + memset_s(inlineCtor->inlineData, DASH_SAMPLELENFIELD_SIZE, 0xff); SampleConstructor *sampleCtor = extractor->sampleConstructor.front(); if (!sampleCtor) @@ -576,7 +597,6 @@ int32_t ExtractorTrack::UpdateExtractors() DELETE_ARRAY(tempData); } } - m_isFramesReady = false; return ERROR_NONE; } @@ -748,7 +768,7 @@ int32_t ExtractorTrack::SetNalu(Nalu *srcNalu, Nalu *dstNalu) if (!(dstNalu->data)) return OMAF_ERROR_NULL_PTR; - memcpy(dstNalu->data, srcNalu->data, srcNalu->dataSize); + memcpy_s(dstNalu->data, srcNalu->dataSize, srcNalu->data, srcNalu->dataSize); dstNalu->startCodesSize = srcNalu->startCodesSize; dstNalu->naluType = srcNalu->naluType; diff --git a/src/VROmafPacking/ExtractorTrack.h b/src/VROmafPacking/ExtractorTrack.h index 68dd5c1a..7c9f85ff 100644 --- a/src/VROmafPacking/ExtractorTrack.h +++ b/src/VROmafPacking/ExtractorTrack.h @@ -36,12 +36,14 @@ #define _EXTRACTORTRACK_H_ #include "VROmafPacking_data.h" -#include "definitions.h" +#include "VROmafPacking_def.h" #include "MediaStream.h" #include "RegionWisePackingGenerator.h" +#include "../utils/OmafStructure.h" #include #include +#include VCD_NS_BEGIN @@ -88,8 +90,6 @@ struct InlineConstructor //! struct Extractor { - //NaluHeader *naluHeader; - //uint8_t constructorType; std::list sampleConstructor; std::list inlineConstructor; }; @@ -119,6 +119,10 @@ class ExtractorTrack //! ExtractorTrack(uint8_t viewportIdx, std::map *streams, uint16_t projType); + ExtractorTrack(const ExtractorTrack& src); + + ExtractorTrack& operator=(ExtractorTrack&& other); + //! //! \brief Destructor //! @@ -174,15 +178,13 @@ class ExtractorTrack //! std::map* GetAllExtractors() { return &m_extractors; }; - //std::map* GetAllRefTrackIds() { return &m_refTrackIds; }; - //! //! \brief Get projection type of the video frame //! - //! \return uint16_t - //! 0 is ERP, and 1 is CubeMap + //! \return VCD::OMAF::ProjectionFormat + //! PF_ERP is ERP, and PF_CUBEMAP is CubeMap //! - uint16_t GetProjType() { return m_projType; }; + VCD::OMAF::ProjectionFormat GetProjType() { return m_projType; }; //! //! \brief Get the region wise packing information for this extractor track @@ -301,20 +303,7 @@ class ExtractorTrack //! uint64_t GetProcessedFrmNum() { - int32_t ret = 0; - ret = pthread_mutex_lock(&m_mutex); - if (ret) - { - LOG(ERROR) << "Failed to lock mutex in Extractor Track for getting processed frames number !" << std::endl; - return 0; - } uint64_t processedFrmNum = m_processedFrmNum; - ret = pthread_mutex_unlock(&m_mutex); - if (ret) - { - LOG(ERROR) << "Failed to unlock mutex in Extractor Track for getting processed frames number !" 
<< std::endl; - return 0; - } return processedFrmNum; }; @@ -325,126 +314,16 @@ class ExtractorTrack //! void IncreaseProcessedFrmNum() { - int32_t ret = 0; - ret = pthread_mutex_lock(&m_mutex); - if (ret) - { - LOG(ERROR) << "Failed to lock mutex in Extractor Track for increasing processed frames number !" << std::endl; - return; - } m_processedFrmNum++; - ret = pthread_mutex_unlock(&m_mutex); - if (ret) - { - LOG(ERROR) << "Failed to unlock mutex in Extractor Track for increasing processed frames number !" << std::endl; - return; - } } - //! - //! \brief Set current frames ready status for extractor track - //! - //! \param [in] isFramesReady - //! whether current frames are ready for extractor track - //! - //! \return void - //! - void SetFramesReady(bool isFramesReady) - { - int32_t ret = 0; - ret = pthread_mutex_lock(&m_mutex); - if (ret) - { - LOG(ERROR) << "Failed to lock mutex in Extractor Track for setting frames ready status !" << std::endl; - return; - } - m_isFramesReady = isFramesReady; - ret = pthread_mutex_unlock(&m_mutex); - if (ret) - { - LOG(ERROR) << "Failed to unlock mutex in Extractor Track for setting frames ready status !" << std::endl; - return; - } - }; - - //! - //! \brief Get current frames ready status for extractor track - //! - //! \return bool - //! whether current frames are ready for extractor track - //! - bool GetFramesReadyStatus() - { - int32_t ret = 0; - ret = pthread_mutex_lock(&m_mutex); - if (ret) - { - LOG(ERROR) << "Failed to lock mutex in Extractor Track for getting frames ready status !" << std::endl; - return false; - } - bool isFramesReady = m_isFramesReady; - ret = pthread_mutex_unlock(&m_mutex); - if (ret) - { - LOG(ERROR) << "Failed to unlock mutex in Extractor Track for getting frames ready status !" << std::endl; - return false; - } - return isFramesReady; - }; + uint8_t GetViewportId() { return m_viewportIdx; }; -private: - //! - //! \brief Get the data offset of the first byte within - //! the sample of specified tile to copy - //! - //! \param [in] data - //! pointer to the whole bitstream data for current frame - //! \param [in] dataSize - //! the size of bitstream data for current frame - //! \param [in] tileIdx - //! the index of specified tile in the current frame - //! - //! \return uint32_t - //! the data offset of the first byte within - //! the sample of specified tile to copy - //! - //uint32_t GetSampleDataOffset(uint8_t *data, int32_t dataSize, uint16_t tileIdx); + void SetPackedPicWidth(uint32_t packedWidth) { m_dstWidth = packedWidth; }; - //! - //! \brief Get the number of bytes to copy within - //! the sample of specified tile - //! - //! \param [in] data - //! pointer to the whole bitstream data for current frame - //! \param [in] dataSize - //! the size of bitstream data for current frame - //! \param [in] tileIdx - //! the index of specified tile in the current frame - //! - //! \return uint32_t - //! the number of bytes to copy within - //! the sample of specified tile - //! - //uint32_t GetSampleDataSize(uint8_t *data, int32_t dataSize, uint16_t tileIdx); + void SetPackedPicHeight(uint32_t packedHeight) { m_dstHeight = packedHeight; }; - //! - //! \brief Generate the new slice header for the specified tile - //! in final packed frame corresponding to extractor track - //! - //! \param [in] data - //! the pointer to the bitstream data for current frame - //! \param [in] dstTileIdx - //! the index of specified tile in final packed frame - //! \param [in] dstRwpk - //! 
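// Illustrative sketch (not from this patch): the memset/memcpy ->
// memset_s/memcpy_s changes in ExtractorTrack.cpp above use the bounded forms
// that take the destination size explicitly (safestringlib-style:
// memset_s(dst, dstSize, value), memcpy_s(dst, dstSize, src, copySize)).
// The header name and helper below are assumptions.
#include <cstdint>
#include "safe_mem_lib.h"   // assumed safestringlib header providing memset_s/memcpy_s

static bool CloneBuffer(uint8_t *dst, uint32_t dstSize,
                        const uint8_t *src, uint32_t srcSize)
{
    if (!dst || !src || dstSize < srcSize)
        return false;                          // never copy past the destination

    memset_s(dst, dstSize, 0);                 // clear the whole destination first
    memcpy_s(dst, dstSize, src, srcSize);      // bounded copy of the payload
    return true;
}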
the pointer to the region wise packing information - //! of packed frame - //! \param [in] newHeader - //! the pointer to the new slice header for the specified - //! tile in packed frame - //! - //! \return void - //! - //void GenerateNewSliceHeader(uint8_t *data, uint8_t dstTileIdx, RegionWisePacking *dstRwpk, uint8_t *newHeader); +private: //! //! \brief Generate projection SEI @@ -465,7 +344,7 @@ class ExtractorTrack private: std::map *m_streams; //!< media streams map set up in OmafPackage uint8_t m_viewportIdx; //!< the index of viewport corresponding to extractor track - uint16_t m_projType; //!< projection type of the video frame + VCD::OMAF::ProjectionFormat m_projType; //!< projection type of the video frame RegionWisePacking *m_dstRwpk; //!< pointer to the region wise packing information of extractor track ContentCoverage *m_dstCovi; //!< pointer to the content coverage information of extractor track std::map m_extractors; //!< map of all extractors belong to the extractor track @@ -481,10 +360,8 @@ class ExtractorTrack param_360SCVP *m_360scvpParam; //!< 360SCVP library parameter std::map m_360scvpHandles; //!< map of 360SCVP library handle and corresponding media stream uint64_t m_processedFrmNum; //!< processed frames number in extractor track - bool m_isFramesReady; //!< whether frames are ready for extractor track - pthread_mutex_t m_mutex; //!< thread mutex for extractor track segmentation thread - int32_t m_dstWidth; - int32_t m_dstHeight; + uint32_t m_dstWidth; + uint32_t m_dstHeight; }; VCD_NS_END; diff --git a/src/VROmafPacking/ExtractorTrackGenerator.cpp b/src/VROmafPacking/ExtractorTrackGenerator.cpp new file mode 100644 index 00000000..a4780ab0 --- /dev/null +++ b/src/VROmafPacking/ExtractorTrackGenerator.cpp @@ -0,0 +1,1266 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ExtractorTrackGenerator.cpp +//! \brief: Extractor track generator class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! 
+ +#include +#include + +#include "ExtractorTrackGenerator.h" +#include "VideoStreamPluginAPI.h" +#ifdef _USE_TRACE_ +#include "../trace/Bandwidth_tp.h" +#endif + +VCD_NS_BEGIN + +ExtractorTrackGenerator::~ExtractorTrackGenerator() +{ + if (m_tilesSelection.size()) + { + std::map>::iterator it; + for (it = m_tilesSelection.begin(); it != m_tilesSelection.end(); ) + { + std::map oneLayout = it->second; + std::map::iterator it1; + for (it1 = oneLayout.begin(); it1 != oneLayout.end(); ) + { + TileDef *oneTile = it1->second; + DELETE_ARRAY(oneTile); + oneLayout.erase(it1++); + } + oneLayout.clear(); + m_tilesSelection.erase(it++); + } + + m_tilesSelection.clear(); + } + + if (m_middleSelection.size()) + { + std::map>::iterator it; + for (it = m_middleSelection.begin(); it != m_middleSelection.end(); ) + { + std::map oneLayout = it->second; + oneLayout.clear(); + m_middleSelection.erase(it++); + } + + m_middleSelection.clear(); + } + + if (m_viewportCCInfo.size()) + { + std::map::iterator it; + for (it = m_viewportCCInfo.begin(); it != m_viewportCCInfo.end(); ) + { + CCDef *oneCC = it->second; + DELETE_MEMORY(oneCC); + m_viewportCCInfo.erase(it++); + } + m_viewportCCInfo.clear(); + } + + m_middleCCInfo.clear(); + + if (m_rwpkGenMap.size()) + { + std::map::iterator it; + for (it = m_rwpkGenMap.begin(); it != m_rwpkGenMap.end(); ) + { + RegionWisePackingGenerator *rwpkGen = it->second; + DELETE_MEMORY(rwpkGen); + m_rwpkGenMap.erase(it++); + } + m_rwpkGenMap.clear(); + } + + DELETE_ARRAY(m_videoIdxInMedia); + if (m_newSPSNalu) + { + DELETE_ARRAY(m_newSPSNalu->data); + } + DELETE_MEMORY(m_newSPSNalu); + if (m_newPPSNalu) + { + DELETE_ARRAY(m_newPPSNalu->data); + } + DELETE_MEMORY(m_newPPSNalu); + m_360scvpParam = NULL; + m_360scvpHandle = NULL; +} + +int32_t ExtractorTrackGenerator::SelectTilesInView( + float yaw, float pitch, + uint8_t tileInRow, uint8_t tileInCol) +{ + if (!m_360scvpParam || !m_360scvpHandle) + { + OMAF_LOG(LOG_ERROR, "360SCVP should be set up before selecting tiles based on viewport !\n"); + return OMAF_ERROR_NULL_PTR; + } + + if ((yaw < -180.0) || (yaw > 180.0)) + { + OMAF_LOG(LOG_ERROR, "Invalid yaw in selecting tiles based on viewport !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + if ((pitch < -90.0) || (pitch > 90.0)) + { + OMAF_LOG(LOG_ERROR, "Invalid pitch in selecting tiles based on viewport !\n"); + return OMAF_ERROR_INVALID_DATA; + } + + int32_t ret = I360SCVP_setViewPort(m_360scvpHandle, yaw, pitch); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed to set viewport !\n"); + return OMAF_ERROR_SCVP_SET_FAILED; + } + + ret = I360SCVP_process(m_360scvpParam, m_360scvpHandle); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed in 360SCVP process !\n"); + return OMAF_ERROR_SCVP_PROCESS_FAILED; + } + + uint64_t totalTiles = tileInRow * tileInCol; + TileDef *tilesInView = new TileDef[1024]; + if (!tilesInView) + { + OMAF_LOG(LOG_ERROR, "Failed to create tiles def array !\n"); + return OMAF_ERROR_NULL_PTR; + } + + memset(tilesInView, 0, 1024 * sizeof(TileDef)); + + Param_ViewportOutput paramViewport; + int32_t selectedTilesNum = 0; + selectedTilesNum = I360SCVP_getTilesInViewport(tilesInView, ¶mViewport, m_360scvpHandle); + + #ifdef _USE_TRACE_ + tracepoint(bandwidth_tp_provider, tiles_selection_redundancy, + paramViewport.dstWidthNet, + paramViewport.dstHeightNet, + paramViewport.dstWidthAlignTile, + paramViewport.dstHeightAlignTile, + paramViewport.dstWidthAlignTile / (m_initInfo->viewportInfo)->viewportWidth, + paramViewport.dstHeightAlignTile / 
(m_initInfo->viewportInfo)->viewportHeight); + #endif + + if ((selectedTilesNum <= 0) || ((uint64_t)(selectedTilesNum) > totalTiles)) + { + OMAF_LOG(LOG_ERROR, "Unreasonable selected tiles number based on viewport !\n"); + delete [] tilesInView; + tilesInView = NULL; + return OMAF_ERROR_SCVP_INCORRECT_RESULT; + } + + uint32_t sqrtedSize = (uint32_t)sqrt(selectedTilesNum); + while(sqrtedSize && (selectedTilesNum % sqrtedSize)) { sqrtedSize--; } + if (sqrtedSize == 1) + { + OMAF_LOG(LOG_INFO, "Additional tile needs to be selected for tiles stitching !\n"); + selectedTilesNum++; + tilesInView[selectedTilesNum-1].x = tilesInView[0].x; + tilesInView[selectedTilesNum-1].y = tilesInView[0].y; + tilesInView[selectedTilesNum-1].idx = tilesInView[0].idx; + tilesInView[selectedTilesNum-1].faceId = tilesInView[0].faceId; + } + + //adjust selected tiles number again to make sure packed sub-picture width/height ratio to a normal range + sqrtedSize = (uint32_t)sqrt(selectedTilesNum); + while(sqrtedSize && (selectedTilesNum % sqrtedSize)) { sqrtedSize--; } + uint32_t dividedSize = selectedTilesNum / sqrtedSize; + uint32_t supplementedNum = 0; + + if (((sqrtedSize > dividedSize) && ((sqrtedSize - dividedSize) > 3)) || + ((dividedSize > sqrtedSize) && ((dividedSize - sqrtedSize) > 3))) + { + OMAF_LOG(LOG_INFO, "High packed sub-picture width/height ratio %u : %u\n", (sqrtedSize > dividedSize ? sqrtedSize : dividedSize), (sqrtedSize > dividedSize ? dividedSize : sqrtedSize)); + } + + while ((sqrtedSize > dividedSize) && ((sqrtedSize - dividedSize) > 3)) + { + selectedTilesNum++; + supplementedNum++; + tilesInView[selectedTilesNum-1].x = tilesInView[supplementedNum].x; + tilesInView[selectedTilesNum-1].y = tilesInView[supplementedNum].y; + tilesInView[selectedTilesNum-1].idx = tilesInView[supplementedNum].idx; + tilesInView[selectedTilesNum-1].faceId = tilesInView[supplementedNum].faceId; + sqrtedSize = (uint32_t)sqrt(selectedTilesNum); + while(sqrtedSize && (selectedTilesNum % sqrtedSize)) { sqrtedSize--; } + if (sqrtedSize == 1) + { + selectedTilesNum++; + supplementedNum++; + tilesInView[selectedTilesNum-1].x = tilesInView[supplementedNum].x; + tilesInView[selectedTilesNum-1].y = tilesInView[supplementedNum].y; + tilesInView[selectedTilesNum-1].idx = tilesInView[supplementedNum].idx; + tilesInView[selectedTilesNum-1].faceId = tilesInView[supplementedNum].faceId; + sqrtedSize = (uint32_t)sqrt(selectedTilesNum); + while(sqrtedSize && (selectedTilesNum % sqrtedSize)) { sqrtedSize--; } + } + + dividedSize = selectedTilesNum / sqrtedSize; + } + while (( dividedSize > sqrtedSize) && ((dividedSize - sqrtedSize) > 3)) + { + selectedTilesNum++; + supplementedNum++; + tilesInView[selectedTilesNum-1].x = tilesInView[supplementedNum].x; + tilesInView[selectedTilesNum-1].y = tilesInView[supplementedNum].y; + tilesInView[selectedTilesNum-1].idx = tilesInView[supplementedNum].idx; + tilesInView[selectedTilesNum-1].faceId = tilesInView[supplementedNum].faceId; + sqrtedSize = (uint32_t)sqrt(selectedTilesNum); + while(sqrtedSize && (selectedTilesNum % sqrtedSize)) { sqrtedSize--; } + if (sqrtedSize == 1) + { + selectedTilesNum++; + supplementedNum++; + tilesInView[selectedTilesNum-1].x = tilesInView[supplementedNum].x; + tilesInView[selectedTilesNum-1].y = tilesInView[supplementedNum].y; + tilesInView[selectedTilesNum-1].idx = tilesInView[supplementedNum].idx; + tilesInView[selectedTilesNum-1].faceId = tilesInView[supplementedNum].faceId; + sqrtedSize = (uint32_t)sqrt(selectedTilesNum); + while(sqrtedSize && 
(selectedTilesNum % sqrtedSize)) { sqrtedSize--; } + } + + dividedSize = selectedTilesNum / sqrtedSize; + } + + if (supplementedNum > 0) + { + OMAF_LOG(LOG_INFO, "Supplement %u tiles for packed sub-picture width/height ratio\n", supplementedNum); + + OMAF_LOG(LOG_INFO, "Now packed sub-picture width/height ratio %u : %u\n", (dividedSize > sqrtedSize ? dividedSize : sqrtedSize), (dividedSize > sqrtedSize ? sqrtedSize : dividedSize)); + } + + CCDef *outCC = new CCDef; + if (!outCC) + { + delete [] tilesInView; + tilesInView = NULL; + return OMAF_ERROR_NULL_PTR; + } + ret = I360SCVP_getContentCoverage(m_360scvpHandle, outCC); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed to calculate Content coverage information !\n"); + delete [] tilesInView; + tilesInView = NULL; + delete outCC; + outCC = NULL; + return OMAF_ERROR_SCVP_INCORRECT_RESULT; + } + + std::map>::iterator it; + it = m_middleSelection.find((uint16_t)selectedTilesNum); + if (it == m_middleSelection.end()) + { + std::map oneLayout; + oneLayout.insert(std::make_pair(m_middleViewNum, tilesInView)); + m_middleSelection.insert(std::make_pair((uint16_t)selectedTilesNum, oneLayout)); + m_middleCCInfo.insert(std::make_pair(m_middleViewNum, outCC)); + m_middleViewNum++; + } + else + { + std::map* oneLayout = &(it->second); + std::map::iterator it1; + uint64_t diffNum = 0; + for (it1 = oneLayout->begin(); it1 != oneLayout->end(); it1++) + { + TileDef *oneTilesSet = it1->second; + if (!oneTilesSet) + { + DELETE_ARRAY(tilesInView); + DELETE_MEMORY(outCC); + return OMAF_ERROR_NULL_PTR; + } + + uint16_t i = 0; + for ( ; i < selectedTilesNum; i++) + { + TileDef *oneTile = &(tilesInView[i]); + if (!oneTile) + { + DELETE_ARRAY(tilesInView); + DELETE_MEMORY(outCC); + return OMAF_ERROR_NULL_PTR; + } + + uint16_t j = 0; + for ( ; j < selectedTilesNum; j++) + { + TileDef *tile = &(oneTilesSet[j]); + if ((oneTile->x == tile->x) && (oneTile->y == tile->y) && + (oneTile->idx == tile->idx) && (oneTile->faceId == tile->faceId)) + { + break; + } + } + if (j == selectedTilesNum) + break; + } + if (i < selectedTilesNum ) + { + diffNum++; + } + } + if (diffNum == oneLayout->size()) + { + oneLayout->insert(std::make_pair(m_middleViewNum, tilesInView)); + m_middleCCInfo.insert(std::make_pair(m_middleViewNum, outCC)); + m_middleViewNum++; + } + else + { + DELETE_ARRAY(tilesInView); + DELETE_MEMORY(outCC); + } + } + + return ERROR_NONE; +} + +static bool IsIncluded(int32_t *firstIds, uint16_t idsNum1, int32_t *secondIds, uint16_t idsNum2) +{ + bool included = false; + if (idsNum1 > idsNum2) + { + included = false; + } + else + { + uint16_t includedNum = 0; + for (uint16_t i = 0; i < idsNum1; i++) + { + int32_t id1 = firstIds[i]; + for (uint16_t j = 0; j < idsNum2; j++) + { + int32_t id2 = secondIds[j]; + if (id2 == id1) + { + includedNum++; + break; + } + } + } + if (includedNum == idsNum1) + { + included = true; + } + else + { + included = false; + } + } + + return included; +} + +int32_t ExtractorTrackGenerator::RefineTilesSelection() +{ + std::set allSelectedNums; + std::map>::iterator itSelection; + for (itSelection = m_middleSelection.begin(); itSelection != m_middleSelection.end(); itSelection++) + { + uint16_t selectedNum = itSelection->first; + allSelectedNums.insert(selectedNum); + } + + std::map> selectedTilesIds; + std::set::iterator numIter = allSelectedNums.begin(); + for ( ; numIter != allSelectedNums.end(); numIter++) + { + std::map idsMap; + uint16_t currNum = *numIter; + std::map currGroup = m_middleSelection[currNum]; + std::map::iterator it1; + for 
(it1 = currGroup.begin(); it1 != currGroup.end(); it1++) + { + TileDef *tiles = it1->second; + int32_t *idsGroup = new int32_t[currNum]; + memset_s(idsGroup, currNum * sizeof(int32_t), 0); + for (uint16_t selIdx = 0; selIdx < currNum; selIdx++) + { + idsGroup[selIdx] = tiles[selIdx].idx; + } + idsMap.insert(std::make_pair(it1->first, idsGroup)); + } + selectedTilesIds.insert(std::make_pair(currNum, idsMap)); + } + + std::map refinedSelection; + for (numIter = allSelectedNums.begin() ; numIter != allSelectedNums.end(); numIter++) + { + uint16_t currNum = *numIter; + std::map currGroup = selectedTilesIds[currNum]; + std::map::iterator it1; + for (it1 = currGroup.begin(); it1 != currGroup.end(); it1++) + { + int32_t *tiles = it1->second; + bool needReserved = true; + + std::set::iterator largerIter = numIter; + largerIter++; + for ( ; largerIter != allSelectedNums.end(); largerIter++) + { + uint16_t largerNum = *largerIter; + std::map largerGroup = selectedTilesIds[largerNum]; + std::map::iterator it2; + for (it2 = largerGroup.begin(); it2 != largerGroup.end(); it2++) + { + int32_t *largerTiles = it2->second; + + bool included = IsIncluded(tiles, currNum, largerTiles, largerNum); + needReserved = !included; + if (!needReserved) + break; + } + if (!needReserved) + break; + } + refinedSelection.insert(std::make_pair(it1->first, needReserved)); + } + } + + std::map>::iterator it3; + for (it3 = m_middleSelection.begin(); it3 != m_middleSelection.end(); it3++) + { + uint16_t selectedTilesNum = it3->first; + std::map existedSelection = it3->second; + std::map::iterator it5; + for (it5 = existedSelection.begin(); it5 != existedSelection.end(); it5++) + { + std::map>::iterator it4; + it4 = m_tilesSelection.find(selectedTilesNum); + + if (it4 == m_tilesSelection.end()) + { + std::map oneSelection; + uint16_t viewIdx = it5->first; + bool reserved = refinedSelection[viewIdx]; + if (reserved) + { + oneSelection.insert(std::make_pair(m_viewportNum, it5->second)); + m_tilesSelection.insert(std::make_pair(selectedTilesNum, oneSelection)); + m_viewportCCInfo.insert(std::make_pair(m_viewportNum, m_middleCCInfo[viewIdx])); + m_viewportNum++; + } + else + { + DELETE_ARRAY(it5->second); + DELETE_MEMORY(m_middleCCInfo[viewIdx]); + } + } + else + { + std::map* oneSelection = &(it4->second); + + uint16_t viewIdx = it5->first; + bool reserved = refinedSelection[viewIdx]; + if (reserved) + { + oneSelection->insert(std::make_pair(m_viewportNum, it5->second)); + m_viewportCCInfo.insert(std::make_pair(m_viewportNum, m_middleCCInfo[viewIdx])); + m_viewportNum++; + } + else + { + DELETE_ARRAY(it5->second); + DELETE_MEMORY(m_middleCCInfo[viewIdx]); + } + } + } + } + + std::map>::iterator idsIter; + for (idsIter = selectedTilesIds.begin(); idsIter != selectedTilesIds.end(); ) + { + std::map oneIds = idsIter->second; + std::map::iterator tileIdIter; + for (tileIdIter = oneIds.begin(); tileIdIter != oneIds.end(); ) + { + int32_t *idsGroup = tileIdIter->second; + DELETE_ARRAY(idsGroup); + oneIds.erase(tileIdIter++); + } + oneIds.clear(); + selectedTilesIds.erase(idsIter++); + } + selectedTilesIds.clear(); + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::CalculateViewportNum() +{ + if (!m_videoIdxInMedia) + return OMAF_ERROR_NULL_PTR; + + std::map::iterator it; + it = m_streams->find(m_videoIdxInMedia[0]); + if (it == m_streams->end()) + return OMAF_ERROR_STREAM_NOT_FOUND; + + VideoStream *vs = (VideoStream*)(it->second); + uint16_t origWidth = vs->GetSrcWidth(); + uint16_t origHeight = vs->GetSrcHeight(); + 
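// Illustrative sketch (not from this patch): standalone form of the tile-count
// adjustment used in SelectTilesInView() above. It finds the largest factor of
// the selected tile count that is <= its square root; a result of 1 means the
// count is prime, so the caller duplicates a tile (and keeps supplementing
// while the two sides differ by more than 3) to reach a near-square layout.
// Example: 12 tiles -> 3 x 4; 7 tiles -> duplicate one -> 8 -> 2 x 4.
#include <cmath>
#include <cstdint>

static void NearSquareLayout(uint32_t tileCount, uint32_t &shortSide, uint32_t &longSide)
{
    uint32_t f = (uint32_t)sqrt((double)tileCount);
    while (f && (tileCount % f))    // walk down to the nearest divisor
        f--;

    shortSide = f;                  // f == 1 signals a prime (or zero/one) tile count
    longSide  = f ? tileCount / f : 0;
}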
uint8_t tileInRow = vs->GetTileInRow(); + uint8_t tileInCol = vs->GetTileInCol(); + + m_360scvpParam = new param_360SCVP; + if (!m_360scvpParam) + { + OMAF_LOG(LOG_ERROR, "Failed to create 360SCVP parameter !\n"); + return OMAF_ERROR_NULL_PTR; + } + + m_360scvpParam->usedType = E_VIEWPORT_ONLY; + m_360scvpParam->logFunction = (void*)logCallBack; + if (m_initInfo->projType == E_SVIDEO_EQUIRECT) { + m_yawStep = (float)((((origWidth / tileInRow) * 360.00) / origWidth) / 2); + m_pitchStep = (float)((((origHeight / tileInCol) * 180.00) / origHeight) / 2); + + m_360scvpParam->paramViewPort.viewportWidth = (m_initInfo->viewportInfo)->viewportWidth; + m_360scvpParam->paramViewPort.viewportHeight = (m_initInfo->viewportInfo)->viewportHeight; + m_360scvpParam->paramViewPort.viewPortPitch = (m_initInfo->viewportInfo)->viewportPitch; + m_360scvpParam->paramViewPort.viewPortYaw = (m_initInfo->viewportInfo)->viewportYaw; + m_360scvpParam->paramViewPort.viewPortFOVH = (m_initInfo->viewportInfo)->horizontalFOVAngle + m_yawStep; + m_360scvpParam->paramViewPort.viewPortFOVV = (m_initInfo->viewportInfo)->verticalFOVAngle + m_pitchStep; + m_360scvpParam->paramViewPort.geoTypeInput = (EGeometryType)(m_initInfo->projType); + m_360scvpParam->paramViewPort.geoTypeOutput = E_SVIDEO_VIEWPORT; + m_360scvpParam->paramViewPort.tileNumRow = tileInCol; + m_360scvpParam->paramViewPort.tileNumCol = tileInRow; + m_360scvpParam->paramViewPort.usageType = E_VIEWPORT_ONLY; + m_360scvpParam->paramViewPort.faceWidth = origWidth; + m_360scvpParam->paramViewPort.faceHeight = origHeight; + m_360scvpParam->paramViewPort.paramVideoFP.cols = 1; + m_360scvpParam->paramViewPort.paramVideoFP.rows = 1; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + } else if (m_initInfo->projType == E_SVIDEO_CUBEMAP) { + m_yawStep = (float)((((origWidth / tileInRow) * 360.00) / ((origWidth / 3) * 4)) / 2); + m_pitchStep = (float)((((origHeight / tileInCol) * 180.00) / ((origHeight / 2) * 2)) / 2); + + m_360scvpParam->paramViewPort.viewportWidth = (m_initInfo->viewportInfo)->viewportWidth; + m_360scvpParam->paramViewPort.viewportHeight = (m_initInfo->viewportInfo)->viewportHeight; + m_360scvpParam->paramViewPort.viewPortPitch = (m_initInfo->viewportInfo)->viewportPitch; + m_360scvpParam->paramViewPort.viewPortYaw = (m_initInfo->viewportInfo)->viewportYaw; + m_360scvpParam->paramViewPort.viewPortFOVH = (m_initInfo->viewportInfo)->horizontalFOVAngle + m_yawStep* 2; + m_360scvpParam->paramViewPort.viewPortFOVV = (m_initInfo->viewportInfo)->verticalFOVAngle + m_pitchStep* 2; + m_360scvpParam->paramViewPort.geoTypeInput = (EGeometryType)(m_initInfo->projType); + m_360scvpParam->paramViewPort.geoTypeOutput = E_SVIDEO_VIEWPORT; + m_360scvpParam->paramViewPort.tileNumRow = tileInCol / 2; + m_360scvpParam->paramViewPort.tileNumCol = tileInRow / 3; + m_360scvpParam->paramViewPort.usageType = E_VIEWPORT_ONLY; + m_360scvpParam->paramViewPort.faceWidth = origWidth / 3; + m_360scvpParam->paramViewPort.faceHeight = origHeight / 2; + + m_360scvpParam->paramViewPort.paramVideoFP.cols = 3; + m_360scvpParam->paramViewPort.paramVideoFP.rows = 2; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][0].idFace = 0; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][0].rotFace = NO_TRANSFORM; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][0].faceWidth = m_360scvpParam->paramViewPort.faceWidth; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][0].faceHeight = 
m_360scvpParam->paramViewPort.faceHeight; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][1].idFace = 1; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][1].rotFace = NO_TRANSFORM; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][2].idFace = 2; + m_360scvpParam->paramViewPort.paramVideoFP.faces[0][2].rotFace = NO_TRANSFORM; + m_360scvpParam->paramViewPort.paramVideoFP.faces[1][0].idFace = 3; + m_360scvpParam->paramViewPort.paramVideoFP.faces[1][0].rotFace = NO_TRANSFORM; + m_360scvpParam->paramViewPort.paramVideoFP.faces[1][1].idFace = 4; + m_360scvpParam->paramViewPort.paramVideoFP.faces[1][1].rotFace = NO_TRANSFORM; + m_360scvpParam->paramViewPort.paramVideoFP.faces[1][2].idFace = 5; + m_360scvpParam->paramViewPort.paramVideoFP.faces[1][2].rotFace = NO_TRANSFORM; + } + + OMAF_LOG(LOG_INFO, "Yaw and Pitch steps for going through all viewports are %f and %f\n", m_yawStep, m_pitchStep); + + m_360scvpHandle = I360SCVP_Init(m_360scvpParam); + if (!m_360scvpHandle) + { + OMAF_LOG(LOG_ERROR, "Failed to create 360SCVP handle !\n"); + return OMAF_ERROR_SCVP_INIT_FAILED; + } + + for (float one_yaw = -180.0; one_yaw <= 180.0; ) + { + for (float one_pitch = -90.0; one_pitch <= 90.0; ) + { + int ret = SelectTilesInView(one_yaw, one_pitch, tileInRow, tileInCol); + if (ret) + return ret; + + one_pitch += m_pitchStep; + } + + one_yaw += m_yawStep; + } + + if (m_middleViewNum > 100) + { + OMAF_LOG(LOG_INFO, "Too many extractor tracks, now need to refine tiles selection!\n"); + RefineTilesSelection(); + } + else + { + m_tilesSelection = m_middleSelection; + m_viewportNum = m_middleViewNum; + m_viewportCCInfo = m_middleCCInfo; + } + + DELETE_MEMORY(m_360scvpParam); + I360SCVP_unInit(m_360scvpHandle); + m_360scvpHandle = NULL; + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::FillDstRegionWisePacking( + RegionWisePackingGenerator *rwpkGen, + TileDef *tilesInViewport, + RegionWisePacking *dstRwpk) +{ + if (!rwpkGen || !tilesInViewport || !dstRwpk) + return OMAF_ERROR_NULL_PTR; + + dstRwpk->projPicWidth = m_origResWidth; + dstRwpk->projPicHeight = m_origResHeight; + + int32_t ret = rwpkGen->GenerateDstRwpk(tilesInViewport, dstRwpk); + if (ret) + return ret; + + m_packedPicWidth = rwpkGen->GetPackedPicWidth(); + m_packedPicHeight = rwpkGen->GetPackedPicHeight(); + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::FillTilesMergeDirection( + RegionWisePackingGenerator *rwpkGen, + TileDef *tilesInViewport, + TilesMergeDirectionInCol *tilesMergeDir) +{ + if (!rwpkGen || !tilesInViewport || !tilesMergeDir) + return OMAF_ERROR_NULL_PTR; + + int32_t ret = rwpkGen->GenerateTilesMergeDirection(tilesInViewport, tilesMergeDir); + if (ret) + return ret; + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::FillDstContentCoverage( + uint16_t viewportIdx, + ContentCoverage *dstCovi) +{ + CCDef *viewportCC = NULL; + viewportCC = m_viewportCCInfo[viewportIdx]; + if (!viewportCC) + { + OMAF_LOG(LOG_ERROR, "There is no calculated CC info for the viewport !\n"); + return OMAF_ERROR_NULL_PTR; + } + + if (m_projType == VCD::OMAF::ProjectionFormat::PF_ERP) + { + dstCovi->coverageShapeType = 1; + } + else + { + dstCovi->coverageShapeType = 0; + } + + dstCovi->numRegions = 1; + dstCovi->viewIdcPresenceFlag = false; + dstCovi->defaultViewIdc = 0; + + dstCovi->sphereRegions = new SphereRegion[dstCovi->numRegions]; + if (!dstCovi->sphereRegions) + return OMAF_ERROR_NULL_PTR; + + SphereRegion *sphereRegion = &(dstCovi->sphereRegions[0]); + memset_s(sphereRegion, sizeof(SphereRegion), 0); + 
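    // The single sphere region filled in below carries the coverage pre-computed for this viewport
    // (the CCDef cached in m_viewportCCInfo): centre azimuth/elevation plus azimuth/elevation ranges.
    // In OMAF terms, the shape type chosen above corresponds to two azimuth and two elevation
    // circles for ERP content (coverageShapeType = 1) and four great circles otherwise
    // (coverageShapeType = 0).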
sphereRegion->viewIdc = 0; + sphereRegion->centreAzimuth = viewportCC->centreAzimuth; + sphereRegion->centreElevation = viewportCC->centreElevation; + sphereRegion->centreTilt = 0; + sphereRegion->azimuthRange = viewportCC->azimuthRange; + sphereRegion->elevationRange = viewportCC->elevationRange; + sphereRegion->interpolate = 0; + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::CheckAndFillInitInfo() +{ + if (!m_initInfo) + return OMAF_ERROR_NULL_PTR; + + uint8_t actualVideoNum = 0; + uint8_t totalStreamNum = m_initInfo->bsNumVideo + m_initInfo->bsNumAudio; + m_videoIdxInMedia = new uint8_t[totalStreamNum]; + if (!m_videoIdxInMedia) + return OMAF_ERROR_NULL_PTR; + + memset_s(m_videoIdxInMedia, totalStreamNum * sizeof(uint8_t), 0); + + for (uint8_t streamIdx = 0; streamIdx < totalStreamNum; streamIdx++) + { + BSBuffer *bs = &(m_initInfo->bsBuffers[streamIdx]); + if (bs->mediaType == VIDEOTYPE) + { + actualVideoNum++; + } + } + + if (actualVideoNum != m_initInfo->bsNumVideo) + return OMAF_ERROR_VIDEO_NUM; + + std::set bitRateRanking; + + std::map::iterator it; + for (it = m_streams->begin(); it != m_streams->end(); it++) + { + MediaStream *stream = it->second; + if (stream->GetMediaType() == VIDEOTYPE) + { + VideoStream *vs = (VideoStream*)stream; + uint64_t bitRate = vs->GetBitRate(); + bitRateRanking.insert(bitRate); + } + } + + std::set::reverse_iterator rateIter = bitRateRanking.rbegin(); + uint8_t vsIdx = 0; + for ( ; rateIter != bitRateRanking.rend(); rateIter++) + { + uint64_t bitRate = *rateIter; + for (it = m_streams->begin(); it != m_streams->end(); it++) + { + MediaStream *stream = it->second; + if (stream->GetMediaType() == VIDEOTYPE) + { + VideoStream *vs = (VideoStream*)stream; + uint64_t videoBitRate = vs->GetBitRate(); + if (videoBitRate == bitRate) + { + m_videoIdxInMedia[vsIdx] = it->first; //rank video index from largest bitrate to smallest bitrate + break; + } + } + } + vsIdx++; + } + + uint8_t mainVSId = m_videoIdxInMedia[0]; + it = m_streams->find(mainVSId); + if (it == m_streams->end()) + return OMAF_ERROR_STREAM_NOT_FOUND; + + VideoStream *mainVS = (VideoStream*)(it->second); + m_origResWidth = mainVS->GetSrcWidth(); + m_origResHeight = mainVS->GetSrcHeight(); + m_origTileInRow = mainVS->GetTileInRow(); + m_origTileInCol = mainVS->GetTileInCol(); + m_tilesInfo = mainVS->GetAllTilesInfo(); + m_origTileWidth = m_tilesInfo[0].tileWidth; + m_origTileHeight = m_tilesInfo[0].tileHeight; + m_projType = (VCD::OMAF::ProjectionFormat)(mainVS->GetProjType()); + + (m_initInfo->viewportInfo)->inWidth = m_origResWidth; + (m_initInfo->viewportInfo)->inHeight = m_origResHeight; + (m_initInfo->viewportInfo)->tileInRow = m_origTileInRow; + (m_initInfo->viewportInfo)->tileInCol = m_origTileInCol; + (m_initInfo->viewportInfo)->outGeoType = 2; //viewport + (m_initInfo->viewportInfo)->inGeoType = mainVS->GetProjType(); + + if ((m_initInfo->segmentationInfo)->extractorTracksPerSegThread == 0) + { + if ((m_origTileInRow * m_origTileInCol) % 4 == 0) + { + (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 4; + } + else if ((m_origTileInRow * m_origTileInCol) % 3 == 0) + { + (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 3; + } + else if ((m_origTileInRow * m_origTileInCol) % 2 == 0) + { + (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 2; + } + else + { + (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 1; + } + } + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::Initialize() +{ + if (!m_initInfo) + return 
OMAF_ERROR_NULL_PTR; + + m_fixedPackedPicRes = m_initInfo->fixedPackedPicRes; + + int32_t ret = CheckAndFillInitInfo(); + if (ret) + return ret; + + std::map::iterator it; + it = m_streams->find(m_videoIdxInMedia[0]); //high resolution video stream + if (it == m_streams->end()) + return OMAF_ERROR_STREAM_NOT_FOUND; + + VideoStream *vs = (VideoStream*)(it->second); + m_origVPSNalu = vs->GetVPSNalu(); + m_origSPSNalu = vs->GetSPSNalu(); + m_origPPSNalu = vs->GetPPSNalu(); + +#ifdef _USE_TRACE_ + //trace + if ((EGeometryType)((m_initInfo->viewportInfo)->inGeoType) == EGeometryType::E_SVIDEO_EQUIRECT) + { + const char *projType = "ERP"; + tracepoint(bandwidth_tp_provider, initial_viewport_info, + (m_initInfo->viewportInfo)->viewportWidth, + (m_initInfo->viewportInfo)->viewportHeight, + (m_initInfo->viewportInfo)->viewportPitch, + (m_initInfo->viewportInfo)->viewportYaw, + (m_initInfo->viewportInfo)->horizontalFOVAngle, + (m_initInfo->viewportInfo)->verticalFOVAngle, + projType); + } + else if ((EGeometryType)((m_initInfo->viewportInfo)->inGeoType) == EGeometryType::E_SVIDEO_CUBEMAP) + { + const char *projType = "CubeMap"; + tracepoint(bandwidth_tp_provider, initial_viewport_info, + (m_initInfo->viewportInfo)->viewportWidth, + (m_initInfo->viewportInfo)->viewportHeight, + (m_initInfo->viewportInfo)->viewportPitch, + (m_initInfo->viewportInfo)->viewportYaw, + (m_initInfo->viewportInfo)->horizontalFOVAngle, + (m_initInfo->viewportInfo)->verticalFOVAngle, + projType); + } +#endif + + ret = CalculateViewportNum(); + if (ret) + return ret; + + OMAF_LOG(LOG_INFO, "Total Viewport number is %d\n", m_viewportNum); + + std::set allSelectedNums; + std::map>::iterator itSelection; + for (itSelection = m_tilesSelection.begin(); itSelection != m_tilesSelection.end(); itSelection++) + { + uint16_t selectedNum = itSelection->first; + allSelectedNums.insert(selectedNum); + } + std::set::reverse_iterator numIter = allSelectedNums.rbegin(); + if (numIter == allSelectedNums.rend()) + { + OMAF_LOG(LOG_ERROR, "ERROR in all selected tiles numbers !\n"); + return OMAF_ERROR_INVALID_DATA; + } + uint16_t maxSelectedNum = *numIter; + OMAF_LOG(LOG_INFO, "Maxmum selected tiles number in viewport is %d\n", maxSelectedNum); + + for (itSelection = m_tilesSelection.begin(); itSelection != m_tilesSelection.end(); itSelection++) + { + uint16_t selectedNum = itSelection->first; + + RegionWisePackingGenerator *rwpkGen = new RegionWisePackingGenerator(); + if (!rwpkGen) + return OMAF_ERROR_NULL_PTR; + + if (m_fixedPackedPicRes) + { + ret = rwpkGen->Initialize( + m_initInfo->packingPluginPath, m_initInfo->packingPluginName, + m_streams, m_videoIdxInMedia, + selectedNum, maxSelectedNum, logCallBack); + } + else + { + ret = rwpkGen->Initialize( + m_initInfo->packingPluginPath, m_initInfo->packingPluginName, + m_streams, m_videoIdxInMedia, + selectedNum, selectedNum, logCallBack); + } + + if (ret) + { + DELETE_MEMORY(rwpkGen); + return ret; + } + + m_rwpkGenMap.insert(std::make_pair(selectedNum, rwpkGen)); + } + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::ConvertTilesIdx( + uint16_t tilesNum, + TileDef *tilesInViewport) +{ + if (!tilesInViewport) + return OMAF_ERROR_NULL_PTR; + + for (uint16_t idx = 0; idx < tilesNum; idx++) + { + TileDef *oneTile = &(tilesInViewport[idx]); + + for (uint8_t regIdx = 0; regIdx < (m_origTileInRow * m_origTileInCol); regIdx++) + { + TileInfo *tileInfo = &(m_tilesInfo[regIdx]); + if ((tileInfo->corresHorPosTo360SCVP == oneTile->x) && + (tileInfo->corresVerPosTo360SCVP == oneTile->y) && + 
(tileInfo->corresFaceIdTo360SCVP == oneTile->faceId)) + { + oneTile->idx = tileInfo->tileIdxInProjPic; + break; + } + } + } + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::GenerateExtractorTracks( + std::map& extractorTrackMap, + std::map *streams) +{ + if (!streams) + return OMAF_ERROR_NULL_PTR; + + int32_t ret = ERROR_NONE; + + std::list picResolution; + std::map::iterator itStr; + uint8_t videoNum = m_initInfo->bsNumVideo; + for (uint8_t vsIdx = 0; vsIdx < videoNum; vsIdx++) + { + itStr = m_streams->find(m_videoIdxInMedia[vsIdx]); + if (itStr == m_streams->end()) + return OMAF_ERROR_STREAM_NOT_FOUND; + + VideoStream *vs = (VideoStream*)(itStr->second); + + PicResolution resolution = { vs->GetSrcWidth(), vs->GetSrcHeight() }; + picResolution.push_back(resolution); + } + + std::map>::iterator it; + for (it = m_tilesSelection.begin(); it != m_tilesSelection.end(); it++) + { + uint16_t selectedNum = it->first; + RegionWisePackingGenerator *rwpkGen = m_rwpkGenMap[selectedNum]; + if (!rwpkGen) + return OMAF_ERROR_NULL_PTR; + + std::map oneLayout = it->second; + std::map::iterator it1; + for (it1 = oneLayout.begin(); it1 != oneLayout.end(); it1++) + { + uint16_t viewportIdx = it1->first; + TileDef *tilesInView = it1->second; + if (!tilesInView) + return OMAF_ERROR_NULL_PTR; + + if (m_projType == VCD::OMAF::ProjectionFormat::PF_CUBEMAP) + { + ConvertTilesIdx(selectedNum, tilesInView); + } + + ExtractorTrack *extractorTrack = new ExtractorTrack(viewportIdx, streams, (m_initInfo->viewportInfo)->inGeoType); + + if (!extractorTrack) + { + std::map::iterator itET = extractorTrackMap.begin(); + for ( ; itET != extractorTrackMap.end(); ) + { + ExtractorTrack *extractorTrack1 = itET->second; + DELETE_MEMORY(extractorTrack1); + extractorTrackMap.erase(itET++); + } + extractorTrackMap.clear(); + return OMAF_ERROR_NULL_PTR; + } + + int32_t retInit = extractorTrack->Initialize(); + if (retInit) + { + OMAF_LOG(LOG_ERROR, "Failed to initialize extractor track !\n"); + + std::map::iterator itET = extractorTrackMap.begin(); + for ( ; itET != extractorTrackMap.end(); ) + { + ExtractorTrack *extractorTrack1 = itET->second; + DELETE_MEMORY(extractorTrack1); + extractorTrackMap.erase(itET++); + } + extractorTrackMap.clear(); + DELETE_MEMORY(extractorTrack); + return retInit; + } + + ret = rwpkGen->GenerateMergedTilesArrange(tilesInView); + if (ret) + { + std::map::iterator itET = extractorTrackMap.begin(); + for ( ; itET != extractorTrackMap.end(); ) + { + ExtractorTrack *extractorTrack1 = itET->second; + DELETE_MEMORY(extractorTrack1); + extractorTrackMap.erase(itET++); + } + extractorTrackMap.clear(); + DELETE_MEMORY(extractorTrack); + return ret; + } + + ret = FillDstRegionWisePacking(rwpkGen, tilesInView, extractorTrack->GetRwpk()); + if (ret) + { + std::map::iterator itET = extractorTrackMap.begin(); + for ( ; itET != extractorTrackMap.end(); ) + { + ExtractorTrack *extractorTrack1 = itET->second; + DELETE_MEMORY(extractorTrack1); + extractorTrackMap.erase(itET++); + } + extractorTrackMap.clear(); + DELETE_MEMORY(extractorTrack); + return ret; + } + + ret = FillTilesMergeDirection(rwpkGen, tilesInView, extractorTrack->GetTilesMergeDir()); + if (ret) + { + std::map::iterator itET = extractorTrackMap.begin(); + for ( ; itET != extractorTrackMap.end(); ) + { + ExtractorTrack *extractorTrack1 = itET->second; + DELETE_MEMORY(extractorTrack1); + extractorTrackMap.erase(itET++); + } + extractorTrackMap.clear(); + DELETE_MEMORY(extractorTrack); + return ret; + } + + ret = 
FillDstContentCoverage(viewportIdx, extractorTrack->GetCovi()); + if (ret) + { + std::map::iterator itET = extractorTrackMap.begin(); + for ( ; itET != extractorTrackMap.end(); ) + { + ExtractorTrack *extractorTrack1 = itET->second; + DELETE_MEMORY(extractorTrack1); + extractorTrackMap.erase(itET++); + } + extractorTrackMap.clear(); + DELETE_MEMORY(extractorTrack); + return ret; + } + + ret = GenerateNewSPS(); + if (ret) + { + std::map::iterator itET = extractorTrackMap.begin(); + for ( ; itET != extractorTrackMap.end(); ) + { + ExtractorTrack *extractorTrack1 = itET->second; + DELETE_MEMORY(extractorTrack1); + extractorTrackMap.erase(itET++); + } + extractorTrackMap.clear(); + DELETE_MEMORY(extractorTrack); + return ret; + } + + ret = GenerateNewPPS(rwpkGen); + if (ret) + { + std::map::iterator itET = extractorTrackMap.begin(); + for ( ; itET != extractorTrackMap.end(); ) + { + ExtractorTrack *extractorTrack1 = itET->second; + DELETE_MEMORY(extractorTrack1); + extractorTrackMap.erase(itET++); + } + extractorTrackMap.clear(); + DELETE_MEMORY(extractorTrack); + return ret; + } + + extractorTrack->SetPackedPicWidth(m_packedPicWidth); + extractorTrack->SetPackedPicHeight(m_packedPicHeight); + extractorTrack->SetNalu(m_origVPSNalu, extractorTrack->GetVPS()); + extractorTrack->SetNalu(m_newSPSNalu, extractorTrack->GetSPS()); + extractorTrack->SetNalu(m_newPPSNalu, extractorTrack->GetPPS()); + + std::list* picResList = extractorTrack->GetPicRes(); + std::list::iterator itRes; + for (itRes = picResolution.begin(); itRes != picResolution.end(); itRes++) + { + PicResolution picRes = *itRes; + picResList->push_back(picRes); + } + + extractorTrackMap.insert(std::make_pair(viewportIdx, std::move(extractorTrack))); + } + } + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::GenerateNewSPS() +{ + if (!m_packedPicWidth || !m_packedPicHeight || !m_origSPSNalu) + return OMAF_ERROR_BAD_PARAM; + + if (!(m_origSPSNalu->data) || !(m_origSPSNalu->dataSize)) + return OMAF_ERROR_INVALID_SPS; + + if (!m_360scvpParam || !m_360scvpHandle) + { + std::map::iterator it; + it = m_streams->find(m_videoIdxInMedia[0]); + if (it == m_streams->end()) + return OMAF_ERROR_STREAM_NOT_FOUND; + + VideoStream *vs = (VideoStream*)(it->second); + m_360scvpHandle = vs->Get360SCVPHandle(); + m_360scvpParam = vs->Get360SCVPParam(); + + if (!m_360scvpParam || !m_360scvpHandle) + return OMAF_ERROR_NULL_PTR; + } + + if (m_newSPSNalu) + { + DELETE_ARRAY(m_newSPSNalu->data); + DELETE_MEMORY(m_newSPSNalu); + } + + m_newSPSNalu = new Nalu; + if (!m_newSPSNalu) + return OMAF_ERROR_NULL_PTR; + + m_newSPSNalu->data = new uint8_t[1024];//include start codes + if (!m_newSPSNalu->data) + return OMAF_ERROR_NULL_PTR; + + m_360scvpParam->pInputBitstream = m_origSPSNalu->data; + m_360scvpParam->inputBitstreamLen = m_origSPSNalu->dataSize; + m_360scvpParam->destWidth = m_packedPicWidth; + m_360scvpParam->destHeight = m_packedPicHeight; + m_360scvpParam->pOutputBitstream = m_newSPSNalu->data; + + int32_t ret = I360SCVP_GenerateSPS(m_360scvpParam, m_360scvpHandle); + if (ret) + return OMAF_ERROR_SCVP_OPERATION_FAILED; + + m_newSPSNalu->dataSize = m_360scvpParam->outputBitstreamLen; + m_newSPSNalu->startCodesSize = HEVC_STARTCODES_LEN; + m_newSPSNalu->naluType = HEVC_SPS_NALU_TYPE; + + return ERROR_NONE; +} + +int32_t ExtractorTrackGenerator::GenerateNewPPS(RegionWisePackingGenerator *rwpkGen) +{ + if (!rwpkGen) + return OMAF_ERROR_NULL_PTR; + + TileArrangement *tileArray = rwpkGen->GetMergedTilesArrange(); + if (!tileArray) + return 
OMAF_ERROR_NULL_PTR; + + if (!m_360scvpParam || !m_360scvpHandle) + { + std::map::iterator it; + it = m_streams->find(m_videoIdxInMedia[0]); + if (it == m_streams->end()) + return OMAF_ERROR_STREAM_NOT_FOUND; + + VideoStream *vs = (VideoStream*)(it->second); + m_360scvpHandle = vs->Get360SCVPHandle(); + m_360scvpParam = vs->Get360SCVPParam(); + + if (!m_360scvpParam || !m_360scvpHandle) + return OMAF_ERROR_NULL_PTR; + } + + if (m_newPPSNalu) + { + DELETE_ARRAY(m_newPPSNalu->data); + DELETE_MEMORY(m_newPPSNalu); + } + + m_newPPSNalu = new Nalu; + if (!m_newPPSNalu) + return OMAF_ERROR_NULL_PTR; + + m_newPPSNalu->data = new uint8_t[1024];//include start codes + if (!m_newPPSNalu->data) + return OMAF_ERROR_NULL_PTR; + + m_360scvpParam->pInputBitstream = m_origPPSNalu->data; //includes start codes + m_360scvpParam->inputBitstreamLen = m_origPPSNalu->dataSize; + + m_360scvpParam->pOutputBitstream = m_newPPSNalu->data; + + int32_t ret = I360SCVP_GeneratePPS(m_360scvpParam, tileArray, m_360scvpHandle); + if (ret) + return OMAF_ERROR_SCVP_OPERATION_FAILED; + + m_newPPSNalu->dataSize = m_360scvpParam->outputBitstreamLen; + m_newPPSNalu->startCodesSize = HEVC_STARTCODES_LEN; + m_newPPSNalu->naluType = HEVC_PPS_NALU_TYPE; + + return ERROR_NONE; +} + +VCD_NS_END diff --git a/src/VROmafPacking/ExtractorTrackGenerator.h b/src/VROmafPacking/ExtractorTrackGenerator.h index 0a55afaf..ff118a61 100644 --- a/src/VROmafPacking/ExtractorTrackGenerator.h +++ b/src/VROmafPacking/ExtractorTrackGenerator.h @@ -26,8 +26,9 @@ //! //! \file: ExtractorTrackGenerator.h -//! \brief: Extractor track generator base class definition -//! \detail: Define the basic operation of extractor track generator. +//! \brief: Extractor track generator class definition +//! \detail: Define the operation of extractor track generator for various +//! input video streams scenarios //! //! Created on April 30, 2019, 6:04 AM //! @@ -36,16 +37,18 @@ #define _EXTRACTORTRACKGENERATOR_H_ #include "VROmafPacking_data.h" -#include "definitions.h" +#include "VROmafPacking_def.h" #include "MediaStream.h" #include "ExtractorTrack.h" #include "RegionWisePackingGenerator.h" +#include "../utils/OmafStructure.h" VCD_NS_BEGIN //! //! \class ExtractorTrackGenerator -//! \brief Define the basic operation of extractor track generator +//! \brief Define the operation of extractor track generator +//! for various input video streams scenarios //! class ExtractorTrackGenerator @@ -58,10 +61,29 @@ class ExtractorTrackGenerator { m_initInfo = NULL; m_streams = NULL; + m_middleViewNum = 0; m_viewportNum = 0; - m_rwpkGen = NULL; + m_fixedPackedPicRes = false; m_newSPSNalu = NULL; m_newPPSNalu = NULL; + m_videoIdxInMedia = NULL; + m_360scvpParam = NULL; + m_360scvpHandle = NULL; + m_origResWidth = 0; + m_origResHeight = 0; + m_origTileInRow = 0; + m_origTileInCol = 0; + m_origTileWidth = 0; + m_origTileHeight = 0; + m_tilesInfo = NULL; + m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; + m_packedPicWidth = 0; + m_packedPicHeight = 0; + m_origVPSNalu = NULL; + m_origSPSNalu = NULL; + m_origPPSNalu = NULL; + m_pitchStep = 0.00; + m_yawStep = 0.00; }; //! 
@@ -76,24 +98,108 @@ class ExtractorTrackGenerator { m_initInfo = initInfo; m_streams = streams; + m_fixedPackedPicRes = false; + m_middleViewNum = 0; m_viewportNum = 0; - m_rwpkGen = NULL; m_newSPSNalu = NULL; m_newPPSNalu = NULL; + m_videoIdxInMedia = NULL; + m_360scvpParam = NULL; + m_360scvpHandle = NULL; + m_origResWidth = 0; + m_origResHeight = 0; + m_origTileInRow = 0; + m_origTileInCol = 0; + m_origTileWidth = 0; + m_origTileHeight = 0; + m_tilesInfo = NULL; + m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; + m_packedPicWidth = 0; + m_packedPicHeight = 0; + m_origVPSNalu = NULL; + m_origSPSNalu = NULL; + m_origPPSNalu = NULL; + m_pitchStep = 0.00; + m_yawStep = 0.00; + }; + + ExtractorTrackGenerator(const ExtractorTrackGenerator& src) + { + m_initInfo = src.m_initInfo; + m_streams = src.m_streams; + m_fixedPackedPicRes = src.m_fixedPackedPicRes; + m_middleViewNum = src.m_middleViewNum; + m_viewportNum = src.m_viewportNum; + m_newSPSNalu = std::move(src.m_newSPSNalu); + m_newPPSNalu = std::move(src.m_newPPSNalu); + m_videoIdxInMedia = std::move(src.m_videoIdxInMedia); + m_360scvpParam = std::move(src.m_360scvpParam); + m_360scvpHandle = std::move(src.m_360scvpHandle); + m_origResWidth = src.m_origResWidth; + m_origResHeight = src.m_origResHeight; + m_origTileInRow = src.m_origTileInRow; + m_origTileInCol = src.m_origTileInCol; + m_origTileWidth = src.m_origTileWidth; + m_origTileHeight = src.m_origTileHeight; + m_tilesInfo = std::move(src.m_tilesInfo); + m_projType = src.m_projType; + m_packedPicWidth = src.m_packedPicWidth; + m_packedPicHeight = src.m_packedPicHeight; + m_origVPSNalu = std::move(src.m_origVPSNalu); + m_origSPSNalu = std::move(src.m_origSPSNalu); + m_origPPSNalu = std::move(src.m_origPPSNalu); + m_pitchStep = src.m_pitchStep; + m_yawStep = src.m_yawStep; + }; + + ExtractorTrackGenerator& operator=(ExtractorTrackGenerator&& other) + { + m_initInfo = other.m_initInfo; + m_streams = other.m_streams; + m_fixedPackedPicRes = other.m_fixedPackedPicRes; + m_middleViewNum = other.m_middleViewNum; + m_viewportNum = other.m_viewportNum; + m_newSPSNalu = NULL; + m_newPPSNalu = NULL; + m_videoIdxInMedia = NULL; + m_360scvpParam = NULL; + m_360scvpHandle = NULL; + m_origResWidth = other.m_origResWidth; + m_origResHeight = other.m_origResHeight; + m_origTileInRow = other.m_origTileInRow; + m_origTileInCol = other.m_origTileInCol; + m_origTileWidth = other.m_origTileWidth; + m_origTileHeight = other.m_origTileHeight; + m_tilesInfo = NULL; + m_projType = other.m_projType; + m_packedPicWidth = other.m_packedPicWidth; + m_packedPicHeight = other.m_packedPicHeight; + m_origVPSNalu = NULL; + m_origSPSNalu = NULL; + m_origPPSNalu = NULL; + m_pitchStep = other.m_pitchStep; + m_yawStep = other.m_yawStep; + + return *this; }; //! //! \brief Destructor //! - virtual ~ExtractorTrackGenerator() {}; + ~ExtractorTrackGenerator(); //! //! \brief Initialize the extractor track generator + //! for various input video streams scenarios //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t Initialize() = 0; + int32_t Initialize(); + + int32_t ConvertTilesIdx( + uint16_t tilesNum, + TileDef *tilesInViewport); //! //! \brief Generate all extractor tracks @@ -107,7 +213,9 @@ class ExtractorTrackGenerator //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t GenerateExtractorTracks(std::map& extractorTrackMap, std::map *streams) = 0; + int32_t GenerateExtractorTracks( + std::map& extractorTrackMap, + std::map *streams); //! //! 
\brief Get the new SPS nalu for tiles merged bitstream @@ -126,6 +234,11 @@ class ExtractorTrackGenerator Nalu* GetNewPPS() { return m_newPPSNalu; }; private: + + int32_t SelectTilesInView( + float yaw, float pitch, + uint8_t tileInRow, uint8_t tileInCol); + //! //! \brief Calculate the total viewport number //! according to the initial information @@ -133,41 +246,52 @@ class ExtractorTrackGenerator //! \return uint16_t //! the total viewport number //! - virtual uint16_t CalculateViewportNum() = 0; + int32_t CalculateViewportNum(); + int32_t RefineTilesSelection(); //! //! \brief Fill the region wise packing information //! for the specified viewport //! - //! \param [in] viewportIdx - //! the index of the specified viewport - //! \param [in] dstRwpk + //! \param [in] rwpkGen + //! the pointer to the region wise packing generator + //! \param [in] tilesInViewport + //! the pointer to all tiles information in packed + //! sub-picture + //! \param [out] dstRwpk //! pointer to the region wise packing information for the //! specified viewport generated according to srcRwpk and - //! detailed tiles merging strategy + //! tiles merging strategy //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t FillDstRegionWisePacking(uint8_t viewportIdx, RegionWisePacking *dstRwpk) = 0; + int32_t FillDstRegionWisePacking( + RegionWisePackingGenerator *rwpkGen, + TileDef *tilesInViewport, + RegionWisePacking *dstRwpk); //! //! \brief Fill the tiles merging direction information //! for the specified viewport //! - //! \param [in] viewportIdx - //! the index of the specified viewport + //! \param [in] rwpkGen + //! the pointer to the region wise packing generator + //! \param [in] tilesInViewport + //! the pointer to all tiles information in packed + //! sub-picture //! \param [out] tilesMergeDir - //! pointer to the tiles merging direction information for the - //! specified viewport generated according to the detailed + //! pointer to the tiles merging direction information + //! for the specified viewport generated according to //! tiles merging strategy //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t FillTilesMergeDirection( - uint8_t viewportIdx, - TilesMergeDirectionInCol *tilesMergeDir) = 0; + int32_t FillTilesMergeDirection( + RegionWisePackingGenerator *rwpkGen, + TileDef *tilesInViewport, + TilesMergeDirectionInCol *tilesMergeDir); //! //! \brief Fill the content coverage information @@ -175,51 +299,73 @@ class ExtractorTrackGenerator //! //! \param [in] viewportIdx //! the index of the specified viewport - //! \param [in] dstCovi + //! \param [out] dstCovi //! pointer to the content coverage information for the //! specified viewport generated according to srcCovi and - //! detailed tiles merging strategy + //! tiles merging strategy //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t FillDstContentCoverage(uint8_t viewportIdx, ContentCoverage *dstCovi) = 0; + int32_t FillDstContentCoverage(uint16_t viewportIdx, ContentCoverage *dstCovi); //! //! \brief Check the validation of initial information - //! input by library interface, like whether the - //! TilesMergingType is correct compared to actual - //! streams information, meanwhile fill the lacked + //! input by library interface, meanwhile fill the lacked //! information according to actual streams information //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! 
- virtual int32_t CheckAndFillInitInfo() = 0; + int32_t CheckAndFillInitInfo(); //! - //! \brief Generate the new SPS nalu for tiles merged bitstream + //! \brief Generate the new SPS for tiles merged bitstream //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t GenerateNewSPS() = 0; + int32_t GenerateNewSPS(); //! - //! \brief Generate the new PPS nalu for tiles merged bitstream + //! \brief Generate the new PPS for tiles merged bitstream //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t GenerateNewPPS() = 0; + int32_t GenerateNewPPS(RegionWisePackingGenerator *rwpkGen); -protected: +private: InitialInfo *m_initInfo; //!< initial information input by library interface std::map *m_streams; //!< media streams map set up in OmafPackage + uint16_t m_middleViewNum; uint16_t m_viewportNum; //!< viewport number calculated according to initial information - RegionWisePackingGenerator *m_rwpkGen; //!< pointer to region wise packing generator + std::map> m_middleSelection; + std::map> m_tilesSelection; //!< all tiles selection results for all viewports (yaw from -180 to 180 and pitch from -90 to 90), that is std::map> + std::map m_middleCCInfo; + std::map m_viewportCCInfo; + std::map m_rwpkGenMap; //!< all RWPK generators according to different tiles selection layout, that is std::map + bool m_fixedPackedPicRes; //!< whether extractor track packed sub-picture needs the fixed resolution Nalu *m_newSPSNalu; //!< pointer to the new SPS nalu Nalu *m_newPPSNalu; //!< pointer to the new PPS nalu + uint8_t *m_videoIdxInMedia; //!< pointer to index of video streams in media streams + param_360SCVP *m_360scvpParam; //!< 360SCVP library initial parameter + void *m_360scvpHandle; //!< 360SCVP library handle + uint16_t m_origResWidth; //!< frame width of high resolution video stream + uint16_t m_origResHeight; //!< frame height of high resolution video stream + uint8_t m_origTileInRow; //!< the number of high resolution tiles in one row in original picture + uint8_t m_origTileInCol; //!< the number of high resolution tiles in one column in original picture + uint16_t m_origTileWidth; //!< the width of high resolution tile + uint16_t m_origTileHeight; //!< the height of high resolution tile + TileInfo *m_tilesInfo; //!< pointer to tile information of all tiles in high resolution video stream + VCD::OMAF::ProjectionFormat m_projType; //!< the projection type + uint32_t m_packedPicWidth; //!< the width of tiles merged picture + uint32_t m_packedPicHeight; //!< the height of tiles merged picture + Nalu *m_origVPSNalu; //!< the pointer to original VPS nalu of high resolution video stream + Nalu *m_origSPSNalu; //!< the pointer to original SPS nalu of high resolution video stream + Nalu *m_origPPSNalu; //!< the pointer to original PPS nalu of high resolution video stream + float m_pitchStep; //!< the step of pitch angle when going through all viewports + float m_yawStep; //!< the step of yaw angle when going through all viewports }; VCD_NS_END; diff --git a/src/VROmafPacking/ExtractorTrackManager.cpp b/src/VROmafPacking/ExtractorTrackManager.cpp index 8d2c3b21..c3b8df78 100644 --- a/src/VROmafPacking/ExtractorTrackManager.cpp +++ b/src/VROmafPacking/ExtractorTrackManager.cpp @@ -49,11 +49,27 @@ ExtractorTrackManager::ExtractorTrackManager(InitialInfo *initInfo) m_streams = NULL; } +ExtractorTrackManager::ExtractorTrackManager(const ExtractorTrackManager& src) +{ + m_extractorTrackGen = std::move(src.m_extractorTrackGen); + m_initInfo = 
std::move(src.m_initInfo); + m_streams = std::move(src.m_streams); +} + +ExtractorTrackManager& ExtractorTrackManager::operator=(ExtractorTrackManager&& other) +{ + m_extractorTrackGen = std::move(other.m_extractorTrackGen); + m_initInfo = std::move(other.m_initInfo); + m_streams = std::move(other.m_streams); + + return *this; +} + ExtractorTrackManager::~ExtractorTrackManager() { DELETE_MEMORY(m_extractorTrackGen); - std::map::iterator it; + std::map::iterator it; for (it = m_extractorTracks.begin(); it != m_extractorTracks.end();) { DELETE_MEMORY(it->second); @@ -79,31 +95,29 @@ int32_t ExtractorTrackManager::Initialize(std::map *media m_streams = mediaStreams; - if (m_initInfo->tilesMergingType == OnlyOneVideo) + if (m_initInfo->packingPluginName) { - m_extractorTrackGen = new OneVideoExtractorTrackGenerator(m_initInfo, m_streams); + OMAF_LOG(LOG_INFO, "Appoint plugin %s for extractor track generation !\n", (m_initInfo->packingPluginName)); + m_extractorTrackGen = new ExtractorTrackGenerator(m_initInfo, m_streams); if (!m_extractorTrackGen) + { + OMAF_LOG(LOG_ERROR, "Failed to create extractor track generator !\n"); return OMAF_ERROR_NULL_PTR; - } - else if (m_initInfo->tilesMergingType == TwoResTilesMerging) - { - m_extractorTrackGen = new TwoResExtractorTrackGenerator(m_initInfo, m_streams); + } - if (!m_extractorTrackGen) - return OMAF_ERROR_NULL_PTR; + int32_t ret = m_extractorTrackGen->Initialize(); + if (ret) + return ret; - } else { - return OMAF_ERROR_UNDEFINED_OPERATION; //after adding other tiles merging strategy than TwoResTilesMerging, change here. + ret = AddExtractorTracks(); + if (ret) + return ret; + } + else + { + OMAF_LOG(LOG_INFO, "No plugin appointed, so extractor track will not be generated !\n"); } - - int32_t ret = m_extractorTrackGen->Initialize(); - if (ret) - return ret; - - ret = AddExtractorTracks(); - if (ret) - return ret; return ERROR_NONE; } diff --git a/src/VROmafPacking/ExtractorTrackManager.h b/src/VROmafPacking/ExtractorTrackManager.h index beb8ebcc..e0d41062 100644 --- a/src/VROmafPacking/ExtractorTrackManager.h +++ b/src/VROmafPacking/ExtractorTrackManager.h @@ -36,12 +36,11 @@ #define _EXTRACTORTRACKMANAGER_H_ #include "VROmafPacking_data.h" -#include "definitions.h" +#include "VROmafPacking_def.h" #include "MediaStream.h" -#include "VideoStream.h" +//#include "VideoStream.h" #include "ExtractorTrack.h" -#include "OneVideoExtractorTrackGenerator.h" -#include "TwoResExtractorTrackGenerator.h" +#include "ExtractorTrackGenerator.h" VCD_NS_BEGIN @@ -66,6 +65,10 @@ class ExtractorTrackManager //! ExtractorTrackManager(InitialInfo *initInfo); + ExtractorTrackManager(const ExtractorTrackManager& src); + + ExtractorTrackManager& operator=(ExtractorTrackManager&& other); + //! //! \brief Destructor //! @@ -88,7 +91,7 @@ class ExtractorTrackManager //! \return std::map* //! the pointer to the extractor tracks map //! 
- std::map* GetAllExtractorTracks() + std::map* GetAllExtractorTracks() { return &m_extractorTracks; } @@ -102,7 +105,7 @@ class ExtractorTrackManager int32_t AddExtractorTracks(); private: std::map *m_streams; //!< media streams map set up in OmafPackage - std::map m_extractorTracks; //!< extractor tracks map + std::map m_extractorTracks; //!< extractor tracks map ExtractorTrackGenerator *m_extractorTrackGen; //!< extractor track generator to generate all extractor tracks InitialInfo *m_initInfo; //!< the initial information input by library interface }; diff --git a/src/VROmafPacking/MpdGenerator.cpp b/src/VROmafPacking/MpdGenerator.cpp index b77f7824..8ef0244c 100644 --- a/src/VROmafPacking/MpdGenerator.cpp +++ b/src/VROmafPacking/MpdGenerator.cpp @@ -36,6 +36,7 @@ #include #include #include "MpdGenerator.h" +#include "VideoStreamPluginAPI.h" VCD_NS_BEGIN @@ -46,13 +47,14 @@ MpdGenerator::MpdGenerator() m_segInfo = NULL; m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; m_miniUpdatePeriod = 0; - memset(m_availableStartTime, 0, 1024); + memset_s(m_availableStartTime, 1024, 0); m_publishTime = NULL; m_presentationDur = NULL; m_timeScale = 0; m_xmlDoc = NULL; m_frameRate.num = 0; m_frameRate.den = 0; + m_vsNum = 0; } MpdGenerator::MpdGenerator( @@ -60,19 +62,57 @@ MpdGenerator::MpdGenerator( std::map *extractorSegCtxs, SegmentationInfo *segInfo, VCD::OMAF::ProjectionFormat projType, - Rational frameRate) + Rational frameRate, + uint8_t videoNum) { m_streamSegCtx = streamsSegCtxs; m_extractorSegCtx = extractorSegCtxs; m_segInfo = segInfo; m_projType = projType; m_miniUpdatePeriod = 0; - memset(m_availableStartTime, 0, 1024); + memset_s(m_availableStartTime, 1024, 0); m_publishTime = NULL; m_presentationDur = NULL; m_frameRate = frameRate; m_timeScale = 0; m_xmlDoc = NULL; + m_vsNum = videoNum; +} + +MpdGenerator::MpdGenerator(const MpdGenerator& src) +{ + m_streamSegCtx = std::move(src.m_streamSegCtx); + m_extractorSegCtx = std::move(src.m_extractorSegCtx); + m_segInfo = std::move(src.m_segInfo); + m_projType = src.m_projType; + m_miniUpdatePeriod = src.m_miniUpdatePeriod; + memset_s(m_availableStartTime, 1024, 0); + m_publishTime = std::move(src.m_publishTime); + m_presentationDur = std::move(src.m_presentationDur); + m_timeScale = src.m_timeScale; + m_xmlDoc = std::move(src.m_xmlDoc); + m_frameRate.num = src.m_frameRate.num; + m_frameRate.den = src.m_frameRate.den; + m_vsNum = src.m_vsNum; +} + +MpdGenerator& MpdGenerator::operator=(MpdGenerator&& other) +{ + m_streamSegCtx = std::move(other.m_streamSegCtx); + m_extractorSegCtx = std::move(other.m_extractorSegCtx); + m_segInfo = std::move(other.m_segInfo); + m_projType = other.m_projType; + m_miniUpdatePeriod = other.m_miniUpdatePeriod; + memset_s(m_availableStartTime, 1024, 0); + m_publishTime = std::move(other.m_publishTime); + m_presentationDur = std::move(other.m_presentationDur); + m_timeScale = other.m_timeScale; + m_xmlDoc = std::move(other.m_xmlDoc); + m_frameRate.num = other.m_frameRate.num; + m_frameRate.den = other.m_frameRate.den; + m_vsNum = other.m_vsNum; + + return *this; } MpdGenerator::~MpdGenerator() @@ -97,7 +137,7 @@ int32_t MpdGenerator::Initialize() { if (chmod(&(m_segInfo->dirName[0]), modeFile) != 0) { - LOG(ERROR) << "Failed to change write mode for folder " << m_segInfo->dirName << " ! 
" << std::endl; + OMAF_LOG(LOG_ERROR, "Failed to change write mode for folder %s\n", m_segInfo->dirName); return OMAF_ERROR_CHANGE_FOLDERMODE_FAILED; } } @@ -106,7 +146,7 @@ int32_t MpdGenerator::Initialize() { if (mkdir(&(m_segInfo->dirName[0]), modeFile) != 0) { - LOG(ERROR) << "Failed to create folder " << m_segInfo->dirName << " ! " << std::endl; + OMAF_LOG(LOG_ERROR, "Failed to create folder %s\n", m_segInfo->dirName); return OMAF_ERROR_CREATE_FOLDER_FAILED; } } @@ -145,10 +185,10 @@ int32_t MpdGenerator::WriteTileTrackAS(XMLElement *periodEle, TrackSegmentCtx *p TrackSegmentCtx trackSegCtx = *pTrackSegCtx; char string[1024]; - memset(string, 0, 1024); + memset_s(string, 1024, 0); XMLElement *asEle = m_xmlDoc->NewElement(ADAPTATIONSET); - asEle->SetAttribute(INDEX, trackSegCtx.trackIdx.get()); + asEle->SetAttribute(INDEX, trackSegCtx.trackIdx.GetIndex()); asEle->SetAttribute(MIMETYPE, MIMETYPE_VALUE); //? asEle->SetAttribute(CODECS, CODECS_VALUE); asEle->SetAttribute(MAXWIDTH, trackSegCtx.tileInfo->tileWidth); @@ -166,8 +206,17 @@ int32_t MpdGenerator::WriteTileTrackAS(XMLElement *periodEle, TrackSegmentCtx *p XMLElement *supplementalEle = m_xmlDoc->NewElement(SUPPLEMENTALPROPERTY); supplementalEle->SetAttribute(SCHEMEIDURI, SCHEMEIDURI_SRD); - memset(string, 0, 1024); - snprintf(string, 1024, "1,%d,%d,%d,%d", trackSegCtx.tileInfo->horizontalPos, trackSegCtx.tileInfo->verticalPos, trackSegCtx.tileInfo->tileWidth, trackSegCtx.tileInfo->tileHeight); + memset_s(string, 1024, 0); + if ((m_projType == VCD::OMAF::ProjectionFormat::PF_ERP) || + (m_projType == VCD::OMAF::ProjectionFormat::PF_PLANAR)) + { + snprintf(string, 1024, "1,%d,%d,%d,%d", trackSegCtx.tileInfo->horizontalPos, trackSegCtx.tileInfo->verticalPos, trackSegCtx.tileInfo->tileWidth, trackSegCtx.tileInfo->tileHeight); + } + else if (m_projType == VCD::OMAF::ProjectionFormat::PF_CUBEMAP) + { + snprintf(string, 1024, "1,%d,%d,%d,%d", trackSegCtx.tileInfo->defaultHorPos, trackSegCtx.tileInfo->defaultVerPos, trackSegCtx.tileInfo->tileWidth, trackSegCtx.tileInfo->tileHeight); + } + supplementalEle->SetAttribute(COMMON_VALUE, string); asEle->InsertEndChild(supplementalEle); @@ -177,9 +226,9 @@ int32_t MpdGenerator::WriteTileTrackAS(XMLElement *periodEle, TrackSegmentCtx *p asEle->InsertEndChild(essentialEle1); XMLElement *representationEle = m_xmlDoc->NewElement(REPRESENTATION); - memset(string, 0, 1024); - snprintf(string, 1024, "%s_track%d", m_segInfo->outName, trackSegCtx.trackIdx.get()); - representationEle->SetAttribute(INDEX, string);//trackSegCtx.trackIdx.get()); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); + representationEle->SetAttribute(INDEX, string);//trackSegCtx.trackIdx.GetIndex()); representationEle->SetAttribute(QUALITYRANKING, trackSegCtx.qualityRanking); representationEle->SetAttribute(BANDWIDTH, trackSegCtx.codedMeta.bitrate.avgBitrate); representationEle->SetAttribute(WIDTH, trackSegCtx.tileInfo->tileWidth); @@ -190,15 +239,15 @@ int32_t MpdGenerator::WriteTileTrackAS(XMLElement *periodEle, TrackSegmentCtx *p representationEle->SetAttribute(STARTWITHSAP, 1); asEle->InsertEndChild(representationEle); - memset(string, 0, 1024); - snprintf(string, 1024, "%s_track%d.$Number$.mp4", m_segInfo->outName, trackSegCtx.trackIdx.get()); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d.$Number$.mp4", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); XMLElement *sgtTpeEle = m_xmlDoc->NewElement(SEGMENTTEMPLATE); 
sgtTpeEle->SetAttribute(MEDIA, string); - memset(string, 0, 1024); - snprintf(string, 1024, "%s_track%d.init.mp4", m_segInfo->outName, trackSegCtx.trackIdx.get()); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d.init.mp4", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); sgtTpeEle->SetAttribute(INITIALIZATION, string); sgtTpeEle->SetAttribute(DURATION, m_segInfo->segDuration * m_timeScale); - sgtTpeEle->SetAttribute(STARTNUMBER, 0); + sgtTpeEle->SetAttribute(STARTNUMBER, 1); sgtTpeEle->SetAttribute(TIMESCALE, m_timeScale); representationEle->InsertEndChild(sgtTpeEle); @@ -210,10 +259,10 @@ int32_t MpdGenerator::WriteExtractorTrackAS(XMLElement *periodEle, TrackSegmentC TrackSegmentCtx trackSegCtx = *pTrackSegCtx; char string[1024]; - memset(string, 0, 1024); + memset_s(string, 1024, 0); XMLElement *asEle = m_xmlDoc->NewElement(ADAPTATIONSET); - asEle->SetAttribute(INDEX, trackSegCtx.trackIdx.get()); + asEle->SetAttribute(INDEX, trackSegCtx.trackIdx.GetIndex()); asEle->SetAttribute(MIMETYPE, MIMETYPE_VALUE); //? asEle->SetAttribute(CODECS, CODECS_VALUE_EXTRACTORTRACK); asEle->SetAttribute(MAXWIDTH, trackSegCtx.codedMeta.width); @@ -265,16 +314,16 @@ int32_t MpdGenerator::WriteExtractorTrackAS(XMLElement *periodEle, TrackSegmentC shpQualityEle->InsertEndChild(qualityEle); } - memset(string, 0, 1024); - snprintf(string, 1024, "ext%d,%d ", trackSegCtx.trackIdx.get(), trackSegCtx.trackIdx.get()); - std::list::iterator itRefTrack; + memset_s(string, 1024, 0); + snprintf(string, 1024, "ext%d,%d ", trackSegCtx.trackIdx.GetIndex(), trackSegCtx.trackIdx.GetIndex()); + std::list::iterator itRefTrack; for (itRefTrack = trackSegCtx.refTrackIdxs.begin(); itRefTrack != trackSegCtx.refTrackIdxs.end(); itRefTrack++) { char string1[16]; - memset(string1, 0, 16); - snprintf(string1, 16, "%d ", (*itRefTrack).get()); + memset_s(string1, 16, 0); + snprintf(string1, 16, "%d ", (*itRefTrack).GetIndex()); strncat(string, string1, 16); } @@ -285,26 +334,72 @@ int32_t MpdGenerator::WriteExtractorTrackAS(XMLElement *periodEle, TrackSegmentC asEle->InsertEndChild(supplementalEle1); XMLElement *representationEle = m_xmlDoc->NewElement(REPRESENTATION); - memset(string, 0, 1024); - snprintf(string, 1024, "%s_track%d", m_segInfo->outName, trackSegCtx.trackIdx.get()); - representationEle->SetAttribute(INDEX, string);//trackSegCtx.trackIdx.get()); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); + representationEle->SetAttribute(INDEX, string);//trackSegCtx.trackIdx.GetIndex()); //representationEle->SetAttribute(BANDWIDTH, 19502); representationEle->SetAttribute(WIDTH, trackSegCtx.codedMeta.width); representationEle->SetAttribute(HEIGHT, trackSegCtx.codedMeta.height); - memset(string, 0, 1024); + memset_s(string, 1024, 0); snprintf(string, 1024, "%ld/%ld", m_frameRate.num, m_frameRate.den); representationEle->SetAttribute(FRAMERATE, string); asEle->InsertEndChild(representationEle); XMLElement *sgtTpeEle = m_xmlDoc->NewElement(SEGMENTTEMPLATE); - memset(string, 0, 1024); - snprintf(string, 1024, "%s_track%d.$Number$.mp4", m_segInfo->outName, trackSegCtx.trackIdx.get()); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d.$Number$.mp4", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); + sgtTpeEle->SetAttribute(MEDIA, string); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d.init.mp4", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); + sgtTpeEle->SetAttribute(INITIALIZATION, string); + 
sgtTpeEle->SetAttribute(DURATION, m_segInfo->segDuration * m_timeScale); + sgtTpeEle->SetAttribute(STARTNUMBER, 1); + sgtTpeEle->SetAttribute(TIMESCALE, m_timeScale); + representationEle->InsertEndChild(sgtTpeEle); + + return ERROR_NONE; +} + +int32_t MpdGenerator::WriteAudioTrackAS(XMLElement *periodEle, TrackSegmentCtx *pTrackSegCtx) +{ + TrackSegmentCtx trackSegCtx = *pTrackSegCtx; + + char string[1024]; + memset_s(string, 1024, 0); + + XMLElement *asEle = m_xmlDoc->NewElement(ADAPTATIONSET); + asEle->SetAttribute(INDEX, trackSegCtx.trackIdx.GetIndex()); + asEle->SetAttribute(MIMETYPE, MIMETYPE_AUDIO);//MIMETYPE_VALUE); //? + asEle->SetAttribute(CODECS, CODECS_AUDIO);//CODECS_VALUE); + asEle->SetAttribute(AUDIOSAMPLINGRATE, trackSegCtx.codedMeta.samplingFreq); + + asEle->SetAttribute(SEGMENTALIGNMENT, 1); + asEle->SetAttribute(SUBSEGMENTALIGNMENT, 1); + periodEle->InsertEndChild(asEle); + + XMLElement *representationEle = m_xmlDoc->NewElement(REPRESENTATION); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); + representationEle->SetAttribute(INDEX, string);//trackSegCtx.trackIdx.GetIndex()); + representationEle->SetAttribute(BANDWIDTH, trackSegCtx.codedMeta.bitrate.avgBitrate); + representationEle->SetAttribute(AUDIOSAMPLINGRATE, trackSegCtx.codedMeta.samplingFreq); + representationEle->SetAttribute(STARTWITHSAP, 1); + asEle->InsertEndChild(representationEle); + + XMLElement *audioChlCfgEle = m_xmlDoc->NewElement(AUDIOCHANNELCONFIGURATION); + audioChlCfgEle->SetAttribute(SCHEMEIDURI, SCHEMEIDURI_AUDIO); + audioChlCfgEle->SetAttribute(COMMON_VALUE, trackSegCtx.codedMeta.channelCfg); + representationEle->InsertEndChild(audioChlCfgEle); + + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d.$Number$.mp4", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); + XMLElement *sgtTpeEle = m_xmlDoc->NewElement(SEGMENTTEMPLATE); sgtTpeEle->SetAttribute(MEDIA, string); - memset(string, 0, 1024); - snprintf(string, 1024, "%s_track%d.init.mp4", m_segInfo->outName, trackSegCtx.trackIdx.get()); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%s_track%d.init.mp4", m_segInfo->outName, trackSegCtx.trackIdx.GetIndex()); sgtTpeEle->SetAttribute(INITIALIZATION, string); sgtTpeEle->SetAttribute(DURATION, m_segInfo->segDuration * m_timeScale); - sgtTpeEle->SetAttribute(STARTNUMBER, 0); + sgtTpeEle->SetAttribute(STARTNUMBER, 1); sgtTpeEle->SetAttribute(TIMESCALE, m_timeScale); representationEle->InsertEndChild(sgtTpeEle); @@ -327,11 +422,11 @@ int32_t MpdGenerator::WriteMpd(uint64_t totalFramesNum) mpdEle->SetAttribute(XSI_SCHEMALOCATION, XSI_SCHEMALOCATION_VALUE); char string[1024]; - memset(string, 0, 1024); + memset_s(string, 1024, 0); snprintf(string, 1024, "PT%fS", (double)m_segInfo->segDuration); mpdEle->SetAttribute(MINBUFFERTIME, string); - memset(string, 0, 1024); + memset_s(string, 1024, 0); snprintf(string, 1024, "PT%fS", (double)m_segInfo->segDuration); mpdEle->SetAttribute(MAXSEGMENTDURATION, string); @@ -365,8 +460,9 @@ int32_t MpdGenerator::WriteMpd(uint64_t totalFramesNum) return OMAF_ERROR_INVALID_TIME; char forCmp[1024]; - memset(forCmp, 0, 1024); - int32_t cmpRet = memcmp(m_availableStartTime, forCmp, 1024); + memset_s(forCmp, 1024, 0); + int32_t cmpRet = 0; + memcmp_s(m_availableStartTime, 1024, forCmp, 1024, &cmpRet); if (0 == cmpRet) { snprintf(m_availableStartTime, 1024, "%d-%d-%dT%d:%d:%dZ", 1900 + t->tm_year, @@ -379,20 +475,30 @@ int32_t MpdGenerator::WriteMpd(uint64_t totalFramesNum) if 
(!m_publishTime) return OMAF_ERROR_NULL_PTR; } - memset(m_publishTime, 0, 1024); + memset_s(m_publishTime, 1024, 0); snprintf(m_publishTime, 1024, "%d-%02d-%02dT%02d:%02d:%02dZ", 1900+t->tm_year, t->tm_mon+1, t->tm_mday, t->tm_hour, t->tm_min, t->tm_sec); mpdEle->SetAttribute(AVAILABILITYSTARTTIME, m_availableStartTime); mpdEle->SetAttribute(TIMESHIFTBUFFERDEPTH, "PT5M"); - memset(string, 0, 1024); + memset_s(string, 1024, 0); snprintf(string, 1024, "PT%dS", m_miniUpdatePeriod); mpdEle->SetAttribute(MINIMUMUPDATEPERIOD, string); mpdEle->SetAttribute(PUBLISHTIME, m_publishTime); } else { - uint32_t totalDur = (uint32_t)(totalFramesNum * 1000 / (double)(m_frameRate.num / m_frameRate.den) + 0.5); + uint32_t fps1000 = (uint32_t) ((double)m_frameRate.num / m_frameRate.den * 1000); + uint32_t correctedfps = 0; + if (fps1000 == 29970) + correctedfps = 30000; + else if (fps1000 == 23976) + correctedfps = 24000; + else if (fps1000 == 59940) + correctedfps = 60000; + else + correctedfps = fps1000; + uint32_t totalDur = (uint32_t)((double)totalFramesNum * 1000 / ((double)correctedfps / 1000)); uint32_t hour = totalDur / 3600000; totalDur = totalDur % 3600000; uint32_t minute = totalDur / 60000; @@ -406,7 +512,7 @@ int32_t MpdGenerator::WriteMpd(uint64_t totalFramesNum) if (!m_presentationDur) return OMAF_ERROR_NULL_PTR; } - memset(m_presentationDur, 0, 1024); + memset_s(m_presentationDur, 1024, 0); snprintf(m_presentationDur, 1024, "PT%02dH%02dM%02d.%03dS", hour, minute, second, msecond); @@ -441,22 +547,33 @@ int32_t MpdGenerator::WriteMpd(uint64_t totalFramesNum) } mpdEle->InsertEndChild(periodEle); - //xmlDoc.InsertEndChild(periodEle); if (m_segInfo->hasMainAS) { - std::map::iterator it = m_extractorSegCtx->begin(); - if (it == m_extractorSegCtx->end()) - return OMAF_ERROR_EXTRACTORTRACK_NOT_FOUND; - - ExtractorTrack *extractorTrack = it->first; - std::list *picResList = extractorTrack->GetPicRes(); - std::list::iterator it1 = picResList->begin(); - if (it1 == picResList->end()) - return OMAF_ERROR_STREAM_NOT_FOUND; + uint16_t maxWidth = 0; + uint16_t maxHeight = 0; + uint64_t maxRes = 0; + std::map::iterator it = m_streamSegCtx->begin(); + for ( ; it != m_streamSegCtx->end(); it++) + { + MediaStream *stream = it->first; + if (stream->GetMediaType() == VIDEOTYPE) + { + VideoStream *vs = (VideoStream*)stream; + uint16_t width = vs->GetSrcWidth(); + uint16_t height = vs->GetSrcHeight(); + uint64_t resolution = (uint64_t)(width) * (uint64_t)(height); + if (resolution > maxRes) + { + maxRes = resolution; + maxWidth = width; + maxHeight = height; + } + } + } - uint16_t mainWidth = it1->width; - uint16_t mainHeight = it1->height; + uint16_t mainWidth = maxWidth; + uint16_t mainHeight = maxHeight; XMLElement *asEle = m_xmlDoc->NewElement(ADAPTATIONSET); asEle->SetAttribute(INDEX, 0); //? @@ -473,30 +590,107 @@ int32_t MpdGenerator::WriteMpd(uint64_t totalFramesNum) viewportEle->SetAttribute(COMMON_VALUE, "vpl"); asEle->InsertEndChild(viewportEle); - XMLElement *essentialEle1 = m_xmlDoc->NewElement(ESSENTIALPROPERTY); - essentialEle1->SetAttribute(SCHEMEIDURI, SCHEMEIDURI_SRD); - essentialEle1->SetAttribute(COMMON_VALUE, "1,0,0,0,0"); - asEle->InsertEndChild(essentialEle1); - - XMLElement *repEle = m_xmlDoc->NewElement(REPRESENTATION); - repEle->SetAttribute(INDEX, 0); - repEle->SetAttribute(MIMETYPE, MIMETYPE_VALUE); //? 
- repEle->SetAttribute(CODECS, CODECS_VALUE); - repEle->SetAttribute(WIDTH, mainWidth); - repEle->SetAttribute(HEIGHT, mainHeight); - memset(string, 0, 1024); - snprintf(string, 1024, "%ld/%ld", m_frameRate.num, m_frameRate.den); - repEle->SetAttribute(FRAMERATE, string); - repEle->SetAttribute(SAR, "1:1"); - repEle->SetAttribute(STARTWITHSAP, 1); - asEle->InsertEndChild(repEle); - - XMLElement *segTleEle1 = m_xmlDoc->NewElement(SEGMENTTEMPLATE); - segTleEle1->SetAttribute(TIMESCALE, m_timeScale); - segTleEle1->SetAttribute(DURATION, m_segInfo->segDuration * m_timeScale); - segTleEle1->SetAttribute(MEDIA, "track0_$Number$.m4s"); - segTleEle1->SetAttribute(STARTNUMBER, 0); - repEle->InsertEndChild(segTleEle1); + if (m_projType != VCD::OMAF::ProjectionFormat::PF_PLANAR) + { + XMLElement *essentialEle1 = m_xmlDoc->NewElement(ESSENTIALPROPERTY); + essentialEle1->SetAttribute(SCHEMEIDURI, SCHEMEIDURI_SRD); + essentialEle1->SetAttribute(COMMON_VALUE, "1,0,0,0,0"); + asEle->InsertEndChild(essentialEle1); + + XMLElement *repEle = m_xmlDoc->NewElement(REPRESENTATION); + repEle->SetAttribute(INDEX, 0); + repEle->SetAttribute(MIMETYPE, MIMETYPE_VALUE); //? + repEle->SetAttribute(CODECS, CODECS_VALUE); + repEle->SetAttribute(WIDTH, mainWidth); + repEle->SetAttribute(HEIGHT, mainHeight); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%ld/%ld", m_frameRate.num, m_frameRate.den); + repEle->SetAttribute(FRAMERATE, string); + repEle->SetAttribute(SAR, "1:1"); + repEle->SetAttribute(STARTWITHSAP, 1); + asEle->InsertEndChild(repEle); + + XMLElement *segTleEle1 = m_xmlDoc->NewElement(SEGMENTTEMPLATE); + segTleEle1->SetAttribute(TIMESCALE, m_timeScale); + segTleEle1->SetAttribute(DURATION, m_segInfo->segDuration * m_timeScale); + segTleEle1->SetAttribute(MEDIA, "track0_$Number$.m4s"); + segTleEle1->SetAttribute(STARTNUMBER, 1); + repEle->InsertEndChild(segTleEle1); + } + else + { + XMLElement *essentialEle1 = m_xmlDoc->NewElement(ESSENTIALPROPERTY); + essentialEle1->SetAttribute(SCHEMEIDURI, SCHEMEIDURI_SRD); + essentialEle1->SetAttribute(COMMON_VALUE, "1,0,0,0,0"); + asEle->InsertEndChild(essentialEle1); + + XMLElement *supplementalEle = m_xmlDoc->NewElement(SUPPLEMENTALPROPERTY); + supplementalEle->SetAttribute(SCHEMEIDURI, SCHEMEIDURI_2DQR); + //supplementalEle->SetAttribute(COMMON_VALUE, "1,0,0,0,0"); + asEle->InsertEndChild(supplementalEle); + + XMLElement *twoDQualityEle = m_xmlDoc->NewElement(OMAF_TWOD_REGIONQUALITY); + //shpQualityEle->SetAttribute(SHAPE_TYPE, trackSegCtx.codedMeta.qualityRankCoverage.get().shapeType); + //shpQualityEle->SetAttribute(REMAINING_AREA_FLAG, trackSegCtx.codedMeta.qualityRankCoverage.get().remainingArea); + //shpQualityEle->SetAttribute(QUALITY_RANKING_LOCAL_FLAG, false); + //shpQualityEle->SetAttribute(QUALITY_TYPE, trackSegCtx.codedMeta.qualityRankCoverage.get().qualityType); + //shpQualityEle->SetAttribute(DEFAULT_VIEW_IDC, 0); + supplementalEle->InsertEndChild(twoDQualityEle); + + uint32_t currQualityRanking = 1; + for (currQualityRanking = 1; currQualityRanking <= m_vsNum; currQualityRanking++) + { + std::map::iterator itStr; + for (itStr = m_streamSegCtx->begin(); itStr != m_streamSegCtx->end(); itStr++) + { + MediaStream *stream = itStr->first; + if (stream && (stream->GetMediaType() == VIDEOTYPE)) + { + VideoStream *vs = (VideoStream*)stream; + TrackSegmentCtx *segCtx = itStr->second; + if (segCtx && (segCtx->qualityRanking == currQualityRanking)) + { + uint16_t width = vs->GetSrcWidth(); + uint16_t height = vs->GetSrcHeight(); + uint8_t tileRows = 
vs->GetTileInCol(); + uint8_t tileCols = vs->GetTileInRow(); + uint16_t tileWidth = width / tileCols; + uint16_t tileHeight = height / tileRows; + + XMLElement *qualityEle = m_xmlDoc->NewElement(OMAF_QUALITY_INFO); + qualityEle->SetAttribute(QUALITY_RANKING, currQualityRanking); + qualityEle->SetAttribute(ORIGWIDTH, width); + qualityEle->SetAttribute(ORIGHEIGHT, height); + qualityEle->SetAttribute(REGIONWIDTH, tileWidth); + qualityEle->SetAttribute(REGIONHEIGHT, tileHeight); + twoDQualityEle->InsertEndChild(qualityEle); + + break; + } + } + } + } + + XMLElement *repEle = m_xmlDoc->NewElement(REPRESENTATION); + repEle->SetAttribute(INDEX, 0); + repEle->SetAttribute(MIMETYPE, MIMETYPE_VALUE); //? + repEle->SetAttribute(CODECS, CODECS_VALUE); + repEle->SetAttribute(WIDTH, mainWidth); + repEle->SetAttribute(HEIGHT, mainHeight); + memset_s(string, 1024, 0); + snprintf(string, 1024, "%ld/%ld", m_frameRate.num, m_frameRate.den); + repEle->SetAttribute(FRAMERATE, string); + repEle->SetAttribute(SAR, "1:1"); + repEle->SetAttribute(STARTWITHSAP, 1); + asEle->InsertEndChild(repEle); + + XMLElement *segTleEle1 = m_xmlDoc->NewElement(SEGMENTTEMPLATE); + segTleEle1->SetAttribute(TIMESCALE, m_timeScale); + segTleEle1->SetAttribute(DURATION, m_segInfo->segDuration * m_timeScale); + segTleEle1->SetAttribute(MEDIA, "track0_$Number$.m4s"); + segTleEle1->SetAttribute(STARTNUMBER, 1); + repEle->InsertEndChild(segTleEle1); + } } std::map::iterator itTrackCtx; @@ -505,25 +699,34 @@ int32_t MpdGenerator::WriteMpd(uint64_t totalFramesNum) itTrackCtx++) { MediaStream *stream = itTrackCtx->first; - if (stream->GetMediaType() != VIDEOTYPE) - return OMAF_ERROR_MEDIA_TYPE; - VideoStream *vs = (VideoStream*)stream; - uint32_t tilesNum = vs->GetTileInRow() * vs->GetTileInCol(); - TrackSegmentCtx *trackSegCtxs = itTrackCtx->second; - for (uint32_t i = 0; i < tilesNum; i++) + if (stream && (stream->GetMediaType() == VIDEOTYPE)) { - WriteTileTrackAS(periodEle, &(trackSegCtxs[i])); + VideoStream *vs = (VideoStream*)stream; + uint32_t tilesNum = vs->GetTileInRow() * vs->GetTileInCol(); + TrackSegmentCtx *trackSegCtxs = itTrackCtx->second; + for (uint32_t i = 0; i < tilesNum; i++) + { + WriteTileTrackAS(periodEle, &(trackSegCtxs[i])); + } + } + else if (stream && (stream->GetMediaType() == AUDIOTYPE)) + { + TrackSegmentCtx *trackSegCtx = itTrackCtx->second; + WriteAudioTrackAS(periodEle, trackSegCtx); } } - std::map::iterator itExtractorCtx; - for (itExtractorCtx = m_extractorSegCtx->begin(); - itExtractorCtx != m_extractorSegCtx->end(); - itExtractorCtx++) + if (m_extractorSegCtx->size()) { - TrackSegmentCtx *trackSegCtx = itExtractorCtx->second; - WriteExtractorTrackAS(periodEle, trackSegCtx); + std::map::iterator itExtractorCtx; + for (itExtractorCtx = m_extractorSegCtx->begin(); + itExtractorCtx != m_extractorSegCtx->end(); + itExtractorCtx++) + { + TrackSegmentCtx *trackSegCtx = itExtractorCtx->second; + WriteExtractorTrackAS(periodEle, trackSegCtx); + } } m_xmlDoc->SaveFile(m_mpdFileName); diff --git a/src/VROmafPacking/MpdGenerator.h b/src/VROmafPacking/MpdGenerator.h index e18f23a9..dfbf9aea 100644 --- a/src/VROmafPacking/MpdGenerator.h +++ b/src/VROmafPacking/MpdGenerator.h @@ -37,7 +37,6 @@ #ifndef _MPDGENERATOR_H_ #define _MPDGENERATOR_H_ -//#include "definitions.h" #include "MediaStream.h" #include "ExtractorTrackManager.h" #include "DashSegmenter.h" @@ -82,7 +81,12 @@ class MpdGenerator std::map *extractorSegCtxs, SegmentationInfo *segInfo, VCD::OMAF::ProjectionFormat projType, - Rational frameRate); + Rational 
+        uint8_t videoNum);
+
+    MpdGenerator(const MpdGenerator& src);
+
+    MpdGenerator& operator=(MpdGenerator&& other);

     //!
@@ -141,6 +145,20 @@ class MpdGenerator
     //!
     int32_t WriteTileTrackAS(XMLElement *periodEle, TrackSegmentCtx *pTrackSegCtx);

+    //!
+    //! \brief  Write AdaptationSet for audio track in mpd file
+    //!
+    //! \param  [in] periodEle
+    //!         pointer to period element that has been created for
+    //!         mpd file using tinyxml2
+    //! \param  [in] pTrackSegCtx
+    //!         pointer to track segmentation context for audio track
+    //!
+    //! \return int32_t
+    //!         ERROR_NONE if success, else failed reason
+    //!
+    int32_t WriteAudioTrackAS(XMLElement *periodEle, TrackSegmentCtx *pTrackSegCtx);
+
     //!
     //! \brief  Write AdaptationSet for extractor track in mpd file
     //!
@@ -158,8 +176,6 @@ class MpdGenerator
 private:
     std::map<MediaStream*, TrackSegmentCtx*>    *m_streamSegCtx;    //!< map of media stream and its track segmentation context
     std::map<ExtractorTrack*, TrackSegmentCtx*> *m_extractorSegCtx; //!< map of extractor track and its track segmentation context
-    //std::map *m_streamMap;                      //!< media streams map set up in OmafPackage
-    //ExtractorTrackManager *m_extractorTrackMan; //!< pointer to the extractor track manager created in OmafPackage
     SegmentationInfo *m_segInfo;                 //!< pointer to the segmentation information
     uint32_t m_miniUpdatePeriod;                 //!< minimum update period of mpd file, in the unit of second
     VCD::OMAF::ProjectionFormat m_projType;      //!< projection type of the video frame
@@ -167,12 +183,11 @@ class MpdGenerator
     char *m_publishTime;                         //!< publish time for mpd file
     char *m_presentationDur;                     //!< presentation duration of dash segments
-    //std::map m_rwpk;                            //!< region wise packing information map for all extractor tracks
-    //std::map m_covi;                            //!< content coverage information map for all extractor tracks
     char m_mpdFileName[1024];                    //!< file name of MPD file
     Rational m_frameRate;                        //!< video stream frame rate
     uint16_t m_timeScale;                        //!< timescale of video stream
     XMLDocument *m_xmlDoc;                       //!< XML doc element for writting mpd file created using tinyxml2
+    uint8_t m_vsNum;                             //!< number of video streams
 };

 VCD_NS_END;
diff --git a/src/VROmafPacking/OmafPackage.cpp b/src/VROmafPacking/OmafPackage.cpp
index ae122ce3..59e63c6f 100644
--- a/src/VROmafPacking/OmafPackage.cpp
+++ b/src/VROmafPacking/OmafPackage.cpp
@@ -31,9 +31,11 @@
 //!
 //! Created on April 30, 2019, 6:04 AM
 //!
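The OmafPackage.cpp hunks that follow replace direct construction of VideoStream/AudioStream objects with codec-specific stream-process plugins loaded at runtime: the library name is composed as `lib<PluginName>.so` under the configured plugin path, opened with `dlopen(..., RTLD_LAZY)`, and its exported `Create`/`Destroy` entry points are resolved with `dlsym`. A minimal standalone sketch of that pattern follows; the `VideoStream` forward declaration and the `CreateVideoStream`/`DestroyVideoStream` typedefs stand in for the declarations the patch takes from VideoStreamPluginAPI.h, the path and plugin name mirror the commented-out default (`/usr/local/lib/libHevcVideoStreamProcess.so`) in the hunk, and the `main()` scaffolding and local variable names are illustrative only.

```cpp
// Sketch of the dlopen/dlsym plugin-loading pattern used by AddMediaStream() below.
// Build with: g++ plugin_load_sketch.cpp -ldl
#include <dlfcn.h>
#include <cstdio>
#include <string>

class VideoStream;                              // opaque here; defined by the plugin API header
typedef VideoStream* CreateVideoStream();       // matches the "Create" symbol the patch resolves
typedef void DestroyVideoStream(VideoStream*);  // matches the "Destroy" symbol the patch resolves

int main()
{
    // Compose "<path>/lib<name>.so", mirroring the snprintf logic in AddMediaStream().
    std::string path = "/usr/local/lib";            // assumed plugin path
    std::string name = "HevcVideoStreamProcess";    // assumed plugin name
    std::string lib  = path + "/lib" + name + ".so";

    void *hdl = dlopen(lib.c_str(), RTLD_LAZY);
    if (!hdl)
    {
        std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
    }

    dlerror();                                      // clear any stale error before dlsym
    CreateVideoStream *create = (CreateVideoStream*)dlsym(hdl, "Create");
    const char *err = dlerror();
    if (err || !create)
    {
        std::fprintf(stderr, "dlsym(Create) failed: %s\n", err ? err : "null symbol");
        dlclose(hdl);
        return 1;
    }

    VideoStream *vs = create();                     // construct the stream through the plugin

    // ... hand the stream to the packing library, feed frames, etc. ...

    DestroyVideoStream *destroy = (DestroyVideoStream*)dlsym(hdl, "Destroy");
    if (destroy && vs)
        destroy(vs);                                // release it through the same plugin
    dlclose(hdl);
    return 0;
}
```

The same `Create`/`Destroy` convention appears again for the AAC audio plugin and, later in this patch, for the OMAF packing (RWPK generator) plugin, so a single helper of this shape could serve all three loaders.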
+#include + #include "OmafPackage.h" -#include "VideoStream.h" -#include "AudioStream.h" +#include "VideoStreamPluginAPI.h" +#include "AudioStreamPluginAPI.h" #include "DefaultSegmentation.h" VCD_NS_BEGIN @@ -44,12 +46,46 @@ OmafPackage::OmafPackage() m_segmentation = NULL; m_extractorTrackMan = NULL; m_isSegmentationStarted = false; - m_threadId = 0; + m_videoThreadId = 0; + m_hasAudio = false; + m_audioThreadId = 0; +} + +OmafPackage::OmafPackage(const OmafPackage& src) +{ + m_initInfo = std::move(src.m_initInfo); + m_segmentation = std::move(src.m_segmentation); + m_extractorTrackMan = std::move(src.m_extractorTrackMan); + m_isSegmentationStarted = src.m_isSegmentationStarted; + m_videoThreadId = src.m_videoThreadId; + m_hasAudio = src.m_hasAudio; + m_audioThreadId = src.m_audioThreadId; +} + +OmafPackage& OmafPackage::operator=(OmafPackage&& other) +{ + m_initInfo = std::move(other.m_initInfo); + m_segmentation = std::move(other.m_segmentation); + m_extractorTrackMan = std::move(other.m_extractorTrackMan); + m_isSegmentationStarted = other.m_isSegmentationStarted; + m_videoThreadId = other.m_videoThreadId; + m_hasAudio = other.m_hasAudio; + m_audioThreadId = other.m_audioThreadId; + + return *this; } OmafPackage::~OmafPackage() { - pthread_join(m_threadId, NULL); + if(m_videoThreadId != 0) + { + pthread_join(m_videoThreadId, NULL); + } + + if(m_audioThreadId != 0) + { + pthread_join(m_audioThreadId, NULL); + } DELETE_MEMORY(m_segmentation); DELETE_MEMORY(m_extractorTrackMan); @@ -57,11 +93,75 @@ OmafPackage::~OmafPackage() std::map::iterator it; for (it = m_streams.begin(); it != m_streams.end();) { - DELETE_MEMORY(it->second); + MediaStream *stream = it->second; + if (stream) + { + CodecId codec = stream->GetCodecId(); + std::map::iterator itHdl; + itHdl = m_streamPlugins.find(codec); + if (itHdl == m_streamPlugins.end()) + { + OMAF_LOG(LOG_ERROR, "Can't find corresponding stream plugin for codec %d\n", codec); + return; + } + void *pluginHdl = itHdl->second; + if (!pluginHdl) + { + OMAF_LOG(LOG_ERROR, "The stream process plugin handle is NULL !\n"); + return; + } + if (stream->GetMediaType() == VIDEOTYPE) + { + DestroyVideoStream* destroyVS = NULL; + destroyVS = (DestroyVideoStream*)dlsym(pluginHdl, "Destroy"); + const char *dlsymErr = dlerror(); + if (dlsymErr) + { + OMAF_LOG(LOG_ERROR, "Failed to load symbol Destroy for codec %d\n", codec); + return; + } + if (!destroyVS) + { + OMAF_LOG(LOG_ERROR, "NULL video stream destroyer !\n"); + return; + } + destroyVS((VideoStream*)(stream)); + } + else if (stream->GetMediaType() == AUDIOTYPE) + { + DestroyAudioStream* destroyAS = NULL; + destroyAS = (DestroyAudioStream*)dlsym(pluginHdl, "Destroy"); + const char *dlsymErr = dlerror(); + if (dlsymErr) + { + OMAF_LOG(LOG_ERROR, "Failed to load symbol Destroy for codec %d\n", codec); + return; + } + if (!destroyAS) + { + OMAF_LOG(LOG_ERROR, "NULL audio stream destroyer !\n"); + return; + } + destroyAS((AudioStream*)(stream)); + } + } m_streams.erase(it++); } m_streams.clear(); + + std::map::iterator itPlug; + for (itPlug = m_streamPlugins.begin(); itPlug != m_streamPlugins.end(); ) + { + void *plugHdl = itPlug->second; + if (plugHdl) + { + dlclose(plugHdl); + plugHdl = NULL; + } + m_streamPlugins.erase(itPlug++); + } + m_streamPlugins.clear(); } int32_t OmafPackage::AddMediaStream(uint8_t streamIdx, BSBuffer *bs) @@ -74,23 +174,172 @@ int32_t OmafPackage::AddMediaStream(uint8_t streamIdx, BSBuffer *bs) if (bs->mediaType == VIDEOTYPE) { - VideoStream *vs = new VideoStream(); - if (!vs) - return 
OMAF_ERROR_NULL_PTR; + if (bs->codecId == CODEC_ID_H265) + { + void *pluginHdl = NULL; + std::map::iterator it; + it = m_streamPlugins.find(CODEC_ID_H265); + if (it == m_streamPlugins.end()) + { + char hevcPluginName[1024] = { 0 };//"/usr/local/lib/libHevcVideoStreamProcess.so"; + uint32_t videoPluginPathLen = strlen(m_initInfo->videoProcessPluginPath); + if (m_initInfo->videoProcessPluginPath[videoPluginPathLen - 1] == '/') + { + snprintf(hevcPluginName, 1024, "%slib%s.so", m_initInfo->videoProcessPluginPath, m_initInfo->videoProcessPluginName); + } + else + { + snprintf(hevcPluginName, 1024, "%s/lib%s.so", m_initInfo->videoProcessPluginPath, m_initInfo->videoProcessPluginName); + } + OMAF_LOG(LOG_INFO, "Used video stream process plugin is %s\n", hevcPluginName); - ((MediaStream*)vs)->SetMediaType(VIDEOTYPE); + pluginHdl = dlopen(hevcPluginName, RTLD_LAZY); + const char *dlsymErr = dlerror(); + if (!pluginHdl) + { + OMAF_LOG(LOG_ERROR, "Failed to open HEVC video stream plugin %s\n", hevcPluginName); + if (dlsymErr) + { + OMAF_LOG(LOG_ERROR, "Get error msg %s\n", dlsymErr); + } + return OMAF_ERROR_DLOPEN; + } + m_streamPlugins.insert(std::make_pair(CODEC_ID_H265, pluginHdl)); + } + else + { + pluginHdl = it->second; + if (!pluginHdl) + { + OMAF_LOG(LOG_ERROR, "NULL HEVC video stream plugin !\n"); + return OMAF_ERROR_NULL_PTR; + } + } - vs->Initialize(streamIdx, bs, m_initInfo); + CreateVideoStream* createVS = NULL; + createVS = (CreateVideoStream*)dlsym(pluginHdl, "Create"); + const char* dlsymErr1 = dlerror(); + if (dlsymErr1) + { + OMAF_LOG(LOG_ERROR, "Failed to load symbol Create: %s\n", dlsymErr1); + return OMAF_ERROR_DLSYM; + } - m_streams.insert(std::make_pair(streamIdx, (MediaStream*)vs)); + if (!createVS) + { + OMAF_LOG(LOG_ERROR, "NULL video stream creator !\n"); + return OMAF_ERROR_NULL_PTR; + } + + VideoStream *vs = createVS(); + if (!vs) + { + OMAF_LOG(LOG_ERROR, "Failed to create HEVC video stream !\n"); + return OMAF_ERROR_NULL_PTR; + } + + ((MediaStream*)vs)->SetMediaType(VIDEOTYPE); + ((MediaStream*)vs)->SetCodecId(CODEC_ID_H265); + + m_streams.insert(std::make_pair(streamIdx, (MediaStream*)vs)); + int32_t ret = vs->Initialize(streamIdx, bs, m_initInfo); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed to initialize HEVC video stream !\n"); + return ret; + } + + vs = NULL; + } + else + { + OMAF_LOG(LOG_ERROR, "Not supported video codec %d\n", bs->codecId); + return OMAF_ERROR_INVALID_CODEC; + } } else if (bs->mediaType == AUDIOTYPE) { - AudioStream *as = new AudioStream(); - if (!as) - return OMAF_ERROR_NULL_PTR; + m_hasAudio = true; + if (bs->codecId == CODEC_ID_AAC) + { + void *pluginHdl = NULL; + std::map::iterator it; + it = m_streamPlugins.find(CODEC_ID_AAC); + if (it == m_streamPlugins.end()) + { + char aacPluginName[1024] = { 0 };//"/usr/local/lib/libAACAudioStreamProcess.so"; + uint32_t audioPluginPathLen = strlen(m_initInfo->audioProcessPluginPath); + if (m_initInfo->audioProcessPluginPath[audioPluginPathLen - 1] == '/') + { + snprintf(aacPluginName, 1024, "%slib%s.so", m_initInfo->audioProcessPluginPath, m_initInfo->audioProcessPluginName); + } + else + { + snprintf(aacPluginName, 1024, "%s/lib%s.so", m_initInfo->audioProcessPluginPath, m_initInfo->audioProcessPluginName); + } + OMAF_LOG(LOG_INFO, "Used audio stream process plugin is %s\n", aacPluginName); + + pluginHdl = dlopen(aacPluginName, RTLD_LAZY); + const char *dlsymErr = dlerror(); + if (!pluginHdl) + { + OMAF_LOG(LOG_ERROR, "Failed to open AAC audio stream plugin %s\n", aacPluginName); + if (dlsymErr) + { + 
OMAF_LOG(LOG_ERROR, "Get error msg %s\n", dlsymErr); + } + return OMAF_ERROR_DLOPEN; + } + m_streamPlugins.insert(std::make_pair(CODEC_ID_AAC, pluginHdl)); + } + else + { + pluginHdl = it->second; + if (!pluginHdl) + { + OMAF_LOG(LOG_ERROR, "NULL AAC audio stream plugin !\n"); + return OMAF_ERROR_NULL_PTR; + } + } + + CreateAudioStream* createAS = NULL; + createAS = (CreateAudioStream*)dlsym(pluginHdl, "Create"); + const char* dlsymErr1 = dlerror(); + if (dlsymErr1) + { + OMAF_LOG(LOG_ERROR, "Failed to load symbol Create: %s\n", dlsymErr1); + return OMAF_ERROR_DLSYM; + } + + if (!createAS) + { + OMAF_LOG(LOG_ERROR, "NULL audio stream creator !\n"); + return OMAF_ERROR_NULL_PTR; + } + + AudioStream *as = createAS(); + if (!as) + { + OMAF_LOG(LOG_ERROR, "Failed to create AAC audio stream !\n"); + return OMAF_ERROR_NULL_PTR; + } - ((MediaStream*)as)->SetMediaType(AUDIOTYPE); + ((MediaStream*)as)->SetMediaType(AUDIOTYPE); + ((MediaStream*)as)->SetCodecId(CODEC_ID_AAC); - m_streams.insert(std::make_pair(streamIdx, (MediaStream*)as)); + m_streams.insert(std::make_pair(streamIdx, (MediaStream*)as)); + int32_t ret = as->Initialize(streamIdx, bs, m_initInfo); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed to initialize AAC audio stream !\n"); + return ret; + } + OMAF_LOG(LOG_INFO, "Successfully add one audio stream !\n"); + as = NULL; + } + else + { + OMAF_LOG(LOG_ERROR, "Not supported audio codec %d\n", bs->codecId); + return OMAF_ERROR_INVALID_CODEC; + } } return ERROR_NONE; @@ -126,11 +375,17 @@ int32_t OmafPackage::InitOmafPackage(InitialInfo *initInfo) if (!initInfo->bsBuffers) return OMAF_ERROR_NULL_PTR; - m_initInfo = initInfo; + if (!initInfo->videoProcessPluginPath || !initInfo->videoProcessPluginName) + return OMAF_ERROR_NO_PLUGIN_SET; - uint8_t videoStreamsNum = initInfo->bsNumVideo; - if (!videoStreamsNum) - return OMAF_ERROR_VIDEO_NUM; + if (initInfo->bsNumAudio && (!initInfo->audioProcessPluginPath || !initInfo->audioProcessPluginName)) + return OMAF_ERROR_NO_PLUGIN_SET; + + m_initInfo = initInfo; + if (initInfo->logFunction) + logCallBack = (LogFunction)(initInfo->logFunction); + else + logCallBack = GlogFunction; //default log callback function uint8_t streamsNumTotal = initInfo->bsNumVideo + initInfo->bsNumAudio; uint8_t streamIdx = 0; @@ -154,42 +409,75 @@ int32_t OmafPackage::InitOmafPackage(InitialInfo *initInfo) return ERROR_NONE; } +int32_t OmafPackage::SetLogCallBack(LogFunction logFunction) +{ + if (!logFunction) + return OMAF_ERROR_NULL_PTR; + + logCallBack = logFunction; + return ERROR_NONE; +} + int32_t OmafPackage::SetFrameInfo(uint8_t streamIdx, FrameBSInfo *frameInfo) { MediaStream *stream = m_streams[streamIdx]; if (!stream) return OMAF_ERROR_NULL_PTR; - if (stream->GetMediaType() != VIDEOTYPE) + if ((stream->GetMediaType() != VIDEOTYPE) && (stream->GetMediaType() != AUDIOTYPE)) return OMAF_ERROR_MEDIA_TYPE; - int32_t ret = ((VideoStream*)stream)->AddFrameInfo(frameInfo); + int32_t ret = ERROR_NONE; + if (stream->GetMediaType() == VIDEOTYPE) + { + ret = ((VideoStream*)stream)->AddFrameInfo(frameInfo); + } + else if (stream->GetMediaType() == AUDIOTYPE) + { + //OMAF_LOG(LOG_INFO, "To add one audio frame with pts %d\n", frameInfo->pts); + ret = ((AudioStream*)stream)->AddFrameInfo(frameInfo); + } + if (ret) return OMAF_ERROR_ADD_FRAMEINFO; return ERROR_NONE; } -void* OmafPackage::SegmentationThread(void* pThis) +void* OmafPackage::VideoSegmentationThread(void* pThis) { OmafPackage *omafPackage = (OmafPackage*)pThis; - omafPackage->SegmentAllStreams(); + 
omafPackage->SegmentAllVideoStreams(); return NULL; } -void OmafPackage::SegmentAllStreams() +void OmafPackage::SegmentAllVideoStreams() { m_segmentation->VideoSegmentation(); } +void* OmafPackage::AudioSegmentationThread(void* pThis) +{ + OmafPackage *omafPackage = (OmafPackage*)pThis; + + omafPackage->SegmentAllAudioStreams(); + + return NULL; +} + +void OmafPackage::SegmentAllAudioStreams() +{ + m_segmentation->AudioSegmentation(); +} + int32_t OmafPackage::OmafPacketStream(uint8_t streamIdx, FrameBSInfo *frameInfo) { int32_t ret = SetFrameInfo(streamIdx, frameInfo); if (ret) return ret; - //printf("m_initInfo->segmentationInfo->needBufedFrames %d \n", m_initInfo->segmentationInfo->needBufedFrames); + if (!m_isSegmentationStarted) { uint32_t vsNum = 0; @@ -197,7 +485,7 @@ int32_t OmafPackage::OmafPacketStream(uint8_t streamIdx, FrameBSInfo *frameInfo) for (itMS = m_streams.begin(); itMS != m_streams.end(); itMS++) { MediaStream *stream = itMS->second; - if (stream->GetMediaType() == VIDEOTYPE) + if (stream && (stream->GetMediaType() == VIDEOTYPE)) { VideoStream *vs = (VideoStream*)stream; if (vs->GetBufferedFrameNum() >= (uint32_t)(m_initInfo->segmentationInfo->needBufedFrames)) @@ -206,12 +494,33 @@ int32_t OmafPackage::OmafPacketStream(uint8_t streamIdx, FrameBSInfo *frameInfo) } } } - if (vsNum == m_initInfo->bsNumVideo) + + uint32_t asNum = 0; + for (itMS = m_streams.begin(); itMS != m_streams.end(); itMS++) { - ret = pthread_create(&m_threadId, NULL, SegmentationThread, this); + MediaStream *stream = itMS->second; + if (stream && (stream->GetMediaType() == AUDIOTYPE)) + { + AudioStream *as = (AudioStream*)stream; + if (as->GetBufferedFrameNum() >= (uint32_t)(m_initInfo->segmentationInfo->needBufedFrames)) + { + asNum++; + } + } + } + + if ((vsNum == m_initInfo->bsNumVideo) && (asNum == m_initInfo->bsNumAudio)) + { + ret = pthread_create(&m_videoThreadId, NULL, VideoSegmentationThread, this); if (ret) return OMAF_ERROR_CREATE_THREAD; + if (m_hasAudio) + { + ret = pthread_create(&m_audioThreadId, NULL, AudioSegmentationThread, this); + if (ret) + return OMAF_ERROR_CREATE_THREAD; + } m_isSegmentationStarted = true; } } @@ -221,11 +530,19 @@ int32_t OmafPackage::OmafPacketStream(uint8_t streamIdx, FrameBSInfo *frameInfo) int32_t OmafPackage::OmafEndStreams() { - int32_t ret = m_segmentation->VideoEndSegmentation(); - if (ret) - return ret; + if (m_segmentation) + { + int32_t ret = m_segmentation->VideoEndSegmentation(); + if (ret) + return ret; - //pthread_join(m_threadId, NULL); + if (m_hasAudio) + { + ret = m_segmentation->AudioEndSegmentation(); + if (ret) + return ret; + } + } return ERROR_NONE; } diff --git a/src/VROmafPacking/OmafPackage.h b/src/VROmafPacking/OmafPackage.h index ab4851e7..693127a3 100644 --- a/src/VROmafPacking/OmafPackage.h +++ b/src/VROmafPacking/OmafPackage.h @@ -58,6 +58,10 @@ class OmafPackage //! OmafPackage(); + OmafPackage(const OmafPackage& src); + + OmafPackage& operator=(OmafPackage&& other); + //! //! \brief Destructor //! @@ -74,6 +78,17 @@ class OmafPackage //! int32_t InitOmafPackage(InitialInfo *initInfo); + //! + //! \brief Set the customized logging callback function + //! + //! \param [in] logFunction + //! the pointer to customized logging callback function + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t SetLogCallBack(LogFunction logFunction); + //! //! \brief Packet the specified media stream //! @@ -98,14 +113,24 @@ class OmafPackage private: //! - //! 
\brief Segmentation thread execution function + //! \brief Video Segmentation thread execution function //! //! \param [in] pThis //! this OmafPackage //! //! \return void //! - static void* SegmentationThread(void* pThis); + static void* VideoSegmentationThread(void* pThis); + + //! + //! \brief Audio Segmentation thread execution function + //! + //! \param [in] pThis + //! this OmafPackage + //! + //! \return void + //! + static void* AudioSegmentationThread(void* pThis); //! //! \brief Add media stream into media stream map @@ -152,18 +177,29 @@ class OmafPackage int32_t SetFrameInfo(uint8_t streamIdx, FrameBSInfo *frameInfo); //! - //! \brief Segment all media streams + //! \brief Segment all video media streams //! //! \return void //! - void SegmentAllStreams(); + void SegmentAllVideoStreams(); + + //! + //! \brief Segment all audio media streams + //! + //! \return void + //! + void SegmentAllAudioStreams(); + private: InitialInfo *m_initInfo; //!< the initial information input by library interface Segmentation *m_segmentation; //!< the segmentation for data segment ExtractorTrackManager *m_extractorTrackMan; //!< the extractor track manager std::map m_streams; //!< the media streams map + std::map m_streamPlugins; //!< the map of CodecId and corresponding stream plugin handles bool m_isSegmentationStarted; //!< whether the segmentation thread is started - pthread_t m_threadId; //!< thread index of segmentation thread + pthread_t m_videoThreadId; //!< thread index of video segmentation thread + bool m_hasAudio; + pthread_t m_audioThreadId; //!< thread index of audio segmentation thread }; VCD_NS_END; diff --git a/src/VROmafPacking/OmafPackingCommon.h b/src/VROmafPacking/OmafPackingCommon.h index 32b5afb8..7b870f83 100644 --- a/src/VROmafPacking/OmafPackingCommon.h +++ b/src/VROmafPacking/OmafPackingCommon.h @@ -34,26 +34,18 @@ #ifndef _COMMON_H_ #define _COMMON_H_ -#include "../utils/ns_def.h" -#include "../utils/error.h" -#include "../utils/GlogWrapper.h" +#include "ns_def.h" +#include "error.h" +//#include "common_data.h" +#include "OmafPackingLog.h" #include #include #include -#define DELETE_MEMORY(x) \ - if (x) \ - { \ - delete x; \ - x = NULL; \ - } - -#define DELETE_ARRAY(x) \ - if (x) \ - { \ - delete[] x; \ - x = NULL; \ - } +extern "C" +{ +#include "safestringlib/safe_mem_lib.h" +} #endif /* _COMMON_H_ */ diff --git a/src/VROmafPacking/OneVideoExtractorTrackGenerator.cpp b/src/VROmafPacking/OneVideoExtractorTrackGenerator.cpp deleted file mode 100644 index 14e7ffd2..00000000 --- a/src/VROmafPacking/OneVideoExtractorTrackGenerator.cpp +++ /dev/null @@ -1,491 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -//! -//! \file: OneVideoExtractorTrackGenerator.cpp -//! \brief: One video extractor track generator class implementation -//! -//! Created on April 30, 2019, 6:04 AM -//! - -#include "OneVideoExtractorTrackGenerator.h" -#include "VideoStream.h" -#include "OneVideoRegionWisePackingGenerator.h" - -VCD_NS_BEGIN - -OneVideoExtractorTrackGenerator::~OneVideoExtractorTrackGenerator() -{ - DELETE_ARRAY(m_videoIdxInMedia); - DELETE_ARRAY(m_tilesInViewport); - DELETE_MEMORY(m_viewInfo); - DELETE_MEMORY(m_newSPSNalu); - DELETE_MEMORY(m_newPPSNalu); -} - -uint16_t OneVideoExtractorTrackGenerator::CalculateViewportNum() -{ - if (!m_videoIdxInMedia) - return 0; - - std::map::iterator it; - it = m_streams->find(m_videoIdxInMedia[0]); - if (it == m_streams->end()) - return 0; - VideoStream *vs = (VideoStream*)(it->second); - uint8_t tileInRow = vs->GetTileInRow(); - uint8_t tileInCol = vs->GetTileInCol(); - uint16_t viewportNum = tileInRow * tileInCol; - - return viewportNum; -} - -int32_t OneVideoExtractorTrackGenerator::FillDstRegionWisePacking( - uint8_t viewportIdx, - RegionWisePacking *dstRwpk) -{ - dstRwpk->projPicWidth = m_videoWidth; - dstRwpk->projPicHeight = m_videoHeight; - - int32_t ret = m_rwpkGen->GenerateDstRwpk(viewportIdx, dstRwpk); - if (ret) - return ret; - - m_packedPicWidth = m_rwpkGen->GetPackedPicWidth(); - m_packedPicHeight = m_rwpkGen->GetPackedPicHeight(); - - return ERROR_NONE; -} - -int32_t OneVideoExtractorTrackGenerator::FillTilesMergeDirection( - uint8_t viewportIdx, - TilesMergeDirectionInCol *tilesMergeDir) -{ - if (!tilesMergeDir) - return OMAF_ERROR_NULL_PTR; - - int32_t ret = m_rwpkGen->GenerateTilesMergeDirection(viewportIdx, tilesMergeDir); - if (ret) - return ret; - - return ERROR_NONE; -} - -int32_t OneVideoExtractorTrackGenerator::FillDstContentCoverage( - uint8_t viewportIdx, - ContentCoverage *dstCovi) -{ - uint8_t tilesNumInViewRow = m_rwpkGen->GetTilesNumInViewportRow(); - uint8_t tileRowNumInView = m_rwpkGen->GetTileRowNumInViewport(); - - uint32_t projRegLeft = (viewportIdx % m_tileInRow) * m_tileWidth; - uint32_t projRegTop = (viewportIdx / m_tileInRow) * m_tileHeight; - uint32_t projRegWidth = 0; - uint32_t projRegHeight = 0; - - uint8_t viewIdxInRow = viewportIdx % m_tileInRow; - uint8_t viewIdxInCol = viewportIdx / m_tileInRow; - - if ((m_tileInRow - viewIdxInRow) >= tilesNumInViewRow) - { - for (uint8_t i = viewportIdx; i < (viewportIdx + tilesNumInViewRow); i++) - { - projRegWidth += m_tilesInfo[i].tileWidth; - } - } - else - { - for (uint8_t i = viewportIdx; i < (viewportIdx + (m_tileInRow - viewIdxInRow)); i++) - { - projRegWidth += m_tilesInfo[i].tileWidth; - } - for (uint8_t i = (viewIdxInCol*m_tileInRow); i < (viewIdxInCol*m_tileInRow + (tilesNumInViewRow-(m_tileInRow-viewIdxInRow))); i++) - { - projRegWidth += m_tilesInfo[i].tileWidth; - } - } - - if ((m_tileInCol - viewIdxInCol) >= tileRowNumInView) - { - for (uint8_t i = viewportIdx; i < (viewportIdx+m_tileInRow*tileRowNumInView); ) - { - projRegHeight 
+= m_tilesInfo[i].tileHeight; - i += m_tileInRow; - } - } - else - { - for (uint8_t i = viewportIdx; i < (viewportIdx+(m_tileInCol-viewIdxInCol)*m_tileInRow);) - { - projRegHeight += m_tilesInfo[i].tileHeight; - i += m_tileInRow; - } - for (uint8_t i = viewIdxInRow; i < (viewIdxInRow+(tileRowNumInView-(m_tileInCol-viewIdxInCol))*m_tileInRow); ) - { - projRegHeight += m_tilesInfo[i].tileHeight; - i += m_tileInRow; - } - } - - if (m_projType == VCD::OMAF::ProjectionFormat::PF_ERP) - { - dstCovi->coverageShapeType = 1; - } - else - { - dstCovi->coverageShapeType = 0; - } - - dstCovi->numRegions = 1; - dstCovi->viewIdcPresenceFlag = false; - dstCovi->defaultViewIdc = 0; - - dstCovi->sphereRegions = new SphereRegion[dstCovi->numRegions]; - if (!dstCovi->sphereRegions) - return OMAF_ERROR_NULL_PTR; - - SphereRegion *sphereRegion = &(dstCovi->sphereRegions[0]); - memset(sphereRegion, 0, sizeof(SphereRegion)); - sphereRegion->viewIdc = 0; - sphereRegion->centreAzimuth = (int32_t)((((m_videoWidth / 2) - (float)(projRegLeft + projRegWidth / 2)) * 360 * 65536) / m_videoWidth); - sphereRegion->centreElevation = (int32_t)((((m_videoHeight / 2) - (float)(projRegTop + projRegHeight / 2)) * 180 * 65536) / m_videoHeight); - sphereRegion->centreTilt = 0; - sphereRegion->azimuthRange = (uint32_t)((projRegWidth * 360.f * 65536) / m_videoWidth); - sphereRegion->elevationRange = (uint32_t)((projRegHeight * 180.f * 65536) / m_videoHeight); - sphereRegion->interpolate = 0; - - return ERROR_NONE; -} - -int32_t OneVideoExtractorTrackGenerator::CheckAndFillInitInfo() -{ - if (!m_initInfo) - return OMAF_ERROR_NULL_PTR; - - if (m_initInfo->bsNumVideo != 1) - return OMAF_ERROR_VIDEO_NUM; - - uint8_t actualVideoNum = 0; - uint8_t totalStreamNum = m_initInfo->bsNumVideo + m_initInfo->bsNumAudio; - uint8_t vsIdx = 0; - m_videoIdxInMedia = new uint8_t[totalStreamNum]; - if (!m_videoIdxInMedia) - return OMAF_ERROR_NULL_PTR; - - for (uint8_t streamIdx = 0; streamIdx < totalStreamNum; streamIdx++) - { - BSBuffer *bs = &(m_initInfo->bsBuffers[streamIdx]); - if (bs->mediaType == VIDEOTYPE) - { - m_videoIdxInMedia[vsIdx] = streamIdx; - vsIdx++; - actualVideoNum++; - } - } - - if (actualVideoNum != m_initInfo->bsNumVideo) - return OMAF_ERROR_VIDEO_NUM; - - - std::map::iterator it; - it = m_streams->find(m_videoIdxInMedia[0]); - if (it == m_streams->end()) - return OMAF_ERROR_STREAM_NOT_FOUND; - - VideoStream *vs1 = (VideoStream*)(it->second); - uint16_t width1 = vs1->GetSrcWidth(); - uint16_t height1 = vs1->GetSrcHeight(); - - (m_initInfo->viewportInfo)->inWidth = width1; - (m_initInfo->viewportInfo)->inHeight = height1; - (m_initInfo->viewportInfo)->tileInRow = vs1->GetTileInRow(); - (m_initInfo->viewportInfo)->tileInCol = vs1->GetTileInCol(); - (m_initInfo->viewportInfo)->outGeoType = 2; //viewport - (m_initInfo->viewportInfo)->inGeoType = vs1->GetProjType(); - - m_videoWidth = width1; - m_videoHeight = height1; - m_tileInRow = vs1->GetTileInRow(); - m_tileInCol = vs1->GetTileInCol(); - m_tilesInfo = vs1->GetAllTilesInfo(); - m_tileWidth = m_tilesInfo[0].tileWidth; - m_tileHeight = m_tilesInfo[0].tileHeight; - m_projType = (VCD::OMAF::ProjectionFormat)(vs1->GetProjType()); - - if ((m_initInfo->segmentationInfo)->extractorTracksPerSegThread == 0) - { - if ((m_tileInRow * m_tileInCol) % 4 == 0) - { - (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 4; - } - else if ((m_tileInRow * m_tileInCol) % 3 == 0) - { - (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 3; - } - else if ((m_tileInRow * m_tileInCol) % 
2 == 0) - { - (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 2; - } - else - { - (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 1; - } - } - - return ERROR_NONE; -} - -int32_t OneVideoExtractorTrackGenerator::Initialize() -{ - if (!m_initInfo) - return OMAF_ERROR_NULL_PTR; - - int32_t ret = CheckAndFillInitInfo(); - if (ret) - return ret; - - std::map::iterator it; - it = m_streams->find(m_videoIdxInMedia[0]); //high resolution video stream - if (it == m_streams->end()) - return OMAF_ERROR_STREAM_NOT_FOUND; - - VideoStream *vs = (VideoStream*)(it->second); - m_360scvpHandle = vs->Get360SCVPHandle(); - m_360scvpParam = vs->Get360SCVPParam(); - m_origVPSNalu = vs->GetVPSNalu(); - m_origSPSNalu = vs->GetSPSNalu(); - m_origPPSNalu = vs->GetPPSNalu(); - - m_tilesInViewport = new TileDef[1024]; - if (!m_tilesInViewport) - return OMAF_ERROR_NULL_PTR; - - m_viewInfo = new Param_ViewPortInfo; - if (!m_viewInfo) - return OMAF_ERROR_NULL_PTR; - - m_viewInfo->viewportWidth = (m_initInfo->viewportInfo)->viewportWidth; - m_viewInfo->viewportHeight = (m_initInfo->viewportInfo)->viewportHeight; - m_viewInfo->viewPortPitch = (m_initInfo->viewportInfo)->viewportPitch; - m_viewInfo->viewPortYaw = (m_initInfo->viewportInfo)->viewportYaw; - m_viewInfo->viewPortFOVH = (m_initInfo->viewportInfo)->horizontalFOVAngle; - m_viewInfo->viewPortFOVV = (m_initInfo->viewportInfo)->verticalFOVAngle; - m_viewInfo->geoTypeOutput = (EGeometryType)((m_initInfo->viewportInfo)->outGeoType); - m_viewInfo->geoTypeInput = (EGeometryType)((m_initInfo->viewportInfo)->inGeoType); - m_viewInfo->faceWidth = (m_initInfo->viewportInfo)->inWidth; - m_viewInfo->faceHeight = (m_initInfo->viewportInfo)->inHeight; - m_viewInfo->tileNumRow = (m_initInfo->viewportInfo)->tileInCol; - m_viewInfo->tileNumCol = (m_initInfo->viewportInfo)->tileInRow; - - ret = I360SCVP_SetParameter(m_360scvpHandle, ID_SCVP_PARAM_VIEWPORT, (void*)m_viewInfo); - if (ret) - return OMAF_ERROR_SCVP_SET_FAILED; - - m_360scvpParam->paramViewPort.viewportWidth = (m_initInfo->viewportInfo)->viewportWidth; - m_360scvpParam->paramViewPort.viewportHeight = (m_initInfo->viewportInfo)->viewportHeight; - m_360scvpParam->paramViewPort.viewPortPitch = (m_initInfo->viewportInfo)->viewportPitch; - m_360scvpParam->paramViewPort.viewPortYaw = (m_initInfo->viewportInfo)->viewportYaw; - m_360scvpParam->paramViewPort.viewPortFOVH = (m_initInfo->viewportInfo)->horizontalFOVAngle; - m_360scvpParam->paramViewPort.viewPortFOVV = (m_initInfo->viewportInfo)->verticalFOVAngle; - m_360scvpParam->paramViewPort.geoTypeOutput = (EGeometryType)((m_initInfo->viewportInfo)->outGeoType); - m_360scvpParam->paramViewPort.geoTypeInput = (EGeometryType)((m_initInfo->viewportInfo)->inGeoType); - m_360scvpParam->paramViewPort.faceWidth = (m_initInfo->viewportInfo)->inWidth; - m_360scvpParam->paramViewPort.faceHeight = (m_initInfo->viewportInfo)->inHeight; - m_360scvpParam->paramViewPort.tileNumRow = (m_initInfo->viewportInfo)->tileInCol; - m_360scvpParam->paramViewPort.tileNumCol = (m_initInfo->viewportInfo)->tileInRow; - - ret = I360SCVP_process(m_360scvpParam, m_360scvpHandle); - if (ret) - return OMAF_ERROR_SCVP_PROCESS_FAILED; - - Param_ViewportOutput paramViewportOutput; - m_tilesNumInViewport = I360SCVP_getFixedNumTiles( - m_tilesInViewport, - ¶mViewportOutput, - m_360scvpHandle); - - m_finalViewportWidth = paramViewportOutput.dstWidthAlignTile; - m_finalViewportHeight = paramViewportOutput.dstHeightAlignTile; - - if (!m_tilesNumInViewport || m_tilesNumInViewport > 1024) - 
return OMAF_ERROR_SCVP_INCORRECT_RESULT; - - m_rwpkGen = new OneVideoRegionWisePackingGenerator(); - if (!m_rwpkGen) - return OMAF_ERROR_NULL_PTR; - - ret = m_rwpkGen->Initialize(m_streams, m_videoIdxInMedia, - m_tilesNumInViewport, m_tilesInViewport, - m_finalViewportWidth, m_finalViewportHeight); - if (ret) - return ret; - - return ERROR_NONE; -} - -int32_t OneVideoExtractorTrackGenerator::GenerateExtractorTracks(std::map& extractorTrackMap, std::map *streams) -{ - if (!streams) - return OMAF_ERROR_NULL_PTR; - - m_viewportNum = CalculateViewportNum(); - if (!m_viewportNum) - return OMAF_ERROR_VIEWPORT_NUM; - - for (uint8_t i = 0; i < m_viewportNum; i++) - { - ExtractorTrack *extractorTrack = new ExtractorTrack(i, streams, (m_initInfo->viewportInfo)->inGeoType); - if (!extractorTrack) - { - std::map::iterator itET = extractorTrackMap.begin(); - for ( ; itET != extractorTrackMap.end(); ) - { - ExtractorTrack *extractorTrack1 = itET->second; - DELETE_MEMORY(extractorTrack1); - extractorTrackMap.erase(itET++); - } - extractorTrackMap.clear(); - return OMAF_ERROR_NULL_PTR; - } - - int32_t retInit = extractorTrack->Initialize(); - if (retInit) - { - LOG(ERROR) << "Failed to initialize extractor track !" << std::endl; - - std::map::iterator itET = extractorTrackMap.begin(); - for ( ; itET != extractorTrackMap.end(); ) - { - ExtractorTrack *extractorTrack1 = itET->second; - DELETE_MEMORY(extractorTrack1); - extractorTrackMap.erase(itET++); - } - extractorTrackMap.clear(); - DELETE_MEMORY(extractorTrack); - return retInit; - } - - FillDstRegionWisePacking(i, extractorTrack->GetRwpk()); - - FillTilesMergeDirection(i, extractorTrack->GetTilesMergeDir()); - - FillDstContentCoverage(i, extractorTrack->GetCovi()); - - extractorTrackMap.insert(std::make_pair(i, extractorTrack)); - } - - int32_t ret = GenerateNewSPS(); - if (ret) - return ret; - - ret = GenerateNewPPS(); - if (ret) - return ret; - - std::map::iterator it; - for (it = extractorTrackMap.begin(); it != extractorTrackMap.end(); it++) - { - ExtractorTrack *extractorTrack = it->second; - extractorTrack->SetNalu(m_origVPSNalu, extractorTrack->GetVPS()); - extractorTrack->SetNalu(m_newSPSNalu, extractorTrack->GetSPS()); - extractorTrack->SetNalu(m_newPPSNalu, extractorTrack->GetPPS()); - - PicResolution highRes = { m_videoWidth, m_videoHeight }; - - std::list* picResList = extractorTrack->GetPicRes(); - picResList->push_back(highRes); - } - - return ERROR_NONE; -} - -int32_t OneVideoExtractorTrackGenerator::GenerateNewSPS() -{ - if (!m_packedPicWidth || !m_packedPicHeight) - return OMAF_ERROR_BAD_PARAM; - - if (!m_origSPSNalu || !m_360scvpParam || !m_360scvpHandle) - return OMAF_ERROR_NULL_PTR; - - if (!(m_origSPSNalu->data) || !(m_origSPSNalu->dataSize)) - return OMAF_ERROR_INVALID_SPS; - - m_newSPSNalu = new Nalu; - if (!m_newSPSNalu) - return OMAF_ERROR_NULL_PTR; - - m_newSPSNalu->data = new uint8_t[1024];//include start codes - if (!m_newSPSNalu->data) - return OMAF_ERROR_NULL_PTR; - - m_360scvpParam->pInputBitstream = m_origSPSNalu->data; - m_360scvpParam->inputBitstreamLen = m_origSPSNalu->dataSize; - m_360scvpParam->destWidth = m_packedPicWidth; - m_360scvpParam->destHeight = m_packedPicHeight; - m_360scvpParam->pOutputBitstream = m_newSPSNalu->data; - - int32_t ret = I360SCVP_GenerateSPS(m_360scvpParam, m_360scvpHandle); - if (ret) - return OMAF_ERROR_SCVP_OPERATION_FAILED; - - m_newSPSNalu->dataSize = m_360scvpParam->outputBitstreamLen; - m_newSPSNalu->startCodesSize = HEVC_STARTCODES_LEN; - m_newSPSNalu->naluType = HEVC_SPS_NALU_TYPE; 
- - return ERROR_NONE; -} - -int32_t OneVideoExtractorTrackGenerator::GenerateNewPPS() -{ - TileArrangement *tileArray = m_rwpkGen->GetMergedTilesArrange(); - if (!tileArray) - return OMAF_ERROR_NULL_PTR; - - m_newPPSNalu = new Nalu; - if (!m_newPPSNalu) - return OMAF_ERROR_NULL_PTR; - - m_newPPSNalu->data = new uint8_t[1024];//include start codes - if (!m_newPPSNalu->data) - return OMAF_ERROR_NULL_PTR; - - m_360scvpParam->pInputBitstream = m_origPPSNalu->data; //includes start codes - m_360scvpParam->inputBitstreamLen = m_origPPSNalu->dataSize; - - m_360scvpParam->pOutputBitstream = m_newPPSNalu->data; - - int32_t ret = I360SCVP_GeneratePPS(m_360scvpParam, tileArray, m_360scvpHandle); - if (ret) - return OMAF_ERROR_SCVP_OPERATION_FAILED; - - m_newPPSNalu->dataSize = m_360scvpParam->outputBitstreamLen; - m_newPPSNalu->startCodesSize = HEVC_STARTCODES_LEN; - m_newPPSNalu->naluType = HEVC_PPS_NALU_TYPE; - - return ERROR_NONE; -} - -VCD_NS_END diff --git a/src/VROmafPacking/OneVideoExtractorTrackGenerator.h b/src/VROmafPacking/OneVideoExtractorTrackGenerator.h deleted file mode 100644 index 7959e9f5..00000000 --- a/src/VROmafPacking/OneVideoExtractorTrackGenerator.h +++ /dev/null @@ -1,258 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -//! -//! \file: OneVideoExtractorTrackGenerator.h -//! \brief: One video stream extractor track generator class definition -//! \detail: Define the operation of extractor track generator for one -//! video stream -//! -//! Created on April 30, 2019, 6:04 AM -//! - -#ifndef _ONEVIDEOEXTRACTORTRACKGENERATOR_H_ -#define _ONEVIDEOEXTRACTORTRACKGENERATOR_H_ - -#include "ExtractorTrackGenerator.h" -#include "../utils/OmafStructure.h" - -VCD_NS_BEGIN - -//! -//! \class OneVideoExtractorTrackGenerator -//! \brief Define the operation of extractor track generator -//! for one video stream -//! - -class OneVideoExtractorTrackGenerator : public ExtractorTrackGenerator -{ -public: - //! - //! \brief Constructor - //! 
- OneVideoExtractorTrackGenerator() - { - m_videoIdxInMedia = NULL; - m_360scvpParam = NULL; - m_360scvpHandle = NULL; - m_tilesInViewport = NULL; - m_viewInfo = NULL; - m_tilesNumInViewport = 0; - m_finalViewportWidth = 0; - m_finalViewportHeight = 0; - m_videoWidth = 0; - m_videoHeight = 0; - m_tileInRow = 0; - m_tileInCol = 0; - m_tileWidth = 0; - m_tileHeight = 0; - m_tilesInfo = NULL; - m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; - m_packedPicWidth = 0; - m_packedPicHeight = 0; - m_origVPSNalu = NULL; - m_origSPSNalu = NULL; - m_origPPSNalu = NULL; - }; - - //! - //! \brief Copy Constructor - //! - //! \param [in] initInfo - //! initial information input by the library interface - //! \param [in] streams - //! pointer to the media streams map set up in OmafPackage - //! - OneVideoExtractorTrackGenerator(InitialInfo *initInfo, std::map *streams) : ExtractorTrackGenerator(initInfo, streams) - { - m_videoIdxInMedia = NULL; - m_360scvpParam = NULL; - m_360scvpHandle = NULL; - m_tilesInViewport = NULL; - m_viewInfo = NULL; - m_tilesNumInViewport = 0; - m_finalViewportWidth = 0; - m_finalViewportHeight = 0; - m_videoWidth = 0; - m_videoHeight = 0; - m_tileInRow = 0; - m_tileInCol = 0; - m_tileWidth = 0; - m_tileHeight = 0; - m_tilesInfo = NULL; - m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; - m_packedPicWidth = 0; - m_packedPicHeight = 0; - m_origVPSNalu = NULL; - m_origSPSNalu = NULL; - m_origPPSNalu = NULL; - }; - - //! - //! \brief Destructor - //! - virtual ~OneVideoExtractorTrackGenerator(); - - //! - //! \brief Initialize the extractor track generator - //! for two resolutions video streams - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t Initialize(); - - //! - //! \brief Generate all extractor tracks - //! - //! \param [in] extractorTrackMap - //! pointer to extractor tracks map which holds - //! all extractor tracks - //! \param [in] streams - //! pointer to the media streams map set up in OmafPackage - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t GenerateExtractorTracks(std::map& extractorTrackMap, std::map *streams); - -private: - //! - //! \brief Calculate the total viewport number - //! according to the initial information - //! - //! \return uint16_t - //! the total viewport number - //! - virtual uint16_t CalculateViewportNum(); - - //! - //! \brief Fill the region wise packing information - //! for the specified viewport - //! - //! \param [in] viewportIdx - //! the index of the specified viewport - //! \param [in] dstRwpk - //! pointer to the region wise packing information for the - //! specified viewport generated according to srcRwpk and - //! two resolutions tiles merging strategy - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t FillDstRegionWisePacking(uint8_t viewportIdx, RegionWisePacking *dstRwpk); - - //! - //! \brief Fill the tiles merging direction information - //! for the specified viewport - //! - //! \param [in] viewportIdx - //! the index of the specified viewport - //! \param [in] tilesMergeDir - //! pointer to the tiles merging direction information - //! for the specified viewport generated according to - //! two resolutions tiles merging strategy - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t FillTilesMergeDirection( - uint8_t viewportIdx, - TilesMergeDirectionInCol *tilesMergeDir); - - //! - //! 
\brief Fill the content coverage information - //! for the specified viewport - //! - //! \param [in] viewportIdx - //! the index of the specified viewport - //! \param [in] dstCovi - //! pointer to the content coverage information for the - //! specified viewport generated according to srcCovi and - //! two resolutions tiles merging strategy - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t FillDstContentCoverage(uint8_t viewportIdx, ContentCoverage *dstCovi); - - //! - //! \brief Check the validation of initial information - //! input by library interface, like whether the - //! TilesMergingType is correct compared to actual - //! streams information, meanwhile fill the lacked - //! information according to actual streams information - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t CheckAndFillInitInfo(); - - //! - //! \brief Generate the new SPS for tiles merged bitstream - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t GenerateNewSPS(); - - //! - //! \brief Generate the new PPS for tiles merged bitstream - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t GenerateNewPPS(); - -private: - uint8_t *m_videoIdxInMedia; //!< pointer to index of video streams in media streams - param_360SCVP *m_360scvpParam; //!< 360SCVP library initial parameter - void *m_360scvpHandle; //!< 360SCVP library handle - TileDef *m_tilesInViewport; //!< the list of tiles inside the viewport - Param_ViewPortInfo *m_viewInfo; //!< pointer to the viewport information for 360SCVP library - int32_t m_tilesNumInViewport; //!< tiles number in viewport - int32_t m_finalViewportWidth; //!< the final viewport width calculated by 360SCVP library - int32_t m_finalViewportHeight; //!< the final viewport height calculated by 360SCVP library - - uint16_t m_videoWidth; //!< frame width of high resolution video stream - uint16_t m_videoHeight; //!< frame height of high resolution video stream - //uint16_t m_lowResWidth; - //uint16_t m_lowResHeight; - uint8_t m_tileInRow; //!< the number of high resolution tiles in one row in original picture - uint8_t m_tileInCol; //!< the number of high resolution tiles in one column in original picture - uint16_t m_tileWidth; //!< the width of high resolution tile - uint16_t m_tileHeight; //!< the height of high resolution tile - TileInfo *m_tilesInfo; //!< pointer to tile information of all tiles in high resolution video stream - VCD::OMAF::ProjectionFormat m_projType; //!< the projection type - uint32_t m_packedPicWidth; //!< the width of tiles merged picture - uint32_t m_packedPicHeight; //!< the height of tiles merged picture - Nalu *m_origVPSNalu; //!< the pointer to original VPS nalu of high resolution video stream - Nalu *m_origSPSNalu; //!< the pointer to original SPS nalu of high resolution video stream - Nalu *m_origPPSNalu; //!< the pointer to original PPS nalu of high resolution video stream -}; - -VCD_NS_END; -#endif /* _TWORESEXTRACTORTRACKGENERATOR_H_ */ diff --git a/src/VROmafPacking/RegionWisePackingGenerator.cpp b/src/VROmafPacking/RegionWisePackingGenerator.cpp new file mode 100644 index 00000000..443cd7e9 --- /dev/null +++ b/src/VROmafPacking/RegionWisePackingGenerator.cpp @@ -0,0 +1,320 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: RegionWisePackingGenerator.cpp +//! \brief: Region wise packing generator wrapper class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! + +#include + +#include "VideoStreamPluginAPI.h" +#include "RegionWisePackingGenerator.h" + +VCD_NS_BEGIN + +RegionWisePackingGenerator::RegionWisePackingGenerator() +{ + m_pluginHdl = NULL; + m_rwpkGen = NULL; +} + +RegionWisePackingGenerator::~RegionWisePackingGenerator() +{ + if (m_pluginHdl) + { + if (m_rwpkGen) + { + DestroyRWPKGenerator* destroyRWPKGen = NULL; + destroyRWPKGen = (DestroyRWPKGenerator*)dlsym(m_pluginHdl, "Destroy"); + const char *dlsym_error = dlerror(); + if (dlsym_error) + { + OMAF_LOG(LOG_ERROR, "Failed to load symbol Destroy !\n"); + return; + } + + if (!destroyRWPKGen) + { + OMAF_LOG(LOG_ERROR, "NULL RWPK destructor !\n"); + return; + } + destroyRWPKGen(m_rwpkGen); + } + + dlclose(m_pluginHdl); + m_pluginHdl = NULL; + } +} + +int32_t RegionWisePackingGenerator::Initialize( + const char *rwpkGenPluginPath, + const char *rwpkGenPluginName, + std::map *streams, + uint8_t *videoIdxInMedia, + uint16_t tilesNumInViewport, + uint16_t maxSelectedTilesNum, + LogFunction logging) +{ + if (!streams || !videoIdxInMedia) + return OMAF_ERROR_NULL_PTR; + + if (!rwpkGenPluginName) + { + OMAF_LOG(LOG_ERROR, "NULL OMAF DASH Packing plugin name !\n"); + return OMAF_INVALID_PLUGIN_PARAM; + } + + if (!rwpkGenPluginPath) + rwpkGenPluginPath = "./"; + + int32_t ret = ERROR_NONE; + + uint32_t pathLen = strlen(rwpkGenPluginPath); + + char pluginLibName[1024]; + memset_s(pluginLibName, 1024, 0); + if (rwpkGenPluginPath[pathLen - 1] == '/') + { + snprintf(pluginLibName, 1024, "%slib%s.so", rwpkGenPluginPath, rwpkGenPluginName); + } + else + { + snprintf(pluginLibName, 1024, "%s/lib%s.so", rwpkGenPluginPath, rwpkGenPluginName); + } + + OMAF_LOG(LOG_INFO, "The plugin is %s\n", pluginLibName); + + m_pluginHdl = dlopen(pluginLibName, RTLD_LAZY); + const char* dlsymErr1 = dlerror(); + if (!m_pluginHdl) + { + OMAF_LOG(LOG_ERROR, "Failed to open plugin lib %s\n", pluginLibName); + if (dlsymErr1) + { + OMAF_LOG(LOG_ERROR, "Get error msg %s\n", dlsymErr1); + } + return OMAF_ERROR_DLOPEN; + } + + CreateRWPKGenerator* createRWPKGen = 
NULL; + createRWPKGen = (CreateRWPKGenerator*)dlsym(m_pluginHdl, "Create"); + const char* dlsymErr2 = dlerror(); + if (dlsymErr2) + { + OMAF_LOG(LOG_ERROR, "Failed to load symbol Create: %s\n", dlsymErr2); + return OMAF_ERROR_DLSYM; + } + + if (!createRWPKGen) + { + OMAF_LOG(LOG_ERROR, "NULL RWPK generator !\n"); + return OMAF_ERROR_NULL_PTR; + } + m_rwpkGen = createRWPKGen(); + if (!m_rwpkGen) + { + OMAF_LOG(LOG_ERROR, "Failed to create RWPK generator !\n"); + return OMAF_ERROR_NULL_PTR; + } + + std::map videoStreams; + std::map::iterator itStr; + for (itStr = streams->begin(); itStr != streams->end(); itStr++) + { + MediaStream *str = itStr->second; + if (str->GetMediaType() == VIDEOTYPE) + { + VideoStream *vs = (VideoStream*)str; + VideoStreamInfo *vsInfo = new VideoStreamInfo; + if (!vsInfo) + { + std::map::iterator itInfo; + for (itInfo = videoStreams.begin(); itInfo != videoStreams.end();) + { + VideoStreamInfo *info = itInfo->second; + DELETE_MEMORY(info); + videoStreams.erase(itInfo++); + } + videoStreams.clear(); + return OMAF_ERROR_NULL_PTR; + } + + vsInfo->tilesNumInRow = vs->GetTileInRow(); + vsInfo->tilesNumInCol = vs->GetTileInCol(); + vsInfo->srcRWPK = vs->GetSrcRwpk(); + + videoStreams.insert(std::make_pair(itStr->first, vsInfo)); + } + } + + ret = m_rwpkGen->Initialize( + &videoStreams, videoIdxInMedia, + tilesNumInViewport, maxSelectedTilesNum, (void*)logging); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed to initialize RWPK generator !\n"); + } + + std::map::iterator itInfo1; + for (itInfo1 = videoStreams.begin(); itInfo1 != videoStreams.end();) + { + VideoStreamInfo *info1 = itInfo1->second; + DELETE_MEMORY(info1); + videoStreams.erase(itInfo1++); + } + videoStreams.clear(); + + return ret; +} + +int32_t RegionWisePackingGenerator::GenerateDstRwpk( + TileDef *tilesInViewport, + RegionWisePacking *dstRwpk) +{ + int32_t ret = ERROR_NONE; + if (m_rwpkGen) + { + ret = m_rwpkGen->GenerateDstRwpk(tilesInViewport, dstRwpk); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed to generate destinate RWPK !\n"); + } + } + else + { + OMAF_LOG(LOG_ERROR, "There is no RWPK generator !\n"); + ret = OMAF_ERROR_NULL_PTR; + } + return ret; +} + +int32_t RegionWisePackingGenerator::GenerateTilesMergeDirection( + TileDef *tilesInViewport, + TilesMergeDirectionInCol *tilesMergeDir) +{ + int32_t ret = ERROR_NONE; + if (m_rwpkGen) + { + ret = m_rwpkGen->GenerateTilesMergeDirection(tilesInViewport, tilesMergeDir); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed to generate tiles merge direction !\n"); + } + } + else + { + OMAF_LOG(LOG_ERROR, "There is no RWPK generator !\n"); + ret = OMAF_ERROR_NULL_PTR; + } + return ret; +} + +uint32_t RegionWisePackingGenerator::GetTotalTilesNumInPackedPic() +{ + uint32_t num = 0; + if (m_rwpkGen) + { + num = m_rwpkGen->GetTilesNumInPackedPic(); + } + else + { + OMAF_LOG(LOG_ERROR, "There is no RWPK generator !\n"); + } + + return num; +} + +uint32_t RegionWisePackingGenerator::GetPackedPicWidth() +{ + uint32_t width = 0; + if (m_rwpkGen) + { + width = m_rwpkGen->GetPackedPicWidth(); + } + else + { + OMAF_LOG(LOG_ERROR, "There is no RWPK generator !\n"); + } + + return width; +} + +uint32_t RegionWisePackingGenerator::GetPackedPicHeight() +{ + uint32_t height = 0; + if (m_rwpkGen) + { + height = m_rwpkGen->GetPackedPicHeight(); + } + else + { + OMAF_LOG(LOG_ERROR, "There is no RWPK generator !\n"); + } + + return height; +} + +TileArrangement* RegionWisePackingGenerator::GetMergedTilesArrange() +{ + TileArrangement *tilesArr = NULL; + if (m_rwpkGen) + { + tilesArr = 
m_rwpkGen->GetMergedTilesArrange(); + } + else + { + OMAF_LOG(LOG_ERROR, "There is no RWPK generator !\n"); + } + + return tilesArr; +} + +int32_t RegionWisePackingGenerator::GenerateMergedTilesArrange(TileDef *tilesInViewport) +{ + if (!tilesInViewport) + return OMAF_ERROR_NULL_PTR; + + int32_t ret = ERROR_NONE; + if (m_rwpkGen) + { + ret = m_rwpkGen->GenerateMergedTilesArrange(tilesInViewport); + if (ret) + { + OMAF_LOG(LOG_ERROR, "Failed to generate merged tiles arrangement !\n"); + } + } + else + { + OMAF_LOG(LOG_ERROR, "There is no RWPK generator !\n"); + ret = OMAF_ERROR_NULL_PTR; + } + return ret; +} + +VCD_NS_END diff --git a/src/VROmafPacking/RegionWisePackingGenerator.h b/src/VROmafPacking/RegionWisePackingGenerator.h index cfba07f4..d671e039 100644 --- a/src/VROmafPacking/RegionWisePackingGenerator.h +++ b/src/VROmafPacking/RegionWisePackingGenerator.h @@ -26,7 +26,7 @@ //! //! \file: RegionWisePackingGenerator.h -//! \brief: Region wise packing generator base class definition +//! \brief: Region wise packing generator wrapper class definition //! \detail: Define the basic operation of region wise packing generator. //! //! Created on April 30, 2019, 6:04 AM @@ -40,45 +40,12 @@ #include "OmafPackingCommon.h" #include "VROmafPacking_data.h" -#include "definitions.h" +#include "VROmafPacking_def.h" #include "MediaStream.h" +#include "OMAFPackingPluginAPI.h" VCD_NS_BEGIN -//! -//! \struct: SingleTile -//! \brief: define tile information for tiles merging -//! -struct SingleTile -{ - uint8_t streamIdxInMedia; //the index of video stream in all media streams - uint8_t origTileIdx; //the index of tile in original video frame - uint16_t dstCTUIndex; //the index of first CTU of tile in merged video frame -}; - -using TilesInRow = std::list; -using TilesInCol = std::list; - -//! -//! \struct: TilesMergeDirectionInRow -//! \brief: define tiles merging direction information -//! constructed in tile row -//! -struct TilesMergeDirectionInRow -{ - std::list tilesArrangeInRow; -}; - -//! -//! \struct: TilesMergeDirectionInCol -//! \brief: define tiles merging direction information -//! constructed in tile column -//! -struct TilesMergeDirectionInCol -{ - std::list tilesArrangeInCol; -}; - //! //! \class RegionWisePackingGenerator //! \brief Define the basic operation of region wise packing generator @@ -90,51 +57,48 @@ class RegionWisePackingGenerator //! //! \brief Constructor //! - RegionWisePackingGenerator() - { - m_packedPicWidth = 0; - m_packedPicHeight = 0; - m_mergedTilesArrange = NULL; - }; + RegionWisePackingGenerator(); //! //! \brief Destructor //! - virtual ~RegionWisePackingGenerator() {}; + ~RegionWisePackingGenerator(); //! //! \brief Initialize the region wise packing generator //! + //! \param [in] rwpkGenPluginPath + //! pointer to the OMAF packing plugin path + //! \param [in] rwpkGenPluginName + //! pointer to the OMAF packing plugin name //! \param [in] streams //! pointer to the media streams map set up in OmafPackage //! \param [in] videoIdxInMedia //! pointer to the index of each video in media streams //! \param [in] tilesNumInViewport //! the number of tiles in viewport - //! \param [in] tilesInViewport - //! pointer to tile information of all tiles in viewport - //! \param [in] finalViewportWidth - //! the final viewport width calculated by 360SCVP library - //! \param [in] finalViewportHeight - //! the final viewport height calculated by 360SCVP library + //! \param [in] maxSelectedTilesNum + //! the maxmum selected tiles number in viewport //! //! 
\return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t Initialize( + int32_t Initialize( + const char *rwpkGenPluginPath, + const char *rwpkGenPluginName, std::map *streams, uint8_t *videoIdxInMedia, - uint8_t tilesNumInViewport, - TileDef *tilesInViewport, - int32_t finalViewportWidth, - int32_t finalViewportHeight) = 0; + uint16_t tilesNumInViewport, + uint16_t maxSelectedTilesNum, + LogFunction logging); //! //! \brief Generate the region wise packing information for //! specified viewport //! - //! \param [in] viewportIdx - //! the index of specified viewport + //! \param [in] tilesInViewport + //! the pointer to all tiles information in packed + //! sub-picture //! \param [out] dstRwpk //! pointer to the region wise packing information for //! the specified viewport @@ -142,16 +106,17 @@ class RegionWisePackingGenerator //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t GenerateDstRwpk( - uint8_t viewportIdx, - RegionWisePacking *dstRwpk) = 0; + int32_t GenerateDstRwpk( + TileDef *tilesInViewport, + RegionWisePacking *dstRwpk); //! //! \brief Generate the tiles merging direction information for //! specified viewport //! - //! \param [in] viewportIdx - //! the index of specified viewport + //! \param [in] tilesInViewport + //! the pointer to all tiles information in packed + //! sub-picture //! \param [out] tilesMergeDir //! pointer to the tiles merging direction information for //! the specified viewport @@ -159,25 +124,9 @@ class RegionWisePackingGenerator //! \return int32_t //! ERROR_NONE if success, else failed reason //! - virtual int32_t GenerateTilesMergeDirection( - uint8_t viewportIdx, - TilesMergeDirectionInCol *tilesMergeDir) = 0; - - //! - //! \brief Get the number of tiles in one row in viewport - //! - //! \return uint8_t - //! the number of tiles in one row in viewport - //! - virtual uint8_t GetTilesNumInViewportRow() = 0; - - //! - //! \brief Get the number of tile rows in viewport - //! - //! \return uint8_t - //! the number of tile rows in viewport - //! - virtual uint8_t GetTileRowNumInViewport() = 0; + int32_t GenerateTilesMergeDirection( + TileDef *tilesInViewport, + TilesMergeDirectionInCol *tilesMergeDir); //! //! \brief Get the width of tiles merged picture @@ -185,7 +134,7 @@ class RegionWisePackingGenerator //! \return uint32_t //! the width of tiles merged picture //! - uint32_t GetPackedPicWidth() { return m_packedPicWidth; }; + uint32_t GetPackedPicWidth(); //! //! \brief Get the height of tiles merged picture @@ -193,7 +142,7 @@ class RegionWisePackingGenerator //! \return uint32_t //! the height of tiles merged picture //! - uint32_t GetPackedPicHeight() { return m_packedPicHeight; }; + uint32_t GetPackedPicHeight(); //! //! \brief Get the tiles arrangement information in tiles @@ -202,13 +151,15 @@ class RegionWisePackingGenerator //! \return TileArrangement* //! the pointer to the tiles arrangement information //! 
- TileArrangement* GetMergedTilesArrange() { return m_mergedTilesArrange; }; + TileArrangement* GetMergedTilesArrange(); + + int32_t GenerateMergedTilesArrange(TileDef *tilesInViewport); + + uint32_t GetTotalTilesNumInPackedPic(); protected: - std::map m_rwpkMap; //!< map of original region wise packing information of all video streams - uint32_t m_packedPicWidth; //!< the width of tiles merged picture - uint32_t m_packedPicHeight; //!< the height of tiles merged picture - TileArrangement *m_mergedTilesArrange; //!< pointer to the tiles arrangement information + void *m_pluginHdl; //!< pointer to OMAF packing plugin handle + RegionWisePackingGeneratorBase *m_rwpkGen; //!< pointer to detailed RWPK generator class instance corresponding to selected plugin }; VCD_NS_END; diff --git a/src/VROmafPacking/Segmentation.cpp b/src/VROmafPacking/Segmentation.cpp index d3d41591..47e7e5fe 100644 --- a/src/VROmafPacking/Segmentation.cpp +++ b/src/VROmafPacking/Segmentation.cpp @@ -57,8 +57,34 @@ Segmentation::Segmentation( m_mpdGen = NULL; m_trackIdStarter = 1; - m_frameRate.num = 0; - m_frameRate.den = 0; + m_frameRate.num = initInfo->bsBuffers[0].frameRate.num; + m_frameRate.den = initInfo->bsBuffers[0].frameRate.den; +} + +Segmentation::Segmentation(const Segmentation& src) +{ + m_streamMap = std::move(src.m_streamMap); + m_extractorTrackMan = std::move(src.m_extractorTrackMan); + m_segInfo = std::move(src.m_segInfo); + + m_mpdGen = std::move(src.m_mpdGen); + m_trackIdStarter = src.m_trackIdStarter; + m_frameRate.num = src.m_frameRate.num; + m_frameRate.den = src.m_frameRate.den; +} + +Segmentation& Segmentation::operator=(Segmentation&& other) +{ + m_streamMap = std::move(other.m_streamMap); + m_extractorTrackMan = std::move(other.m_extractorTrackMan); + m_segInfo = std::move(other.m_segInfo); + + m_mpdGen = std::move(other.m_mpdGen); + m_trackIdStarter = other.m_trackIdStarter; + m_frameRate.num = other.m_frameRate.num; + m_frameRate.den = other.m_frameRate.den; + + return *this; } Segmentation::~Segmentation() diff --git a/src/VROmafPacking/Segmentation.h b/src/VROmafPacking/Segmentation.h index ba6b6f87..9e3b555f 100644 --- a/src/VROmafPacking/Segmentation.h +++ b/src/VROmafPacking/Segmentation.h @@ -70,6 +70,10 @@ class Segmentation //! Segmentation(std::map *streams, ExtractorTrackManager *extractorTrackMan, InitialInfo *initInfo); + Segmentation(const Segmentation& src); + + Segmentation& operator=(Segmentation&& other); + //! //! \brief Destructor //! @@ -86,13 +90,31 @@ class Segmentation //! //! \brief End the segmentation process for - //! all media streams + //! all video streams //! //! \return int32_t //! ERROR_NONE if success, else failed reason //! virtual int32_t VideoEndSegmentation() = 0; + //! + //! \brief Execute the segmentation process for + //! all audio streams + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + virtual int32_t AudioSegmentation() = 0; + + //! + //! \brief End the segmentation process for + //! all audio streams + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + virtual int32_t AudioEndSegmentation() = 0; + private: //! //! \brief Write povd box for segments, diff --git a/src/VROmafPacking/TwoResExtractorTrackGenerator.cpp b/src/VROmafPacking/TwoResExtractorTrackGenerator.cpp deleted file mode 100644 index 6d6fbe54..00000000 --- a/src/VROmafPacking/TwoResExtractorTrackGenerator.cpp +++ /dev/null @@ -1,537 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. 
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -//! -//! \file: TwoResExtractorTrackGenerator.cpp -//! \brief: Two resolutions extractor track generator class implementation -//! -//! Created on April 30, 2019, 6:04 AM -//! - -#include "TwoResExtractorTrackGenerator.h" -#include "VideoStream.h" -#include "TwoResRegionWisePackingGenerator.h" - -VCD_NS_BEGIN - -TwoResExtractorTrackGenerator::~TwoResExtractorTrackGenerator() -{ - DELETE_ARRAY(m_videoIdxInMedia); - DELETE_ARRAY(m_tilesInViewport); - DELETE_MEMORY(m_viewInfo); - DELETE_MEMORY(m_newSPSNalu); - DELETE_MEMORY(m_newPPSNalu); -} - -uint16_t TwoResExtractorTrackGenerator::CalculateViewportNum() -{ - if (!m_videoIdxInMedia) - return 0; - - std::map::iterator it; - it = m_streams->find(m_videoIdxInMedia[0]); - if (it == m_streams->end()) - return 0; - VideoStream *vs = (VideoStream*)(it->second); - uint8_t tileInRow = vs->GetTileInRow(); - uint8_t tileInCol = vs->GetTileInCol(); - uint16_t viewportNum = tileInRow * tileInCol; - - return viewportNum; -} - -int32_t TwoResExtractorTrackGenerator::FillDstRegionWisePacking( - uint8_t viewportIdx, - RegionWisePacking *dstRwpk) -{ - dstRwpk->projPicWidth = m_highResWidth; - dstRwpk->projPicHeight = m_highResHeight; - - int32_t ret = m_rwpkGen->GenerateDstRwpk(viewportIdx, dstRwpk); - if (ret) - return ret; - - m_packedPicWidth = m_rwpkGen->GetPackedPicWidth(); - m_packedPicHeight = m_rwpkGen->GetPackedPicHeight(); - - return ERROR_NONE; -} - -int32_t TwoResExtractorTrackGenerator::FillTilesMergeDirection( - uint8_t viewportIdx, - TilesMergeDirectionInCol *tilesMergeDir) -{ - if (!tilesMergeDir) - return OMAF_ERROR_NULL_PTR; - - int32_t ret = m_rwpkGen->GenerateTilesMergeDirection(viewportIdx, tilesMergeDir); - if (ret) - return ret; - - return ERROR_NONE; -} - -int32_t TwoResExtractorTrackGenerator::FillDstContentCoverage( - uint8_t viewportIdx, - ContentCoverage *dstCovi) -{ - uint8_t tilesNumInViewRow = m_rwpkGen->GetTilesNumInViewportRow(); - uint8_t tileRowNumInView = m_rwpkGen->GetTileRowNumInViewport(); - - uint32_t projRegLeft = (viewportIdx % m_hrTileInRow) * m_hrTileWidth; - uint32_t projRegTop = (viewportIdx / m_hrTileInRow) * m_hrTileHeight; - uint32_t projRegWidth = 0; - uint32_t projRegHeight = 0; - - uint8_t viewIdxInRow = 
viewportIdx % m_hrTileInRow; - uint8_t viewIdxInCol = viewportIdx / m_hrTileInRow; - - if ((m_hrTileInRow - viewIdxInRow) >= tilesNumInViewRow) - { - for (uint8_t i = viewportIdx; i < (viewportIdx + tilesNumInViewRow); i++) - { - projRegWidth += m_tilesInfo[i].tileWidth; - } - } - else - { - for (uint8_t i = viewportIdx; i < (viewportIdx + (m_hrTileInRow - viewIdxInRow)); i++) - { - projRegWidth += m_tilesInfo[i].tileWidth; - } - for (uint8_t i = (viewIdxInCol*m_hrTileInRow); i < (viewIdxInCol*m_hrTileInRow + (tilesNumInViewRow-(m_hrTileInRow-viewIdxInRow))); i++) - { - projRegWidth += m_tilesInfo[i].tileWidth; - } - } - - if ((m_hrTileInCol - viewIdxInCol) >= tileRowNumInView) - { - for (uint8_t i = viewportIdx; i < (viewportIdx+m_hrTileInRow*tileRowNumInView); ) - { - projRegHeight += m_tilesInfo[i].tileHeight; - i += m_hrTileInRow; - } - } - else - { - for (uint8_t i = viewportIdx; i < (viewportIdx+(m_hrTileInCol-viewIdxInCol)*m_hrTileInRow);) - { - projRegHeight += m_tilesInfo[i].tileHeight; - i += m_hrTileInRow; - } - for (uint8_t i = viewIdxInRow; i < (viewIdxInRow+(tileRowNumInView-(m_hrTileInCol-viewIdxInCol))*m_hrTileInRow); ) - { - projRegHeight += m_tilesInfo[i].tileHeight; - i += m_hrTileInRow; - } - } - - if (m_projType == VCD::OMAF::ProjectionFormat::PF_ERP) - { - dstCovi->coverageShapeType = 1; - } - else - { - dstCovi->coverageShapeType = 0; - } - - dstCovi->numRegions = 1; - dstCovi->viewIdcPresenceFlag = false; - dstCovi->defaultViewIdc = 0; - - dstCovi->sphereRegions = new SphereRegion[dstCovi->numRegions]; - if (!dstCovi->sphereRegions) - return OMAF_ERROR_NULL_PTR; - - SphereRegion *sphereRegion = &(dstCovi->sphereRegions[0]); - memset(sphereRegion, 0, sizeof(SphereRegion)); - sphereRegion->viewIdc = 0; - sphereRegion->centreAzimuth = (int32_t)((((m_highResWidth / 2) - (float)(projRegLeft + projRegWidth / 2)) * 360 * 65536) / m_highResWidth); - sphereRegion->centreElevation = (int32_t)((((m_highResHeight / 2) - (float)(projRegTop + projRegHeight / 2)) * 180 * 65536) / m_highResHeight); - sphereRegion->centreTilt = 0; - sphereRegion->azimuthRange = (uint32_t)((projRegWidth * 360.f * 65536) / m_highResWidth); - sphereRegion->elevationRange = (uint32_t)((projRegHeight * 180.f * 65536) / m_highResHeight); - sphereRegion->interpolate = 0; - - return ERROR_NONE; -} - -int32_t TwoResExtractorTrackGenerator::CheckAndFillInitInfo() -{ - if (!m_initInfo) - return OMAF_ERROR_NULL_PTR; - - if (m_initInfo->bsNumVideo != 2) - return OMAF_ERROR_VIDEO_NUM; - - uint8_t actualVideoNum = 0; - uint8_t totalStreamNum = m_initInfo->bsNumVideo + m_initInfo->bsNumAudio; - uint8_t vsIdx = 0; - m_videoIdxInMedia = new uint8_t[totalStreamNum]; - if (!m_videoIdxInMedia) - return OMAF_ERROR_NULL_PTR; - - for (uint8_t streamIdx = 0; streamIdx < totalStreamNum; streamIdx++) - { - BSBuffer *bs = &(m_initInfo->bsBuffers[streamIdx]); - if (bs->mediaType == VIDEOTYPE) - { - m_videoIdxInMedia[vsIdx] = streamIdx; - vsIdx++; - actualVideoNum++; - } - } - - if (actualVideoNum != m_initInfo->bsNumVideo) - return OMAF_ERROR_VIDEO_NUM; - - - std::map::iterator it; - it = m_streams->find(m_videoIdxInMedia[0]); - if (it == m_streams->end()) - return OMAF_ERROR_STREAM_NOT_FOUND; - - VideoStream *vs1 = (VideoStream*)(it->second); - uint16_t width1 = vs1->GetSrcWidth(); - uint16_t height1 = vs1->GetSrcHeight(); - it = m_streams->find(m_videoIdxInMedia[1]); - if (it == m_streams->end()) - return OMAF_ERROR_STREAM_NOT_FOUND; - - VideoStream *vs2 = (VideoStream*)(it->second); - uint16_t width2 = vs2->GetSrcWidth(); - 
uint16_t height2 = vs2->GetSrcHeight(); - - if ((width1 == width2) && (height1 == height2)) - return OMAF_ERROR_VIDEO_RESOLUTION; - - if ((width1 * height1) == (width2 * height2)) - return OMAF_ERROR_VIDEO_RESOLUTION; - - if ((width1 * height1) > (width2 * height2)) - { - (m_initInfo->viewportInfo)->inWidth = width1; - (m_initInfo->viewportInfo)->inHeight = height1; - (m_initInfo->viewportInfo)->tileInRow = vs1->GetTileInRow(); - (m_initInfo->viewportInfo)->tileInCol = vs1->GetTileInCol(); - (m_initInfo->viewportInfo)->outGeoType = 2; //viewport - (m_initInfo->viewportInfo)->inGeoType = vs1->GetProjType(); - - m_highResWidth = width1; - m_highResHeight = height1; - m_hrTileInRow = vs1->GetTileInRow(); - m_hrTileInCol = vs1->GetTileInCol(); - m_tilesInfo = vs1->GetAllTilesInfo(); - m_hrTileWidth = m_tilesInfo[0].tileWidth; - m_hrTileHeight = m_tilesInfo[0].tileHeight; - m_projType = (VCD::OMAF::ProjectionFormat)(vs1->GetProjType()); - m_lowResWidth = width2; - m_lowResHeight = height2; - } else { - (m_initInfo->viewportInfo)->inWidth = width2; - (m_initInfo->viewportInfo)->inHeight = height2; - (m_initInfo->viewportInfo)->tileInRow = vs2->GetTileInRow(); - (m_initInfo->viewportInfo)->tileInCol = vs2->GetTileInCol(); - (m_initInfo->viewportInfo)->outGeoType = 2; //viewport - (m_initInfo->viewportInfo)->inGeoType = vs2->GetProjType(); - - m_highResWidth = width2; - m_highResHeight = height2; - m_hrTileInRow = vs2->GetTileInRow(); - m_hrTileInCol = vs2->GetTileInCol(); - m_tilesInfo = vs2->GetAllTilesInfo(); - m_hrTileWidth = m_tilesInfo[0].tileWidth; - m_hrTileHeight = m_tilesInfo[0].tileHeight; - m_projType = (VCD::OMAF::ProjectionFormat)(vs2->GetProjType()); - m_lowResWidth = width1; - m_lowResHeight = height1; - - uint8_t tempIdx = m_videoIdxInMedia[1]; - m_videoIdxInMedia[1] = m_videoIdxInMedia[0]; - m_videoIdxInMedia[0] = tempIdx; //m_videoIdxInMedia[0] is always corresponding to the high resolution video stream - } - - if ((m_initInfo->segmentationInfo)->extractorTracksPerSegThread == 0) - { - if ((m_hrTileInRow * m_hrTileInCol) % 4 == 0) - { - (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 4; - } - else if ((m_hrTileInRow * m_hrTileInCol) % 3 == 0) - { - (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 3; - } - else if ((m_hrTileInRow * m_hrTileInCol) % 2 == 0) - { - (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 2; - } - else - { - (m_initInfo->segmentationInfo)->extractorTracksPerSegThread = 1; - } - } - - return ERROR_NONE; -} - -int32_t TwoResExtractorTrackGenerator::Initialize() -{ - if (!m_initInfo) - return OMAF_ERROR_NULL_PTR; - - int32_t ret = CheckAndFillInitInfo(); - if (ret) - return ret; - - std::map::iterator it; - it = m_streams->find(m_videoIdxInMedia[0]); //high resolution video stream - if (it == m_streams->end()) - return OMAF_ERROR_STREAM_NOT_FOUND; - - VideoStream *vs = (VideoStream*)(it->second); - m_360scvpHandle = vs->Get360SCVPHandle(); - m_360scvpParam = vs->Get360SCVPParam(); - m_origVPSNalu = vs->GetVPSNalu(); - m_origSPSNalu = vs->GetSPSNalu(); - m_origPPSNalu = vs->GetPPSNalu(); - - m_tilesInViewport = new TileDef[1024]; - if (!m_tilesInViewport) - return OMAF_ERROR_NULL_PTR; - - m_viewInfo = new Param_ViewPortInfo; - if (!m_viewInfo) - return OMAF_ERROR_NULL_PTR; - - m_viewInfo->viewportWidth = (m_initInfo->viewportInfo)->viewportWidth; - m_viewInfo->viewportHeight = (m_initInfo->viewportInfo)->viewportHeight; - m_viewInfo->viewPortPitch = (m_initInfo->viewportInfo)->viewportPitch; - m_viewInfo->viewPortYaw = 
(m_initInfo->viewportInfo)->viewportYaw; - m_viewInfo->viewPortFOVH = (m_initInfo->viewportInfo)->horizontalFOVAngle; - m_viewInfo->viewPortFOVV = (m_initInfo->viewportInfo)->verticalFOVAngle; - m_viewInfo->geoTypeOutput = (EGeometryType)((m_initInfo->viewportInfo)->outGeoType); - m_viewInfo->geoTypeInput = (EGeometryType)((m_initInfo->viewportInfo)->inGeoType); - m_viewInfo->faceWidth = (m_initInfo->viewportInfo)->inWidth; - m_viewInfo->faceHeight = (m_initInfo->viewportInfo)->inHeight; - m_viewInfo->tileNumRow = (m_initInfo->viewportInfo)->tileInCol; - m_viewInfo->tileNumCol = (m_initInfo->viewportInfo)->tileInRow; - - ret = I360SCVP_SetParameter(m_360scvpHandle, ID_SCVP_PARAM_VIEWPORT, (void*)m_viewInfo); - if (ret) - return OMAF_ERROR_SCVP_SET_FAILED; - - m_360scvpParam->paramViewPort.viewportWidth = (m_initInfo->viewportInfo)->viewportWidth; - m_360scvpParam->paramViewPort.viewportHeight = (m_initInfo->viewportInfo)->viewportHeight; - m_360scvpParam->paramViewPort.viewPortPitch = (m_initInfo->viewportInfo)->viewportPitch; - m_360scvpParam->paramViewPort.viewPortYaw = (m_initInfo->viewportInfo)->viewportYaw; - m_360scvpParam->paramViewPort.viewPortFOVH = (m_initInfo->viewportInfo)->horizontalFOVAngle; - m_360scvpParam->paramViewPort.viewPortFOVV = (m_initInfo->viewportInfo)->verticalFOVAngle; - m_360scvpParam->paramViewPort.geoTypeOutput = (EGeometryType)((m_initInfo->viewportInfo)->outGeoType); - m_360scvpParam->paramViewPort.geoTypeInput = (EGeometryType)((m_initInfo->viewportInfo)->inGeoType); - m_360scvpParam->paramViewPort.faceWidth = (m_initInfo->viewportInfo)->inWidth; - m_360scvpParam->paramViewPort.faceHeight = (m_initInfo->viewportInfo)->inHeight; - m_360scvpParam->paramViewPort.tileNumRow = (m_initInfo->viewportInfo)->tileInCol; - m_360scvpParam->paramViewPort.tileNumCol = (m_initInfo->viewportInfo)->tileInRow; - ret = I360SCVP_process(m_360scvpParam, m_360scvpHandle); - if (ret) - return OMAF_ERROR_SCVP_PROCESS_FAILED; - - Param_ViewportOutput paramViewportOutput; - m_tilesNumInViewport = I360SCVP_getFixedNumTiles( - m_tilesInViewport, - ¶mViewportOutput, - m_360scvpHandle); - - m_finalViewportWidth = paramViewportOutput.dstWidthAlignTile; - m_finalViewportHeight = paramViewportOutput.dstHeightAlignTile; - - LOG(INFO) << "Calculated Viewport has width " << m_finalViewportWidth << " and height " << m_finalViewportHeight << " ! 
" << std::endl; - - if (!m_tilesNumInViewport || m_tilesNumInViewport > 1024) - return OMAF_ERROR_SCVP_INCORRECT_RESULT; - - m_rwpkGen = new TwoResRegionWisePackingGenerator(); - if (!m_rwpkGen) - return OMAF_ERROR_NULL_PTR; - - ret = m_rwpkGen->Initialize(m_streams, m_videoIdxInMedia, - m_tilesNumInViewport, m_tilesInViewport, - m_finalViewportWidth, m_finalViewportHeight); - if (ret) - return ret; - - return ERROR_NONE; -} - -int32_t TwoResExtractorTrackGenerator::GenerateExtractorTracks(std::map& extractorTrackMap, std::map *streams) -{ - if (!streams) - return OMAF_ERROR_NULL_PTR; - - m_viewportNum = CalculateViewportNum(); - if (!m_viewportNum) - return OMAF_ERROR_VIEWPORT_NUM; - - for (uint8_t i = 0; i < m_viewportNum; i++) - { - ExtractorTrack *extractorTrack = new ExtractorTrack(i, streams, (m_initInfo->viewportInfo)->inGeoType); - if (!extractorTrack) - { - std::map::iterator itET = extractorTrackMap.begin(); - for ( ; itET != extractorTrackMap.end(); ) - { - ExtractorTrack *extractorTrack1 = itET->second; - DELETE_MEMORY(extractorTrack1); - extractorTrackMap.erase(itET++); - } - extractorTrackMap.clear(); - return OMAF_ERROR_NULL_PTR; - } - - int32_t retInit = extractorTrack->Initialize(); - if (retInit) - { - LOG(ERROR) << "Failed to initialize extractor track !" << std::endl; - - std::map::iterator itET = extractorTrackMap.begin(); - for ( ; itET != extractorTrackMap.end(); ) - { - ExtractorTrack *extractorTrack1 = itET->second; - DELETE_MEMORY(extractorTrack1); - extractorTrackMap.erase(itET++); - } - extractorTrackMap.clear(); - DELETE_MEMORY(extractorTrack); - return retInit; - } - - FillDstRegionWisePacking(i, extractorTrack->GetRwpk()); - - FillTilesMergeDirection(i, extractorTrack->GetTilesMergeDir()); - - FillDstContentCoverage(i, extractorTrack->GetCovi()); - - extractorTrackMap.insert(std::make_pair(i, extractorTrack)); - } - - int32_t ret = GenerateNewSPS(); - if (ret) - return ret; - - ret = GenerateNewPPS(); - if (ret) - return ret; - - std::map::iterator it; - for (it = extractorTrackMap.begin(); it != extractorTrackMap.end(); it++) - { - ExtractorTrack *extractorTrack = it->second; - //extractorTrack->SetVPS(m_origVPSNalu); - //extractorTrack->SetSPS(m_newSPSNalu); - //extractorTrack->SetPPS(m_newPPSNalu); - extractorTrack->SetNalu(m_origVPSNalu, extractorTrack->GetVPS()); - extractorTrack->SetNalu(m_newSPSNalu, extractorTrack->GetSPS()); - extractorTrack->SetNalu(m_newPPSNalu, extractorTrack->GetPPS()); - - PicResolution highRes = { m_highResWidth, m_highResHeight }; - PicResolution lowRes = { m_lowResWidth, m_lowResHeight }; - - std::list* picResList = extractorTrack->GetPicRes(); - picResList->push_back(highRes); - picResList->push_back(lowRes); - } - - return ERROR_NONE; -} - -int32_t TwoResExtractorTrackGenerator::GenerateNewSPS() -{ - if (!m_packedPicWidth || !m_packedPicHeight) - return OMAF_ERROR_BAD_PARAM; - - if (!m_origSPSNalu || !m_360scvpParam || !m_360scvpHandle) - return OMAF_ERROR_NULL_PTR; - - if (!(m_origSPSNalu->data) || !(m_origSPSNalu->dataSize)) - return OMAF_ERROR_INVALID_SPS; - - m_newSPSNalu = new Nalu; - if (!m_newSPSNalu) - return OMAF_ERROR_NULL_PTR; - - m_newSPSNalu->data = new uint8_t[1024];//include start codes - if (!m_newSPSNalu->data) - return OMAF_ERROR_NULL_PTR; - - m_360scvpParam->pInputBitstream = m_origSPSNalu->data; - m_360scvpParam->inputBitstreamLen = m_origSPSNalu->dataSize; - m_360scvpParam->destWidth = m_packedPicWidth; - m_360scvpParam->destHeight = m_packedPicHeight; - m_360scvpParam->pOutputBitstream = 
m_newSPSNalu->data; - - int32_t ret = I360SCVP_GenerateSPS(m_360scvpParam, m_360scvpHandle); - if (ret) - return OMAF_ERROR_SCVP_OPERATION_FAILED; - - m_newSPSNalu->dataSize = m_360scvpParam->outputBitstreamLen; - m_newSPSNalu->startCodesSize = HEVC_STARTCODES_LEN; - m_newSPSNalu->naluType = HEVC_SPS_NALU_TYPE; - - return ERROR_NONE; -} - -int32_t TwoResExtractorTrackGenerator::GenerateNewPPS() -{ - TileArrangement *tileArray = m_rwpkGen->GetMergedTilesArrange(); - if (!tileArray) - return OMAF_ERROR_NULL_PTR; - - m_newPPSNalu = new Nalu; - if (!m_newPPSNalu) - return OMAF_ERROR_NULL_PTR; - - m_newPPSNalu->data = new uint8_t[1024];//include start codes - if (!m_newPPSNalu->data) - return OMAF_ERROR_NULL_PTR; - - m_360scvpParam->pInputBitstream = m_origPPSNalu->data; //includes start codes - m_360scvpParam->inputBitstreamLen = m_origPPSNalu->dataSize; - - m_360scvpParam->pOutputBitstream = m_newPPSNalu->data; - - int32_t ret = I360SCVP_GeneratePPS(m_360scvpParam, tileArray, m_360scvpHandle); - if (ret) - return OMAF_ERROR_SCVP_OPERATION_FAILED; - - m_newPPSNalu->dataSize = m_360scvpParam->outputBitstreamLen; - m_newPPSNalu->startCodesSize = HEVC_STARTCODES_LEN; - m_newPPSNalu->naluType = HEVC_PPS_NALU_TYPE; - - return ERROR_NONE; -} - -VCD_NS_END diff --git a/src/VROmafPacking/TwoResExtractorTrackGenerator.h b/src/VROmafPacking/TwoResExtractorTrackGenerator.h deleted file mode 100644 index 42ab0099..00000000 --- a/src/VROmafPacking/TwoResExtractorTrackGenerator.h +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -//! -//! \file: TwoResExtractorTrackGenerator.h -//! \brief: Two resolutions extractor track generator class definition -//! \detail: Define the operation of extractor track generator for two -//! resolutions video streams -//! -//! Created on April 30, 2019, 6:04 AM -//! - -#ifndef _TWORESEXTRACTORTRACKGENERATOR_H_ -#define _TWORESEXTRACTORTRACKGENERATOR_H_ - -#include "ExtractorTrackGenerator.h" -#include "../utils/OmafStructure.h" - -VCD_NS_BEGIN - -//! -//! \class TwoResExtractorTrackGenerator -//! \brief Define the operation of extractor track generator -//! for two resolutions video streams -//! 
- -class TwoResExtractorTrackGenerator : public ExtractorTrackGenerator -{ -public: - //! - //! \brief Constructor - //! - TwoResExtractorTrackGenerator() - { - m_videoIdxInMedia = NULL; - m_360scvpParam = NULL; - m_360scvpHandle = NULL; - m_tilesInViewport = NULL; - m_viewInfo = NULL; - m_tilesNumInViewport = 0; - m_finalViewportWidth = 0; - m_finalViewportHeight = 0; - m_highResWidth = 0; - m_highResHeight = 0; - m_lowResWidth = 0; - m_lowResHeight = 0; - m_hrTileInRow = 0; - m_hrTileInCol = 0; - m_hrTileWidth = 0; - m_hrTileHeight = 0; - m_tilesInfo = NULL; - m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; - m_packedPicWidth = 0; - m_packedPicHeight = 0; - m_origVPSNalu = NULL; - m_origSPSNalu = NULL; - m_origPPSNalu = NULL; - }; - - //! - //! \brief Copy Constructor - //! - //! \param [in] initInfo - //! initial information input by the library interface - //! \param [in] streams - //! pointer to the media streams map set up in OmafPackage - //! - TwoResExtractorTrackGenerator(InitialInfo *initInfo, std::map *streams) : ExtractorTrackGenerator(initInfo, streams) - { - m_videoIdxInMedia = NULL; - m_360scvpParam = NULL; - m_360scvpHandle = NULL; - m_tilesInViewport = NULL; - m_viewInfo = NULL; - m_tilesNumInViewport = 0; - m_finalViewportWidth = 0; - m_finalViewportHeight = 0; - m_highResWidth = 0; - m_highResHeight = 0; - m_lowResWidth = 0; - m_lowResHeight = 0; - m_hrTileInRow = 0; - m_hrTileInCol = 0; - m_hrTileWidth = 0; - m_hrTileHeight = 0; - m_tilesInfo = NULL; - m_projType = VCD::OMAF::ProjectionFormat::PF_ERP; - m_packedPicWidth = 0; - m_packedPicHeight = 0; - m_origVPSNalu = NULL; - m_origSPSNalu = NULL; - m_origPPSNalu = NULL; - }; - - //! - //! \brief Destructor - //! - virtual ~TwoResExtractorTrackGenerator(); - - //! - //! \brief Initialize the extractor track generator - //! for two resolutions video streams - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t Initialize(); - - //! - //! \brief Generate all extractor tracks - //! - //! \param [in] extractorTrackMap - //! pointer to extractor tracks map which holds - //! all extractor tracks - //! \param [in] streams - //! pointer to the media streams map set up in OmafPackage - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t GenerateExtractorTracks(std::map& extractorTrackMap, std::map *streams); - -private: - //! - //! \brief Calculate the total viewport number - //! according to the initial information - //! - //! \return uint16_t - //! the total viewport number - //! - virtual uint16_t CalculateViewportNum(); - - //! - //! \brief Fill the region wise packing information - //! for the specified viewport - //! - //! \param [in] viewportIdx - //! the index of the specified viewport - //! \param [in] dstRwpk - //! pointer to the region wise packing information for the - //! specified viewport generated according to srcRwpk and - //! two resolutions tiles merging strategy - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t FillDstRegionWisePacking(uint8_t viewportIdx, RegionWisePacking *dstRwpk); - - //! - //! \brief Fill the tiles merging direction information - //! for the specified viewport - //! - //! \param [in] viewportIdx - //! the index of the specified viewport - //! \param [in] tilesMergeDir - //! pointer to the tiles merging direction information - //! for the specified viewport generated according to - //! two resolutions tiles merging strategy - //! - //! 
\return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t FillTilesMergeDirection( - uint8_t viewportIdx, - TilesMergeDirectionInCol *tilesMergeDir); - - //! - //! \brief Fill the content coverage information - //! for the specified viewport - //! - //! \param [in] viewportIdx - //! the index of the specified viewport - //! \param [in] dstCovi - //! pointer to the content coverage information for the - //! specified viewport generated according to srcCovi and - //! two resolutions tiles merging strategy - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t FillDstContentCoverage(uint8_t viewportIdx, ContentCoverage *dstCovi); - - //! - //! \brief Check the validation of initial information - //! input by library interface, like whether the - //! TilesMergingType is correct compared to actual - //! streams information, meanwhile fill the lacked - //! information according to actual streams information - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t CheckAndFillInitInfo(); - - //! - //! \brief Generate the new SPS for tiles merged bitstream - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t GenerateNewSPS(); - - //! - //! \brief Generate the new PPS for tiles merged bitstream - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - virtual int32_t GenerateNewPPS(); - -private: - uint8_t *m_videoIdxInMedia; //!< pointer to index of video streams in media streams - param_360SCVP *m_360scvpParam; //!< 360SCVP library initial parameter - void *m_360scvpHandle; //!< 360SCVP library handle - TileDef *m_tilesInViewport; //!< the list of tiles inside the viewport - Param_ViewPortInfo *m_viewInfo; //!< pointer to the viewport information for 360SCVP library - int32_t m_tilesNumInViewport; //!< tiles number in viewport - int32_t m_finalViewportWidth; //!< the final viewport width calculated by 360SCVP library - int32_t m_finalViewportHeight; //!< the final viewport height calculated by 360SCVP library - - uint16_t m_highResWidth; //!< frame width of high resolution video stream - uint16_t m_highResHeight; //!< frame height of high resolution video stream - uint16_t m_lowResWidth; - uint16_t m_lowResHeight; - uint8_t m_hrTileInRow; //!< the number of high resolution tiles in one row in original picture - uint8_t m_hrTileInCol; //!< the number of high resolution tiles in one column in original picture - uint16_t m_hrTileWidth; //!< the width of high resolution tile - uint16_t m_hrTileHeight; //!< the height of high resolution tile - TileInfo *m_tilesInfo; //!< pointer to tile information of all tiles in high resolution video stream - VCD::OMAF::ProjectionFormat m_projType; //!< the projection type - uint32_t m_packedPicWidth; //!< the width of tiles merged picture - uint32_t m_packedPicHeight; //!< the height of tiles merged picture - Nalu *m_origVPSNalu; //!< the pointer to original VPS nalu of high resolution video stream - Nalu *m_origSPSNalu; //!< the pointer to original SPS nalu of high resolution video stream - Nalu *m_origPPSNalu; //!< the pointer to original PPS nalu of high resolution video stream -}; - -VCD_NS_END; -#endif /* _TWORESEXTRACTORTRACKGENERATOR_H_ */ diff --git a/src/VROmafPacking/TwoResRegionWisePackingGenerator.cpp b/src/VROmafPacking/TwoResRegionWisePackingGenerator.cpp deleted file mode 100644 index cbe576fc..00000000 --- a/src/VROmafPacking/TwoResRegionWisePackingGenerator.cpp 
+++ /dev/null @@ -1,582 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -//! -//! \file: TwoResRegionWisePackingGenerator.cpp -//! \brief: Two resolutions region wise packing generator class implementation -//! -//! Created on April 30, 2019, 6:04 AM -//! - -#include - -#include "TwoResRegionWisePackingGenerator.h" -#include "VideoStream.h" - -VCD_NS_BEGIN - -TwoResRegionWisePackingGenerator::TwoResRegionWisePackingGenerator() -{ - m_streamIdxInMedia[0] = 0; - m_streamIdxInMedia[1] = 0; - m_tilesNumInViewRow = 0; - m_tileRowNumInView = 0; - - m_origHRTilesInRow = 0; - m_origHRTilesInCol = 0; - m_highTileWidth = 0; - m_highTileHeight = 0; - m_origLRTilesInRow = 0; - m_origLRTilesInCol = 0; - m_lowTileWidth = 0; - m_lowTileHeight = 0; - - m_hrTilesInRow = 0; - m_hrTilesInCol = 0; - m_lrTilesInRow = 0; - m_lrTilesInCol = 0; - - m_highResTilesInView = new TilesMergeDirectionInRow; - if (!m_highResTilesInView) - return; - - m_mergedTilesArrange = new TileArrangement; - if (!m_mergedTilesArrange) - return; -} - -TwoResRegionWisePackingGenerator::~TwoResRegionWisePackingGenerator() -{ - if (m_highResTilesInView) - { - std::list::iterator itRow; - for (itRow = m_highResTilesInView->tilesArrangeInRow.begin(); itRow != m_highResTilesInView->tilesArrangeInRow.end();) - { - TilesInRow *tileRow = *itRow; - std::list::iterator itTile; - for (itTile = tileRow->begin(); itTile != tileRow->end();) - { - SingleTile *tile = *itTile; - DELETE_MEMORY(tile); - itTile = tileRow->erase(itTile); - } - - delete tileRow; - tileRow = NULL; - - itRow = m_highResTilesInView->tilesArrangeInRow.erase(itRow); - } - - delete m_highResTilesInView; - m_highResTilesInView = NULL; - } - - if (m_mergedTilesArrange) - { - DELETE_ARRAY(m_mergedTilesArrange->tileRowHeight); - DELETE_ARRAY(m_mergedTilesArrange->tileColWidth); - - delete m_mergedTilesArrange; - m_mergedTilesArrange = NULL; - } -} - -int32_t TwoResRegionWisePackingGenerator::GetOrigHighResTilesArrange( - uint8_t tilesNumInViewport, - TileDef *tilesInViewport, - int32_t finalViewportWidth, - int32_t finalViewportHeight) -{ - if (!tilesInViewport) - return OMAF_ERROR_NULL_PTR; - - if (!tilesNumInViewport) - return 
OMAF_ERROR_SCVP_INCORRECT_RESULT; - - if (!finalViewportWidth || !finalViewportHeight) - return OMAF_ERROR_SCVP_INCORRECT_RESULT; - - uint16_t tileRowNum = finalViewportHeight / m_highTileHeight; - uint16_t tileColNum = finalViewportWidth / m_highTileWidth; - - if (tileRowNum * tileColNum != tilesNumInViewport) - return OMAF_ERROR_SCVP_INCORRECT_RESULT; - - for (uint16_t i = 0; i < tileRowNum; i++) - { - TilesInRow *currRow = new TilesInRow; - if (!currRow) - return OMAF_ERROR_NULL_PTR; - - for (uint16_t j = 0; j < tileColNum; j++) - { - SingleTile *tile = new SingleTile; - if (!tile) - { - DELETE_MEMORY(currRow); - return OMAF_ERROR_NULL_PTR; - } - - tile->streamIdxInMedia = m_streamIdxInMedia[0]; - - currRow->push_back(tile); - } - m_highResTilesInView->tilesArrangeInRow.push_back(currRow); - } -/* - TileDef *prevTileDef = NULL; - TilesInRow *currRow = NULL; - for (uint8_t tileIdx = 0; tileIdx < tilesNumInViewport; tileIdx++) - { - TileDef *tileDef = &(tilesInViewport[tileIdx]); - if (tileIdx == 0) - { - SingleTile *tile = new SingleTile; - if (!tile) - return OMAF_ERROR_NULL_PTR; - - tile->streamIdxInMedia = m_streamIdxInMedia[0]; - - TilesInRow *tilesRow = new TilesInRow; - if (!tilesRow) - return OMAF_ERROR_NULL_PTR; - - tilesRow->push_back(tile); - - prevTileDef = tileDef; - currRow = tilesRow; - } - else - { - if ((tileDef->y) == (prevTileDef->y)) - { - SingleTile *tile = new SingleTile; - if (!tile) - return OMAF_ERROR_NULL_PTR; - - tile->streamIdxInMedia = m_streamIdxInMedia[0]; - - currRow->push_back(tile); - prevTileDef = tileDef; - } - else - { - m_highResTilesInView->tilesArrangeInRow.push_back(currRow); - - SingleTile *tile = new SingleTile; - if (!tile) - return OMAF_ERROR_NULL_PTR; - - tile->streamIdxInMedia = m_streamIdxInMedia[0]; - - TilesInRow *tilesRow = new TilesInRow; - if (!tilesRow) - return OMAF_ERROR_NULL_PTR; - - tilesRow->push_back(tile); - - prevTileDef = tileDef; - currRow = tilesRow; - } - } - if ((tileIdx + 1) == tilesNumInViewport) - { - m_highResTilesInView->tilesArrangeInRow.push_back(currRow); - } - } -*/ - return ERROR_NONE; -} - -static uint32_t gcd(uint32_t a, uint32_t b) -{ - for ( ; ; ) - { - if (a == 0) return b; - b %= a; - if (b == 0) return a; - a %= b; - } -} - -static uint32_t lcm(uint32_t a, uint32_t b) -{ - uint32_t temp = gcd(a, b); - - return temp ? 
(a / temp * b) : 0; -} - -int32_t TwoResRegionWisePackingGenerator::GenerateMergedTilesArrange() -{ - uint16_t highResTilesNum = m_tilesNumInViewRow * m_tileRowNumInView; - uint16_t lowResTilesNum = m_origLRTilesInRow * m_origLRTilesInCol; - - uint32_t height = 0; - uint8_t tilesNumInHeight = 0; - - height = lcm(m_highTileHeight, m_lowTileHeight); - uint16_t sqrtH = (uint16_t)sqrt(highResTilesNum); - while(sqrtH && highResTilesNum%sqrtH) { sqrtH--; } - - tilesNumInHeight = height / m_highTileHeight; - tilesNumInHeight = lcm(tilesNumInHeight, sqrtH); - height = tilesNumInHeight * m_highTileHeight; - - if (height == 0 || - tilesNumInHeight == 0 || - height % m_lowTileHeight || - highResTilesNum % tilesNumInHeight || - lowResTilesNum % (height / m_lowTileHeight)) - return OMAF_ERROR_UNDEFINED_OPERATION; - - m_hrTilesInCol = tilesNumInHeight; - m_hrTilesInRow = highResTilesNum / m_hrTilesInCol; - m_lrTilesInCol = height / m_lowTileHeight; - m_lrTilesInRow = lowResTilesNum / m_lrTilesInCol; - - m_mergedTilesArrange->tileRowsNum = 1; - m_mergedTilesArrange->tileColsNum = m_hrTilesInRow + m_lrTilesInRow; - m_mergedTilesArrange->tileRowHeight = new uint16_t[m_mergedTilesArrange->tileRowsNum]; - if (!(m_mergedTilesArrange->tileRowHeight)) - return OMAF_ERROR_NULL_PTR; - - m_mergedTilesArrange->tileRowHeight[0] = height; - - m_mergedTilesArrange->tileColWidth = new uint16_t[m_mergedTilesArrange->tileColsNum]; - if (!(m_mergedTilesArrange->tileColWidth)) - return OMAF_ERROR_NULL_PTR; - -#define LCU_SIZE 64 - - for (uint8_t i = 0; i < m_mergedTilesArrange->tileColsNum; i++) - { - if (i < m_hrTilesInRow) - { - m_mergedTilesArrange->tileColWidth[i] = m_highTileWidth / LCU_SIZE; - } - else - { - m_mergedTilesArrange->tileColWidth[i] = m_lowTileWidth / LCU_SIZE; - } - } - - return ERROR_NONE; -} - -int32_t TwoResRegionWisePackingGenerator::Initialize( - std::map *streams, - uint8_t *videoIdxInMedia, - uint8_t tilesNumInViewport, - TileDef *tilesInViewport, - int32_t finalViewportWidth, - int32_t finalViewportHeight) -{ - if (!streams || !videoIdxInMedia || !tilesInViewport) - return OMAF_ERROR_NULL_PTR; - - uint8_t videoStreamIdx = 0; - std::map::iterator it; - it = streams->find(videoIdxInMedia[0]); - if (it == streams->end()) - return OMAF_ERROR_STREAM_NOT_FOUND; - - VideoStream *vs1 = (VideoStream*)(it->second); - m_origHRTilesInRow = vs1->GetTileInRow(); - m_origHRTilesInCol = vs1->GetTileInCol(); - RegionWisePacking *rwpk1 = vs1->GetSrcRwpk(); - m_rwpkMap.insert(std::make_pair(videoStreamIdx, rwpk1)); - RectangularRegionWisePacking *rectRwpk = &(rwpk1->rectRegionPacking[0]); - m_highTileWidth = rectRwpk->projRegWidth; - m_highTileHeight = rectRwpk->projRegHeight; - - videoStreamIdx++; - it = streams->find(videoIdxInMedia[1]); - VideoStream *vs2 = (VideoStream*)(it->second); - m_origLRTilesInRow = vs2->GetTileInRow(); - m_origLRTilesInCol = vs2->GetTileInCol(); - RegionWisePacking *rwpk2 = vs2->GetSrcRwpk(); - m_rwpkMap.insert(std::make_pair(videoStreamIdx, rwpk2)); - rectRwpk = &(rwpk2->rectRegionPacking[0]); - m_lowTileWidth = rectRwpk->projRegWidth; - m_lowTileHeight = rectRwpk->projRegHeight; - - m_streamIdxInMedia[0] = videoIdxInMedia[0]; - m_streamIdxInMedia[1] = videoIdxInMedia[1]; - - int32_t ret = GetOrigHighResTilesArrange( - tilesNumInViewport, - tilesInViewport, - finalViewportWidth, - finalViewportHeight); - if (ret) - return ret; - - m_tileRowNumInView = m_highResTilesInView->tilesArrangeInRow.size(); - if (!m_tileRowNumInView) - return OMAF_ERROR_SCVP_INCORRECT_RESULT; - - TilesInRow 
*tilesRow = m_highResTilesInView->tilesArrangeInRow.front(); - m_tilesNumInViewRow = tilesRow->size(); - if (!m_tilesNumInViewRow) - return OMAF_ERROR_SCVP_INCORRECT_RESULT; - - ret = GenerateMergedTilesArrange(); - if (ret) - return ret; - - return ERROR_NONE; -} - -int32_t TwoResRegionWisePackingGenerator::GenerateTilesMergeDirection( - uint8_t viewportIdx, - TilesMergeDirectionInCol *tilesMergeDir) -{ - if (!tilesMergeDir) - return OMAF_ERROR_NULL_PTR; - - uint8_t highTilesNum = m_tilesNumInViewRow * m_tileRowNumInView; - uint8_t *highTilesIdx = new uint8_t[highTilesNum]; - if (!highTilesIdx) - return OMAF_ERROR_NULL_PTR; - - highTilesIdx[0] = viewportIdx; - for (uint8_t i = 1; i < highTilesNum; i++) - { - if (i % m_tilesNumInViewRow) - { - highTilesIdx[i] = highTilesIdx[i-1] + 1; - if (highTilesIdx[i] >= (highTilesIdx[i-1] / m_origHRTilesInRow + 1) * m_origHRTilesInRow) - { - highTilesIdx[i] = highTilesIdx[i] - m_origHRTilesInRow; - } - } - else - { - //highTilesIdx[i] = highTilesIdx[i-1] + m_origHRTilesInRow - (m_tilesNumInViewRow - 1); - highTilesIdx[i] = (i / m_tilesNumInViewRow) * m_origHRTilesInRow + highTilesIdx[0]; - if (highTilesIdx[i] >= m_origHRTilesInRow * m_origHRTilesInCol) - { - highTilesIdx[i] = highTilesIdx[i] - m_origHRTilesInRow * m_origHRTilesInCol; - } - } - } -#define LCU_SIZE 64 - - uint8_t tileColsNum = m_hrTilesInRow + m_lrTilesInRow; - uint16_t tileIdx = 0; - for (uint8_t i = 0; i < tileColsNum; i++) - { - TilesInCol *tileCol = new TilesInCol; - if (!tileCol) - { - DELETE_ARRAY(highTilesIdx); - return OMAF_ERROR_NULL_PTR; - } - - if (i < m_hrTilesInRow) - { - for (uint8_t j = 0; j < m_hrTilesInCol; j++) - { - SingleTile *tile = new SingleTile; - if (!tile) - { - DELETE_MEMORY(tileCol); - DELETE_ARRAY(highTilesIdx); - return OMAF_ERROR_NULL_PTR; - } - - tile->streamIdxInMedia = m_streamIdxInMedia[0]; - tile->origTileIdx = highTilesIdx[(tileIdx % m_hrTilesInCol) * m_hrTilesInRow + tileIdx / m_hrTilesInCol]; - tile->dstCTUIndex = (tileIdx % m_hrTilesInCol) * - (m_highTileHeight / LCU_SIZE) * - (m_packedPicWidth / LCU_SIZE) + - (tileIdx / m_hrTilesInCol) * - (m_highTileWidth / LCU_SIZE); - - tileCol->push_back(tile); - - tileIdx++; - } - tilesMergeDir->tilesArrangeInCol.push_back(tileCol); - } - else - { - for (uint8_t j = 0; j < m_lrTilesInCol; j++) - { - SingleTile *tile = new SingleTile; - if (!tile) - { - DELETE_MEMORY(tileCol); - DELETE_ARRAY(highTilesIdx); - return OMAF_ERROR_NULL_PTR; - } - - tile->streamIdxInMedia = m_streamIdxInMedia[1]; - tile->origTileIdx = tileIdx - highTilesNum; - tile->dstCTUIndex = ((tileIdx - highTilesNum) % m_lrTilesInCol) * - (m_lowTileHeight / LCU_SIZE) * - (m_packedPicWidth / LCU_SIZE) + - ((tileIdx - highTilesNum) / m_lrTilesInCol) * - (m_lowTileWidth / LCU_SIZE) + - m_hrTilesInRow * (m_highTileWidth / LCU_SIZE); - - tileCol->push_back(tile); - - tileIdx++; - } - tilesMergeDir->tilesArrangeInCol.push_back(tileCol); - } - } - - DELETE_ARRAY(highTilesIdx); - - return ERROR_NONE; -} - -int32_t TwoResRegionWisePackingGenerator::GenerateDstRwpk( - uint8_t viewportIdx, - RegionWisePacking *dstRwpk) -{ - if (!dstRwpk) - return OMAF_ERROR_NULL_PTR; - - dstRwpk->constituentPicMatching = 0; - dstRwpk->numRegions = m_tilesNumInViewRow * m_tileRowNumInView + m_origLRTilesInRow * m_origLRTilesInCol; - - dstRwpk->packedPicWidth = m_highTileWidth * m_hrTilesInRow + m_lowTileWidth * m_lrTilesInRow; - dstRwpk->packedPicHeight = m_highTileHeight * m_hrTilesInCol; - - - m_packedPicWidth = dstRwpk->packedPicWidth; - m_packedPicHeight = 
dstRwpk->packedPicHeight; - - dstRwpk->rectRegionPacking = new RectangularRegionWisePacking[dstRwpk->numRegions]; - if (!(dstRwpk->rectRegionPacking)) - return OMAF_ERROR_NULL_PTR; - - uint8_t highTilesNum = m_tilesNumInViewRow * m_tileRowNumInView; - uint8_t *highTilesIdx = new uint8_t[highTilesNum]; - if (!highTilesIdx) - return OMAF_ERROR_NULL_PTR; - - highTilesIdx[0] = viewportIdx; - for (uint8_t i = 1; i < highTilesNum; i++) - { - if (i % m_tilesNumInViewRow) - { - highTilesIdx[i] = highTilesIdx[i-1] + 1; - if (highTilesIdx[i] >= (highTilesIdx[i-1] / m_origHRTilesInRow + 1) * m_origHRTilesInRow) - { - highTilesIdx[i] = highTilesIdx[i] - m_origHRTilesInRow; - } - } - else - { - highTilesIdx[i] = (i / m_tilesNumInViewRow) * m_origHRTilesInRow + highTilesIdx[0]; - if (highTilesIdx[i] >= m_origHRTilesInRow * m_origHRTilesInCol) - { - highTilesIdx[i] = highTilesIdx[i] - m_origHRTilesInRow * m_origHRTilesInCol; - } - } - } - - std::map::iterator it; - it = m_rwpkMap.find(0); - if (it == m_rwpkMap.end()) - { - DELETE_ARRAY(highTilesIdx); - return OMAF_ERROR_STREAM_NOT_FOUND; - } - RegionWisePacking *rwpkHighRes = it->second; - it = m_rwpkMap.find(1); - if (it == m_rwpkMap.end()) - { - DELETE_ARRAY(highTilesIdx); - return OMAF_ERROR_STREAM_NOT_FOUND; - } - RegionWisePacking *rwpkLowRes = it->second; - - for (uint8_t regionIdx = 0; regionIdx < dstRwpk->numRegions; regionIdx++) - { - RectangularRegionWisePacking *rwpk = &(dstRwpk->rectRegionPacking[regionIdx]); - memset(rwpk, 0, sizeof(RectangularRegionWisePacking)); - rwpk->transformType = 0; - rwpk->guardBandFlag = false; - if (regionIdx < highTilesNum) - { - RectangularRegionWisePacking *rectRwpkHigh = &(rwpkHighRes->rectRegionPacking[highTilesIdx[(regionIdx % m_hrTilesInCol) * m_hrTilesInRow + regionIdx / m_hrTilesInCol]]); - rwpk->projRegWidth = rectRwpkHigh->projRegWidth; - rwpk->projRegHeight = rectRwpkHigh->projRegHeight; - rwpk->projRegTop = rectRwpkHigh->projRegTop; - rwpk->projRegLeft = rectRwpkHigh->projRegLeft; - - rwpk->packedRegWidth = rwpk->projRegWidth; - rwpk->packedRegHeight = rwpk->projRegHeight; - rwpk->packedRegTop = (regionIdx % m_hrTilesInCol) * m_highTileHeight; - rwpk->packedRegLeft = (regionIdx / m_hrTilesInCol) * m_highTileWidth; - - rwpk->leftGbWidth = 0; - rwpk->rightGbWidth = 0; - rwpk->topGbHeight = 0; - rwpk->bottomGbHeight = 0; - rwpk->gbNotUsedForPredFlag = true; - rwpk->gbType0 = 0; - rwpk->gbType1 = 0; - rwpk->gbType2 = 0; - rwpk->gbType3 = 0; - } - else - { - RectangularRegionWisePacking *rectRwpkLow = &(rwpkLowRes->rectRegionPacking[regionIdx-highTilesNum]); - - rwpk->projRegWidth = rectRwpkLow->projRegWidth; - rwpk->projRegHeight = rectRwpkLow->projRegHeight; - rwpk->projRegTop = rectRwpkLow->projRegTop; - rwpk->projRegLeft = rectRwpkLow->projRegLeft; - - rwpk->packedRegWidth = rwpk->projRegWidth; - rwpk->packedRegHeight = rwpk->projRegHeight; - rwpk->packedRegTop = ((regionIdx-highTilesNum) % m_lrTilesInCol) * m_lowTileHeight; - rwpk->packedRegLeft = ((regionIdx-highTilesNum) / m_lrTilesInCol) * m_lowTileWidth + m_highTileWidth * m_hrTilesInRow; - - rwpk->leftGbWidth = 0; - rwpk->rightGbWidth = 0; - rwpk->topGbHeight = 0; - rwpk->bottomGbHeight = 0; - rwpk->gbNotUsedForPredFlag = true; - rwpk->gbType0 = 0; - rwpk->gbType1 = 0; - rwpk->gbType2 = 0; - rwpk->gbType3 = 0; - } - } - - DELETE_ARRAY(highTilesIdx); - - return ERROR_NONE; -} - -VCD_NS_END diff --git a/src/VROmafPacking/TwoResRegionWisePackingGenerator.h b/src/VROmafPacking/TwoResRegionWisePackingGenerator.h deleted file mode 100644 index 
6eeb1d9f..00000000 --- a/src/VROmafPacking/TwoResRegionWisePackingGenerator.h +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -//! -//! \file: TwoResRegionWisePackingGenerator.h -//! \brief: Two resolutions region wise packing generator class definition -//! \detail: Define the operation of two resolutions region wise packing generator. -//! -//! Created on April 30, 2019, 6:04 AM -//! - -#ifndef _TWORESREGIONWISEPACKINGGENERATOR_H_ -#define _TWORESREGIONWISEPACKINGGENERATOR_H_ - -#include "RegionWisePackingGenerator.h" - -VCD_NS_BEGIN - -//! -//! \class TwoResRegionWisePackingGenerator -//! \brief Define the operation of two resolutions region wise packing generator -//! - -class TwoResRegionWisePackingGenerator : public RegionWisePackingGenerator -{ -public: - //! - //! \brief Constructor - //! - TwoResRegionWisePackingGenerator(); - - //! - //! \brief Destructor - //! - ~TwoResRegionWisePackingGenerator(); - - //! - //! \brief Initialize the region wise packing generator - //! - //! \param [in] streams - //! pointer to the media streams map set up in OmafPackage - //! \param [in] videoIdxInMedia - //! pointer to the index of each video in media streams - //! \param [in] tilesNumInViewport - //! the number of tiles in viewport - //! \param [in] tilesInViewport - //! pointer to tile information of all tiles in viewport - //! \param [in] finalViewportWidth - //! the final viewport width calculated by 360SCVP library - //! \param [in] finalViewportHeight - //! the final viewport height calculated by 360SCVP library - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - int32_t Initialize( - std::map *streams, - uint8_t *videoIdxInMedia, - uint8_t tilesNumInViewport, - TileDef *tilesInViewport, - int32_t finalViewportWidth, - int32_t finalViewportHeight); - - //! - //! \brief Generate the region wise packing information for - //! specified viewport - //! - //! \param [in] viewportIdx - //! the index of specified viewport - //! \param [out] dstRwpk - //! pointer to the region wise packing information for - //! the specified viewport - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! 
- int32_t GenerateDstRwpk(uint8_t viewportIdx, RegionWisePacking *dstRwpk); - - //! - //! \brief Generate the tiles merging direction information for - //! specified viewport - //! - //! \param [in] viewportIdx - //! the index of specified viewport - //! \param [out] tilesMergeDir - //! pointer to the tiles merging direction information for - //! the specified viewport - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - int32_t GenerateTilesMergeDirection( - uint8_t viewportIdx, - TilesMergeDirectionInCol *tilesMergeDir); - - //! - //! \brief Get the number of tiles in one row in viewport - //! - //! \return uint8_t - //! the number of tiles in one row in viewport - //! - uint8_t GetTilesNumInViewportRow() { return m_tilesNumInViewRow; }; - - //! - //! \brief Get the number of tile rows in viewport - //! - //! \return uint8_t - //! the number of tile rows in viewport - //! - uint8_t GetTileRowNumInViewport() { return m_tileRowNumInView; }; - -private: - //! - //! \brief Get the original high resolution tiles arrangement - //! in viewport - //! - //! \param [in] tilesNumInViewport - //! the number of high resolution tiles in viewport - //! \param [in] tilesInViewport - //! pointer to the tile information of all tiles - //! in viewport - //! \param [in] finalViewportWidth - //! the final viewport width calculated by 360SCVP library - //! \param [in] finalViewportHeight - //! the final viewport height calculated by 360SCVP library - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! - int32_t GetOrigHighResTilesArrange( - uint8_t tilesNumInViewport, - TileDef *tilesInViewport, - int32_t finalViewportWidth, - int32_t finalViewportHeight); - - //! - //! \brief Generate tiles arrangement in tiles merged picture - //! - //! \return int32_t - //! ERROR_NONE if success, else failed reason - //! 
- int32_t GenerateMergedTilesArrange(); - -private: - uint8_t m_streamIdxInMedia[2]; //!< array for video index in media streams - TilesMergeDirectionInRow *m_highResTilesInView; //!< pointer to original high resolution tiles arrangement in viewport - uint8_t m_tilesNumInViewRow; //!< the number of high resolution tiles in one row in viewport - uint8_t m_tileRowNumInView; //!< the number of high resolution tile rows in viewport - uint8_t m_origHRTilesInRow; //!< the number of tiles in one row in high resolution video stream - uint8_t m_origHRTilesInCol; //!< the number of tiles in one column in high resolution video stream - uint16_t m_highTileWidth; //!< the width of high resolution tile - uint16_t m_highTileHeight; //!< the height of high resolution tile - uint8_t m_origLRTilesInRow; //!< the number of tiles in one row in low resolution video stream - uint8_t m_origLRTilesInCol; //!< the number of tiles in one column in low resolution video stream - uint16_t m_lowTileWidth; //!< the width of low resolution tile - uint16_t m_lowTileHeight; //!< the height of low resolution tile - - uint8_t m_hrTilesInRow; //!< the number of high resolution tiles in one row in tiles merged picture - uint8_t m_hrTilesInCol; //!< the number of high resolution tiles in one column in tiles merged picture - uint8_t m_lrTilesInRow; //!< the number of low resolution tiles in one row in tiles merged picture - uint8_t m_lrTilesInCol; //!< the number of low resolution tiles in one column in tiles merged picture -}; - -VCD_NS_END; -#endif /* _TWORESREGIONWISEPACKING_H_ */ diff --git a/src/VROmafPacking/VROmafPacking.pc b/src/VROmafPacking/VROmafPacking.pc index 9748be5e..f8d112d0 100644 --- a/src/VROmafPacking/VROmafPacking.pc +++ b/src/VROmafPacking/VROmafPacking.pc @@ -7,4 +7,4 @@ Name:VR OMAF Compliance Packing Description: OMAF Compliance packing for muxer Version:0.0.1-DEV Cflags: -I${prefix}/include -Libs: -L${libdir} -lVROmafPacking -static-libstdc++ -l360SCVP -lmp4vr_static -lmp4vr_static_fpic -lstreamsegmenter_static -lstreamsegmenter_static_fpic -lpthread -L/usr/local/lib64 +Libs: -L${libdir} -lVROmafPacking -static-libstdc++ -l360SCVP -lpthread -L/usr/local/lib64 diff --git a/src/VROmafPacking/VROmafPackingAPI.h b/src/VROmafPacking/VROmafPackingAPI.h index 7ecd5cd0..e698124f 100644 --- a/src/VROmafPacking/VROmafPackingAPI.h +++ b/src/VROmafPacking/VROmafPackingAPI.h @@ -57,6 +57,22 @@ typedef void* Handler; //! Handler VROmafPackingInit(InitialInfo *initInfo); +//! +//! \brief VR OMAF Packing library set customized logging callback. +//! The default logging callback is glog. +//! If customized logging callback is needed, call this API +//! after initialization API +//! +//! \param [in] hdl +//! VR OMAF Packing library handle +//! \param [in] externalLog +//! the customized logging callback function pointer +//! +//! \return int32_t +//! ERROR_NONE if success, else failed reason +//! +int32_t VROmafPackingSetLogCallBack(Handler hdl, void* externalLog); + //! //! \brief VR OMAF Packing library writes segment for specified //! 
media stream, called when one new frame is needed to diff --git a/src/VROmafPacking/VROmafPackingAPIImpl.cpp b/src/VROmafPacking/VROmafPackingAPIImpl.cpp index 8e7bc192..99212529 100644 --- a/src/VROmafPacking/VROmafPackingAPIImpl.cpp +++ b/src/VROmafPacking/VROmafPackingAPIImpl.cpp @@ -33,6 +33,10 @@ #include "VROmafPackingAPI.h" #include "OmafPackage.h" +#include "Log.h" +#ifdef _USE_TRACE_ +#include "../trace/E2E_latency_tp.h" +#endif VCD_USE_VRVIDEO; @@ -55,9 +59,37 @@ Handler VROmafPackingInit(InitialInfo *initInfo) return (Handler)((long)omafPackage); } +int32_t VROmafPackingSetLogCallBack(Handler hdl, void* externalLog) +{ + OmafPackage *omafPackage = (OmafPackage*)hdl; + if (!omafPackage) + return OMAF_ERROR_NULL_PTR; + + LogFunction logFunction = (LogFunction)externalLog; + if (!logFunction) + return OMAF_ERROR_NULL_PTR; + + int32_t ret = omafPackage->SetLogCallBack(logFunction); + if (ret) + return ret; + + return ERROR_NONE; +} + int32_t VROmafPackingWriteSegment(Handler hdl, uint8_t streamIdx, FrameBSInfo *frameInfo) { OmafPackage *omafPackage = (OmafPackage*)hdl; + if (!omafPackage) + return OMAF_ERROR_NULL_PTR; + +#ifdef _USE_TRACE_ + string tag = "StremIdx:" + to_string(streamIdx); + tracepoint(E2E_latency_tp_provider, + pre_op_info, + frameInfo->pts, + tag.c_str()); +#endif + int32_t ret = omafPackage->OmafPacketStream(streamIdx, frameInfo); if (ret) return ret; @@ -68,6 +100,9 @@ int32_t VROmafPackingWriteSegment(Handler hdl, uint8_t streamIdx, FrameBSInfo *f int32_t VROmafPackingEndStreams(Handler hdl) { OmafPackage *omafPackage = (OmafPackage*)hdl; + if (!omafPackage) + return OMAF_ERROR_NULL_PTR; + int32_t ret = omafPackage->OmafEndStreams(); if (ret) return ret; @@ -78,9 +113,10 @@ int32_t VROmafPackingEndStreams(Handler hdl) int32_t VROmafPackingClose(Handler hdl) { OmafPackage *omafPackage = (OmafPackage*)hdl; - - delete omafPackage; - omafPackage = NULL; - + if (omafPackage) + { + delete omafPackage; + omafPackage = NULL; + } return ERROR_NONE; } diff --git a/src/VROmafPacking/VideoStream.cpp b/src/VROmafPacking/VideoStream.cpp deleted file mode 100644 index 1632651d..00000000 --- a/src/VROmafPacking/VideoStream.cpp +++ /dev/null @@ -1,456 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -//! -//! \file: VideoStream.cpp -//! \brief: Video stream class implementation -//! -//! Created on April 30, 2019, 6:04 AM -//! - -#include "VideoStream.h" -#include "AvcNaluParser.h" -#include "HevcNaluParser.h" - -VCD_NS_BEGIN - -VideoStream::VideoStream() -{ - m_streamIdx = 0; - m_codecId = CODEC_ID_H265; - m_width = 0; - m_height = 0; - m_tileInRow = 0; - m_tileInCol = 0; - m_tilesInfo = NULL; - m_projType = 0; - m_frameRate.num = 0; - m_frameRate.den = 0; - m_bitRate = 0; - - m_srcRwpk = NULL; - m_srcCovi = NULL; - - m_videoSegInfoGen = NULL; - m_currFrameInfo = NULL; - - m_360scvpParam = NULL; - m_360scvpHandle = NULL; - m_naluParser = NULL; - m_isEOS = false; -} - -VideoStream::~VideoStream() -{ - if (m_srcRwpk) - { - DELETE_ARRAY(m_srcRwpk->rectRegionPacking); - - delete m_srcRwpk; - m_srcRwpk = NULL; - } - - if (m_srcCovi) - { - DELETE_ARRAY(m_srcCovi->sphereRegions); - - delete m_srcCovi; - m_srcCovi = NULL; - } - - if (m_tilesInfo) - { - uint16_t tilesNum = m_tileInRow * m_tileInCol; - for (uint16_t tileIdx = 0; tileIdx < tilesNum; tileIdx++) - { - DELETE_MEMORY(m_tilesInfo[tileIdx].tileNalu); - } - - delete[] m_tilesInfo; - m_tilesInfo = NULL; - } - - DELETE_MEMORY(m_videoSegInfoGen); - - std::list::iterator it1; - for (it1 = m_frameInfoList.begin(); it1 != m_frameInfoList.end();) - { - FrameBSInfo *frameInfo = *it1; - if (frameInfo) - { - DELETE_ARRAY(frameInfo->data); - - delete frameInfo; - frameInfo = NULL; - } - - it1 = m_frameInfoList.erase(it1); - } - m_frameInfoList.clear(); - - std::list::iterator it2; - for (it2 = m_framesToOneSeg.begin(); it2 != m_framesToOneSeg.end();) - { - FrameBSInfo *frameInfo = *it2; - if (frameInfo) - { - DELETE_ARRAY(frameInfo->data); - - delete frameInfo; - frameInfo = NULL; - } - - it2 = m_framesToOneSeg.erase(it2); - } - m_framesToOneSeg.clear(); - - DELETE_MEMORY(m_360scvpParam); - - if (m_360scvpHandle) - { - I360SCVP_unInit(m_360scvpHandle); - } - - DELETE_MEMORY(m_naluParser); -} - -int32_t VideoStream::ParseHeader() -{ - m_naluParser->ParseHeaderData(); - m_width = m_naluParser->GetSrcWidth(); - m_height = m_naluParser->GetSrcHeight(); - m_tileInRow = m_naluParser->GetTileInRow(); - m_tileInCol = m_naluParser->GetTileInCol(); - m_projType = m_naluParser->GetProjectionType(); - - uint16_t tilesNum = m_tileInRow * m_tileInCol; - m_tilesInfo = new TileInfo[tilesNum]; - if (!m_tilesInfo) - return OMAF_ERROR_NULL_PTR; - - for (uint16_t tileIdx = 0; tileIdx < tilesNum; tileIdx++) - { - m_naluParser->GetTileInfo(tileIdx, &(m_tilesInfo[tileIdx])); - m_tilesInfo[tileIdx].tileNalu = new Nalu; - if (!(m_tilesInfo[tileIdx].tileNalu)) - return OMAF_ERROR_NULL_PTR; - } - - return ERROR_NONE; -} - -int32_t VideoStream::FillRegionWisePacking() -{ - if (!m_srcRwpk) - return OMAF_ERROR_NULL_PTR; - - if (!m_tilesInfo) - return OMAF_ERROR_NULL_PTR; - - m_srcRwpk->constituentPicMatching = 0; - m_srcRwpk->numRegions = m_tileInRow * m_tileInCol; - m_srcRwpk->projPicWidth = m_width; - m_srcRwpk->projPicHeight = 
m_height; - m_srcRwpk->packedPicWidth = m_width; - m_srcRwpk->packedPicHeight = m_height; - - m_srcRwpk->rectRegionPacking = new RectangularRegionWisePacking[m_srcRwpk->numRegions]; - if (!(m_srcRwpk->rectRegionPacking)) - return OMAF_ERROR_NULL_PTR; - - for (uint8_t regionIdx = 0; regionIdx < m_srcRwpk->numRegions; regionIdx++) - { - RectangularRegionWisePacking *rectRwpk = &(m_srcRwpk->rectRegionPacking[regionIdx]); - TileInfo *tileInfo = &(m_tilesInfo[regionIdx]); - - memset(rectRwpk, 0, sizeof(RectangularRegionWisePacking)); - rectRwpk->transformType = 0; - rectRwpk->guardBandFlag = 0; - rectRwpk->projRegWidth = tileInfo->tileWidth; - rectRwpk->projRegHeight = tileInfo->tileHeight; - rectRwpk->projRegLeft = tileInfo->horizontalPos; - rectRwpk->projRegTop = tileInfo->verticalPos; - - rectRwpk->packedRegWidth = tileInfo->tileWidth; - rectRwpk->packedRegHeight = tileInfo->tileHeight; - rectRwpk->packedRegLeft = tileInfo->horizontalPos; - rectRwpk->packedRegTop = tileInfo->verticalPos; - - rectRwpk->leftGbWidth = 0; - rectRwpk->rightGbWidth = 0; - rectRwpk->topGbHeight = 0; - rectRwpk->bottomGbHeight = 0; - rectRwpk->gbNotUsedForPredFlag = true; - rectRwpk->gbType0 = 0; - rectRwpk->gbType1 = 0; - rectRwpk->gbType2 = 0; - rectRwpk->gbType3 = 0; - } - - return ERROR_NONE; -} - -int32_t VideoStream::FillContentCoverage() -{ - if (!m_srcCovi) - return OMAF_ERROR_NULL_PTR; - - if (!m_srcRwpk) - return OMAF_ERROR_NULL_PTR; - - if (m_projType == 0) //ERP projection type - { - m_srcCovi->coverageShapeType = 1;// TwoAzimuthAndTwoElevationCircles - } - else - { - m_srcCovi->coverageShapeType = 0; //FourGreatCircles - } - - m_srcCovi->numRegions = m_tileInRow * m_tileInCol; - m_srcCovi->viewIdcPresenceFlag = false; - m_srcCovi->defaultViewIdc = 0; - - m_srcCovi->sphereRegions = new SphereRegion[m_srcCovi->numRegions]; - if (!(m_srcCovi->sphereRegions)) - return OMAF_ERROR_NULL_PTR; - - // Fill sphere region information for each tile - for (uint8_t regionIdx = 0; regionIdx < m_srcCovi->numRegions; regionIdx++) - { - SphereRegion *sphereRegion = &(m_srcCovi->sphereRegions[regionIdx]); - RectangularRegionWisePacking *rectRwpk = &(m_srcRwpk->rectRegionPacking[regionIdx]); - - memset(sphereRegion, 0, sizeof(SphereRegion)); - sphereRegion->viewIdc = 0; //doesn't take effect when viewIdcPresenceFlag is 0 - sphereRegion->centreAzimuth = (int32_t)((((m_width / 2) - (float)(rectRwpk->projRegLeft + rectRwpk->projRegWidth / 2)) * 360 * 65536) / m_width); - sphereRegion->centreElevation = (int32_t)((((m_height / 2) - (float)(rectRwpk->projRegTop + rectRwpk->projRegHeight / 2)) * 180 * 65536) / m_height); - sphereRegion->centreTilt = 0; - sphereRegion->azimuthRange = (uint32_t)((rectRwpk->projRegWidth * 360.f * 65536) / m_width); - sphereRegion->elevationRange = (uint32_t)((rectRwpk->projRegHeight * 180.f * 65536) / m_height); - sphereRegion->interpolate = 0; - } - - return ERROR_NONE; -} - -int32_t VideoStream::Initialize( - uint8_t streamIdx, - BSBuffer *bs, - InitialInfo *initInfo) -{ - if (!bs || !initInfo) - return OMAF_ERROR_NULL_PTR; - - m_srcRwpk = new RegionWisePacking; - if (!m_srcRwpk) - return OMAF_ERROR_NULL_PTR; - - m_srcCovi = new ContentCoverage; - if (!m_srcCovi) - return OMAF_ERROR_NULL_PTR; - - m_streamIdx = streamIdx; - - m_codecId = bs->codecId; - m_frameRate = bs->frameRate; - m_bitRate = bs->bitRate; - - m_360scvpParam = new param_360SCVP; - if (!m_360scvpParam) - return OMAF_ERROR_NULL_PTR; - - memset(m_360scvpParam, 0, sizeof(param_360SCVP)); - - m_360scvpParam->usedType = E_PARSER_ONENAL; 
- m_360scvpParam->pInputBitstream = bs->data; - m_360scvpParam->inputBitstreamLen = bs->dataSize; - - m_360scvpHandle = I360SCVP_Init(m_360scvpParam); - if (!m_360scvpHandle) - return OMAF_ERROR_SCVP_INIT_FAILED; - - if (m_codecId == 0) //CODEC_ID_H264 - { - m_naluParser = new AvcNaluParser(m_360scvpHandle, m_360scvpParam); - if (!m_naluParser) - return OMAF_ERROR_NULL_PTR; - } else if (m_codecId == 1) { //CODEC_ID_H265 - m_naluParser = new HevcNaluParser(m_360scvpHandle, m_360scvpParam); - if (!m_naluParser) - return OMAF_ERROR_NULL_PTR; - } else { - return OMAF_ERROR_UNDEFINED_OPERATION; - } - - int32_t ret = ParseHeader(); - if (ret) - return ret; - - m_videoSegInfoGen = new VideoSegmentInfoGenerator( - bs, initInfo, m_streamIdx, - m_width, m_height, - m_tileInRow, m_tileInCol); - if (!m_videoSegInfoGen) - return OMAF_ERROR_NULL_PTR; - - ret = m_videoSegInfoGen->Initialize(m_tilesInfo); - if (ret) - return ret; - - ret = FillRegionWisePacking(); - if (ret) - return ret; - - ret = FillContentCoverage(); - if (ret) - return ret; - - //uint32_t tilesNum = m_tileInRow * m_tileInCol; - //m_trackSegCtxs = new TrackSegmentCtx[tilesNum]; - //if (!m_trackSegCtxs); - //return OMAF_ERROR_NULL_PTR; - - return ERROR_NONE; -} - -int32_t VideoStream::AddFrameInfo(FrameBSInfo *frameInfo) -{ - if (!frameInfo || !(frameInfo->data)) - return OMAF_ERROR_NULL_PTR; - - if (!frameInfo->dataSize) - return OMAF_ERROR_DATA_SIZE; - - FrameBSInfo *newFrameInfo = new FrameBSInfo; - if (!newFrameInfo) - return OMAF_ERROR_NULL_PTR; - - memset(newFrameInfo, 0, sizeof(FrameBSInfo)); - - uint8_t *localData = new uint8_t[frameInfo->dataSize]; - if (!localData) - { - delete newFrameInfo; - newFrameInfo = NULL; - return OMAF_ERROR_NULL_PTR; - } - memcpy(localData, frameInfo->data, frameInfo->dataSize); - - newFrameInfo->data = localData; - newFrameInfo->dataSize = frameInfo->dataSize; - newFrameInfo->pts = frameInfo->pts; - newFrameInfo->isKeyFrame = frameInfo->isKeyFrame; - - m_frameInfoList.push_back(newFrameInfo); - - return ERROR_NONE; -} - -void VideoStream::SetCurrFrameInfo() -{ - if (m_frameInfoList.size() > 0) - { - m_currFrameInfo = m_frameInfoList.front(); - m_frameInfoList.pop_front(); - } -} - -int32_t VideoStream::UpdateTilesNalu() -{ - if (!m_currFrameInfo) - return OMAF_ERROR_NULL_PTR; - - uint16_t tilesNum = m_tileInRow * m_tileInCol; - int32_t ret = m_naluParser->ParseSliceNalu(m_currFrameInfo->data, m_currFrameInfo->dataSize, tilesNum, m_tilesInfo); - if (ret) - return ret; - - return ERROR_NONE; -} - -TileInfo* VideoStream::GetAllTilesInfo() -{ - return m_tilesInfo; -} - -FrameBSInfo* VideoStream::GetCurrFrameInfo() -{ - return m_currFrameInfo; -} - -void VideoStream::DestroyCurrSegmentFrames() -{ - std::list::iterator it; - for (it = m_framesToOneSeg.begin(); it != m_framesToOneSeg.end(); ) - { - FrameBSInfo *frameInfo = *it; - if (frameInfo) - { - DELETE_ARRAY(frameInfo->data); - delete frameInfo; - frameInfo = NULL; - } - - //m_framesToOneSeg.erase(it++); - it = m_framesToOneSeg.erase(it); - - } - m_framesToOneSeg.clear(); -} - -void VideoStream::DestroyCurrFrameInfo() -{ - if (m_currFrameInfo) - { - DELETE_ARRAY(m_currFrameInfo->data); - - delete m_currFrameInfo; - m_currFrameInfo = NULL; - } -} - -Nalu* VideoStream::GetVPSNalu() -{ - if (m_codecId == CODEC_ID_H265) - { - return ((HevcNaluParser*)m_naluParser)->GetVPSNalu(); - } - else - return NULL; -} - -Nalu* VideoStream::GetSPSNalu() -{ - return m_naluParser->GetSPSNalu(); -} - -Nalu* VideoStream::GetPPSNalu() -{ - return 
m_naluParser->GetPPSNalu(); -} - -VCD_NS_END diff --git a/src/VROmafPacking/compile.sh b/src/VROmafPacking/compile.sh deleted file mode 100755 index 4c09584d..00000000 --- a/src/VROmafPacking/compile.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh - -cd ../external -./build_glog.sh -./build_Nokia_omaf.sh -cd ../VROmafPacking -cmake ./ -make diff --git a/src/VROmafPacking/test/compile.sh b/src/VROmafPacking/test/compile.sh index d7acf0da..f77898e5 100755 --- a/src/VROmafPacking/test/compile.sh +++ b/src/VROmafPacking/test/compile.sh @@ -2,12 +2,20 @@ cp ../../google_test/libgtest.a . -g++ -I../ -I../../google_test/ -std=c++11 -g -c testHevcNaluParser.cpp -D_GLIBCXX_USE_CXX11_ABI=0 -g++ -I../ -I../../google_test/ -std=c++11 -g -c testVideoStream.cpp -D_GLIBCXX_USE_CXX11_ABI=0 -g++ -I../ -I../../google_test/ -std=c++11 -g -c testExtractorTrack.cpp -D_GLIBCXX_USE_CXX11_ABI=0 -g++ -I../ -I../../google_test/ -std=c++11 -g -c testDefaultSegmentation.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +mkdir vs_plugin +cd vs_plugin +cp ../../../plugins/StreamProcess_Plugin/VideoStream_Plugin/common/NaluParser.h ./ +cp ../../../plugins/StreamProcess_Plugin/VideoStream_Plugin/HevcVideoStream/HevcNaluParser.h ./ +cp ../../../utils/VROmafPacking_def.h ./ +cp ../../../utils/OmafPackingLog.h ./ +cd ../ -LD_FLAGS="-L/usr/local/lib -lVROmafPacking -l360SCVP -lstdc++ -lpthread -lm -L/usr/local/lib" +g++ -I../ -I./vs_plugin -I../../isolib -I../../google_test/ -std=c++11 -g -c testHevcNaluParser.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../ -I./vs_plugin -I../../isolib -I../../google_test/ -std=c++11 -g -c testVideoStream.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../ -I./vs_plugin -I../../isolib -I../../google_test/ -std=c++11 -g -c testExtractorTrack.cpp -D_GLIBCXX_USE_CXX11_ABI=0 +g++ -I../ -I./vs_plugin -I../../isolib -I../../google_test/ -std=c++11 -g -c testDefaultSegmentation.cpp -D_GLIBCXX_USE_CXX11_ABI=0 + +LD_FLAGS="-L/usr/local/lib -lVROmafPacking -l360SCVP -lHevcVideoStreamProcess -lsafestring_shared -ldl -lstdc++ -lpthread -lm -L/usr/local/lib" g++ -L/usr/local/lib testHevcNaluParser.o libgtest.a -o testHevcNaluParser ${LD_FLAGS} g++ -L/usr/local/lib testVideoStream.o libgtest.a -o testVideoStream ${LD_FLAGS} @@ -18,3 +26,5 @@ g++ -L/usr/local/lib testDefaultSegmentation.o libgtest.a -o testDefaultSegmenta ./testVideoStream ./testExtractorTrack ./testDefaultSegmentation + +rm -rf vs_plugin diff --git a/src/VROmafPacking/test/extractorTrack_pps.bin b/src/VROmafPacking/test/extractorTrack0_pps.bin similarity index 100% rename from src/VROmafPacking/test/extractorTrack_pps.bin rename to src/VROmafPacking/test/extractorTrack0_pps.bin diff --git a/src/VROmafPacking/test/extractorTrack_sps.bin b/src/VROmafPacking/test/extractorTrack0_sps.bin similarity index 100% rename from src/VROmafPacking/test/extractorTrack_sps.bin rename to src/VROmafPacking/test/extractorTrack0_sps.bin diff --git a/src/VROmafPacking/test/extractorTrack1_pps.bin b/src/VROmafPacking/test/extractorTrack1_pps.bin new file mode 100644 index 00000000..c20c5392 Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack1_pps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack1_sps.bin b/src/VROmafPacking/test/extractorTrack1_sps.bin new file mode 100644 index 00000000..ec849e41 Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack1_sps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack2_pps.bin b/src/VROmafPacking/test/extractorTrack2_pps.bin new file mode 100644 index 00000000..0e6511be Binary files /dev/null and 
b/src/VROmafPacking/test/extractorTrack2_pps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack2_sps.bin b/src/VROmafPacking/test/extractorTrack2_sps.bin new file mode 100644 index 00000000..b9684c70 Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack2_sps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack3_pps.bin b/src/VROmafPacking/test/extractorTrack3_pps.bin new file mode 100644 index 00000000..c20c5392 Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack3_pps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack3_sps.bin b/src/VROmafPacking/test/extractorTrack3_sps.bin new file mode 100644 index 00000000..ec849e41 Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack3_sps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack4_pps.bin b/src/VROmafPacking/test/extractorTrack4_pps.bin new file mode 100644 index 00000000..0e6511be Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack4_pps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack4_sps.bin b/src/VROmafPacking/test/extractorTrack4_sps.bin new file mode 100644 index 00000000..b9684c70 Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack4_sps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack5_pps.bin b/src/VROmafPacking/test/extractorTrack5_pps.bin new file mode 100644 index 00000000..c1e48fae Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack5_pps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack5_sps.bin b/src/VROmafPacking/test/extractorTrack5_sps.bin new file mode 100644 index 00000000..b56173df Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack5_sps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack6_pps.bin b/src/VROmafPacking/test/extractorTrack6_pps.bin new file mode 100644 index 00000000..c20c5392 Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack6_pps.bin differ diff --git a/src/VROmafPacking/test/extractorTrack6_sps.bin b/src/VROmafPacking/test/extractorTrack6_sps.bin new file mode 100644 index 00000000..ec849e41 Binary files /dev/null and b/src/VROmafPacking/test/extractorTrack6_sps.bin differ diff --git a/src/VROmafPacking/test/extractorsDataOffset.bin b/src/VROmafPacking/test/extractorsDataOffset.bin deleted file mode 100644 index 62ea7a83..00000000 --- a/src/VROmafPacking/test/extractorsDataOffset.bin +++ /dev/null @@ -1,40 +0,0 @@ -4,5,5,5,4,5 -5,5,5,5,4,5 -5,5,5,5,4,5 -5,5,4,5,4,5 -5,4,5,5,4,5 -5,5,5,5,4,5 -5,5,5,5,4,5 -5,5,5,4,4,5 -7,8,8,8,7,8 -8,8,8,8,7,8 -8,8,8,8,7,8 -8,8,7,8,7,8 -8,7,8,8,7,8 -8,8,8,8,7,8 -8,8,8,8,7,8 -8,8,8,7,7,8 -7,8,8,8,7,8 -8,8,8,8,7,8 -8,8,8,8,7,8 -8,8,7,8,7,8 -8,7,8,8,7,8 -8,8,8,8,7,8 -8,8,8,8,7,8 -8,8,8,7,7,8 -8,9,9,9,7,8 -9,9,9,9,7,8 -9,9,9,9,7,8 -9,9,8,9,7,8 -9,8,9,9,7,8 -9,9,9,9,7,8 -9,9,9,9,7,8 -9,9,9,8,7,8 -6,8,8,8,6,7 -8,8,8,8,6,7 -8,8,8,8,6,7 -8,8,6,8,6,7 -8,6,8,8,6,7 -8,8,8,8,6,7 -8,8,8,8,6,7 -8,8,8,6,6,7 diff --git a/src/VROmafPacking/test/testDefaultSegmentation.cpp b/src/VROmafPacking/test/testDefaultSegmentation.cpp index 55e4388e..4106857b 100644 --- a/src/VROmafPacking/test/testDefaultSegmentation.cpp +++ b/src/VROmafPacking/test/testDefaultSegmentation.cpp @@ -128,10 +128,13 @@ class DefaultSegmentationTest : public testing::Test return; } - memset(m_initInfo, 0, sizeof(InitialInfo)); + memset_s(m_initInfo, sizeof(InitialInfo), 0); m_initInfo->bsNumVideo = 2; m_initInfo->bsNumAudio = 0; - m_initInfo->tilesMergingType = TilesMergingType::TwoResTilesMerging; + 
m_initInfo->packingPluginPath = "/usr/local/lib"; + m_initInfo->packingPluginName = "HighResPlusFullLowResPacking"; + m_initInfo->videoProcessPluginPath = "/usr/local/lib"; + m_initInfo->videoProcessPluginName = "HevcVideoStreamProcess"; m_initInfo->bsBuffers = new BSBuffer[2]; if (!m_initInfo->bsBuffers) { @@ -171,7 +174,7 @@ class DefaultSegmentationTest : public testing::Test return; } - memset(m_initInfo->segmentationInfo, 0, sizeof(SegmentationInfo)); + memset_s(m_initInfo->segmentationInfo, sizeof(SegmentationInfo), 0); m_initInfo->segmentationInfo->needBufedFrames = 0; m_initInfo->segmentationInfo->segDuration = 2; m_initInfo->segmentationInfo->dirName = "./test/"; @@ -193,13 +196,17 @@ class DefaultSegmentationTest : public testing::Test return; } - memset(m_initInfo->viewportInfo, 0, sizeof(ViewportInformation)); + memset_s(m_initInfo->viewportInfo, sizeof(ViewportInformation), 0); m_initInfo->viewportInfo->viewportWidth = 1024; m_initInfo->viewportInfo->viewportHeight = 1024; m_initInfo->viewportInfo->viewportPitch = 0; m_initInfo->viewportInfo->viewportYaw = 90; m_initInfo->viewportInfo->horizontalFOVAngle = 80; m_initInfo->viewportInfo->verticalFOVAngle = 90; + m_initInfo->viewportInfo->outGeoType = E_SVIDEO_VIEWPORT; + m_initInfo->viewportInfo->inGeoType = E_SVIDEO_EQUIRECT; + + m_initInfo->projType = E_SVIDEO_EQUIRECT; m_omafPackage = new OmafPackage(); if (!m_omafPackage) @@ -324,7 +331,7 @@ TEST_F(DefaultSegmentationTest, AllProcess) EXPECT_TRUE(buf.st_size != 0); } - for (uint8_t i = 0; i < 8; i++) + for (uint8_t i = 0; i < 7; i++) { snprintf(initSegName2, 1024, "./test/Test_track%d.init.mp4", 1000+i); EXPECT_TRUE(access(initSegName2, 0) == 0); @@ -344,7 +351,7 @@ TEST_F(DefaultSegmentationTest, AllProcess) EXPECT_TRUE(buf.st_size != 0); } - for (uint8_t i = 0; i < 8; i++) + for (uint8_t i = 0; i < 7; i++) { snprintf(segName, 1024, "./test/Test_track%d.1.mp4", i + 1000); EXPECT_TRUE(access(segName, 0) == 0); diff --git a/src/VROmafPacking/test/testExtractorTrack.cpp b/src/VROmafPacking/test/testExtractorTrack.cpp index ef61beb8..5cfbdbd6 100644 --- a/src/VROmafPacking/test/testExtractorTrack.cpp +++ b/src/VROmafPacking/test/testExtractorTrack.cpp @@ -31,7 +31,9 @@ //! Created on April 30, 2019, 6:04 AM //! 
+#include #include "gtest/gtest.h" +#include "VideoStreamPluginAPI.h" #include "../ExtractorTrackManager.h" VCD_USE_VRVIDEO; @@ -131,10 +133,13 @@ class ExtractorTrackTest : public testing::Test return; } - memset(m_initInfo, 0, sizeof(InitialInfo)); + memset_s(m_initInfo, sizeof(InitialInfo), 0); m_initInfo->bsNumVideo = 2; m_initInfo->bsNumAudio = 0; - m_initInfo->tilesMergingType = TilesMergingType::TwoResTilesMerging; + m_initInfo->packingPluginPath = "/usr/local/lib"; + m_initInfo->packingPluginName = "HighResPlusFullLowResPacking"; + m_initInfo->videoProcessPluginPath = "/usr/local/lib"; + m_initInfo->videoProcessPluginName = "HevcVideoStreamProcess"; m_initInfo->bsBuffers = new BSBuffer[2]; if (!m_initInfo->bsBuffers) { @@ -200,8 +205,77 @@ class ExtractorTrackTest : public testing::Test m_initInfo->viewportInfo->viewportYaw = 90; m_initInfo->viewportInfo->horizontalFOVAngle = 80; m_initInfo->viewportInfo->verticalFOVAngle = 90; + m_initInfo->viewportInfo->outGeoType = E_SVIDEO_VIEWPORT; + m_initInfo->viewportInfo->inGeoType = E_SVIDEO_EQUIRECT; - VideoStream *vsLow = new VideoStream(); + m_initInfo->projType = E_SVIDEO_EQUIRECT; + + m_vsPlugin = NULL; + m_vsPlugin = dlopen("/usr/local/lib/libHevcVideoStreamProcess.so", RTLD_LAZY); + if (!m_vsPlugin) + { + DELETE_ARRAY(m_highResHeader); + DELETE_ARRAY(m_lowResHeader); + DELETE_ARRAY(m_totalDataLow); + DELETE_ARRAY(m_totalDataHigh); + DELETE_ARRAY(m_initInfo->bsBuffers); + DELETE_MEMORY(m_initInfo->segmentationInfo); + DELETE_MEMORY(m_initInfo->viewportInfo); + DELETE_MEMORY(m_initInfo); + return; + } + + CreateVideoStream* createVS = NULL; + createVS = (CreateVideoStream*)dlsym(m_vsPlugin, "Create"); + const char* dlsymErr1 = dlerror(); + if (dlsymErr1) + { + DELETE_ARRAY(m_highResHeader); + DELETE_ARRAY(m_lowResHeader); + DELETE_ARRAY(m_totalDataLow); + DELETE_ARRAY(m_totalDataHigh); + DELETE_ARRAY(m_initInfo->bsBuffers); + DELETE_MEMORY(m_initInfo->segmentationInfo); + DELETE_MEMORY(m_initInfo->viewportInfo); + DELETE_MEMORY(m_initInfo); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; + return; + } + if (!createVS) + { + DELETE_ARRAY(m_highResHeader); + DELETE_ARRAY(m_lowResHeader); + DELETE_ARRAY(m_totalDataLow); + DELETE_ARRAY(m_totalDataHigh); + DELETE_ARRAY(m_initInfo->bsBuffers); + DELETE_MEMORY(m_initInfo->segmentationInfo); + DELETE_MEMORY(m_initInfo->viewportInfo); + DELETE_MEMORY(m_initInfo); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; + return; + } + + DestroyVideoStream* destroyVS = NULL; + destroyVS = (DestroyVideoStream*)dlsym(m_vsPlugin, "Destroy"); + const char *dlsymErr = dlerror(); + if (dlsymErr || !destroyVS) + { + DELETE_ARRAY(m_highResHeader); + DELETE_ARRAY(m_lowResHeader); + DELETE_ARRAY(m_totalDataLow); + DELETE_ARRAY(m_totalDataHigh); + DELETE_ARRAY(m_initInfo->bsBuffers); + DELETE_MEMORY(m_initInfo->segmentationInfo); + DELETE_MEMORY(m_initInfo->viewportInfo); + DELETE_MEMORY(m_initInfo); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; + return; + } + + VideoStream *vsLow = createVS(); if (!vsLow) { DELETE_ARRAY(m_highResHeader); @@ -212,10 +286,13 @@ class ExtractorTrackTest : public testing::Test DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo->viewportInfo); DELETE_MEMORY(m_initInfo); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } ((MediaStream*)vsLow)->SetMediaType(VIDEOTYPE); + ((MediaStream*)vsLow)->SetCodecId(CODEC_ID_H265); int32_t ret = vsLow->Initialize(lowResStreamIdx, &(m_initInfo->bsBuffers[0]), m_initInfo); if (ret) @@ -228,14 +305,16 @@ class ExtractorTrackTest : public 
testing::Test DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo->viewportInfo); DELETE_MEMORY(m_initInfo); - DELETE_MEMORY(vsLow); + destroyVS((VideoStream*)(vsLow)); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } m_streams.insert(std::make_pair(lowResStreamIdx, (MediaStream*)vsLow)); //Create and Initialize high resolution video stream - VideoStream *vsHigh = new VideoStream(); + VideoStream *vsHigh = createVS(); if (!vsHigh) { DELETE_ARRAY(m_highResHeader); @@ -246,11 +325,14 @@ class ExtractorTrackTest : public testing::Test DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo->viewportInfo); DELETE_MEMORY(m_initInfo); - DELETE_MEMORY(vsLow); + destroyVS((VideoStream*)(vsLow)); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } ((MediaStream*)vsHigh)->SetMediaType(VIDEOTYPE); + ((MediaStream*)vsHigh)->SetCodecId(CODEC_ID_H265); ret = vsHigh->Initialize(highResStreamIdx, &(m_initInfo->bsBuffers[1]), m_initInfo); if (ret) @@ -263,8 +345,10 @@ class ExtractorTrackTest : public testing::Test DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo->viewportInfo); DELETE_MEMORY(m_initInfo); - DELETE_MEMORY(vsLow); - DELETE_MEMORY(vsHigh); + destroyVS((VideoStream*)(vsLow)); + destroyVS((VideoStream*)(vsHigh)); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } @@ -281,8 +365,10 @@ class ExtractorTrackTest : public testing::Test DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo->viewportInfo); DELETE_MEMORY(m_initInfo); - DELETE_MEMORY(vsLow); - DELETE_MEMORY(vsHigh); + destroyVS((VideoStream*)(vsLow)); + destroyVS((VideoStream*)(vsHigh)); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } @@ -297,8 +383,10 @@ class ExtractorTrackTest : public testing::Test DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo->viewportInfo); DELETE_MEMORY(m_initInfo); - DELETE_MEMORY(vsLow); - DELETE_MEMORY(vsHigh); + destroyVS((VideoStream*)(vsLow)); + destroyVS((VideoStream*)(vsHigh)); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; DELETE_MEMORY(m_extractorTrackMan); return; } @@ -322,14 +410,35 @@ class ExtractorTrackTest : public testing::Test DELETE_MEMORY(m_extractorTrackMan); - std::map::iterator it; - for (it = m_streams.begin(); it != m_streams.end();) + if (m_vsPlugin) { - DELETE_MEMORY(it->second); + DestroyVideoStream* destroyVS = NULL; + destroyVS = (DestroyVideoStream*)dlsym(m_vsPlugin, "Destroy"); + const char *dlsymErr = dlerror(); + if (dlsymErr) + { + return; + } + if (!destroyVS) + { + return; + } + + std::map::iterator it; + for (it = m_streams.begin(); it != m_streams.end();) + { + MediaStream *stream = it->second; + if (stream && (stream->GetCodecId() == CODEC_ID_H265)) + { + destroyVS((VideoStream*)(stream)); + } + m_streams.erase(it++); + } + m_streams.clear(); - m_streams.erase(it++); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; } - m_streams.clear(); } std::map m_streams; @@ -339,6 +448,7 @@ class ExtractorTrackTest : public testing::Test uint8_t *m_lowResHeader; uint8_t *m_totalDataLow; uint8_t *m_totalDataHigh; + void *m_vsPlugin; }; TEST_F(ExtractorTrackTest, AllProcess) @@ -348,13 +458,6 @@ TEST_F(ExtractorTrackTest, AllProcess) uint64_t offsetLow = 0; uint64_t offsetHigh = 0; - FILE *fpDataOffset = fopen("extractorsDataOffset.bin", "r"); - EXPECT_TRUE(fpDataOffset != NULL); - if (!fpDataOffset) - return; - - uint32_t sliceHrdLen[8]; - memset(sliceHrdLen, 0, 8 * sizeof(uint32_t)); int32_t ret = 0; for (uint8_t frameIdx = 0; frameIdx < 5; frameIdx++) { @@ -362,8 +465,6 @@ 
TEST_F(ExtractorTrackTest, AllProcess) EXPECT_TRUE(frameLowRes != NULL); if (!frameLowRes) { - fclose(fpDataOffset); - fpDataOffset = NULL; return; } frameLowRes->data = m_totalDataLow + offsetLow; @@ -383,8 +484,6 @@ TEST_F(ExtractorTrackTest, AllProcess) EXPECT_TRUE(frameHighRes != NULL); if (!frameHighRes) { - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); return; } @@ -421,12 +520,13 @@ TEST_F(ExtractorTrackTest, AllProcess) ret = vsHigh->UpdateTilesNalu(); EXPECT_TRUE(ret == ERROR_NONE); - std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); + std::map *extractorTracks = m_extractorTrackMan->GetAllExtractorTracks(); EXPECT_TRUE(extractorTracks != NULL); - - std::map::iterator it; + std::map::iterator it; for (it = extractorTracks->begin(); it != extractorTracks->end(); it++) { + uint16_t viewportIdx = it->first; + ExtractorTrack *extractorTrack = it->second; Nalu *vpsNalu = extractorTrack->GetVPS(); Nalu *spsNalu = extractorTrack->GetSPS(); @@ -442,9 +542,6 @@ TEST_F(ExtractorTrackTest, AllProcess) FILE *fp = fopen("extractorTrack_vps.bin", "r"); if (!fp) { - - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); return; @@ -455,8 +552,6 @@ TEST_F(ExtractorTrackTest, AllProcess) uint8_t *vpsData = new uint8_t[vpsLen]; if (!vpsData) { - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); fclose(fp); @@ -465,7 +560,9 @@ TEST_F(ExtractorTrackTest, AllProcess) } fread(vpsData, 1, vpsLen, fp); - int32_t compRet = memcmp(vpsNalu->data, vpsData, vpsLen); + int32_t compRet = 0; + errno_t result = memcmp_s(vpsNalu->data, vpsLen, vpsData, vpsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(vpsNalu->dataSize == vpsLen); EXPECT_TRUE(compRet == 0); EXPECT_TRUE(vpsNalu->startCodesSize == 4); @@ -475,12 +572,11 @@ TEST_F(ExtractorTrackTest, AllProcess) vpsData = NULL; fclose(fp); fp = NULL; - - fp = fopen("extractorTrack_sps.bin", "r"); + char spsFile[256] = { 0 }; + snprintf(spsFile, 256, "extractorTrack%d_sps.bin", viewportIdx); + fp = fopen(spsFile, "r"); if (!fp) { - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); return; @@ -491,8 +587,6 @@ TEST_F(ExtractorTrackTest, AllProcess) uint8_t *spsData = new uint8_t[spsLen]; if (!spsData) { - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); fclose(fp); @@ -500,8 +594,8 @@ TEST_F(ExtractorTrackTest, AllProcess) return; } fread(spsData, 1, spsLen, fp); - - compRet = memcmp(spsNalu->data, spsData, spsLen); + result = memcmp_s(spsNalu->data, spsLen, spsData, spsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(spsNalu->dataSize == spsLen); //includes start codes EXPECT_TRUE(compRet == 0); EXPECT_TRUE(spsNalu->startCodesSize == 4); @@ -512,11 +606,12 @@ TEST_F(ExtractorTrackTest, AllProcess) fclose(fp); fp = NULL; - fp = fopen("extractorTrack_pps.bin", "r"); + char ppsFile[256] = { 0 }; + snprintf(ppsFile, 256, "extractorTrack%d_pps.bin", viewportIdx); + fp = fopen(ppsFile, "r"); + if (!fp) { - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); return; @@ -527,8 +622,6 @@ TEST_F(ExtractorTrackTest, AllProcess) uint8_t *ppsData = new uint8_t[ppsLen]; if (!ppsData) { - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); fclose(fp); @@ -536,8 +629,8 @@ TEST_F(ExtractorTrackTest, AllProcess) 
return; } fread(ppsData, 1, ppsLen, fp); - - compRet = memcmp(ppsNalu->data, ppsData, ppsLen); + result = memcmp_s(ppsNalu->data, ppsLen, ppsData, ppsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(ppsNalu->dataSize == ppsLen); //includes start codes EXPECT_TRUE(compRet == 0); EXPECT_TRUE(ppsNalu->startCodesSize == 4); @@ -547,174 +640,99 @@ TEST_F(ExtractorTrackTest, AllProcess) ppsData = NULL; fclose(fp); fp = NULL; - - RegionWisePacking rwpk; - rwpk.constituentPicMatching = 0; - rwpk.numRegions = 6; - rwpk.projPicWidth = 3840; - rwpk.projPicHeight = 1920; - rwpk.packedPicWidth = 2880; - rwpk.packedPicHeight = 1920; - rwpk.rectRegionPacking = new RectangularRegionWisePacking[6]; - EXPECT_TRUE(rwpk.rectRegionPacking != NULL); - if (!(rwpk.rectRegionPacking)) - { - fclose(fpDataOffset); - fpDataOffset = NULL; - DELETE_MEMORY(frameLowRes); - DELETE_MEMORY(frameHighRes); - return; + switch (viewportIdx) { + case 0: + case 2: + case 4: + EXPECT_TRUE(dstRwpk->constituentPicMatching == 0); + EXPECT_TRUE(dstRwpk->numRegions == 6); + EXPECT_TRUE(dstRwpk->projPicWidth == 3840); + EXPECT_TRUE(dstRwpk->projPicHeight == 1920); + EXPECT_TRUE(dstRwpk->packedPicWidth == 2880); + EXPECT_TRUE(dstRwpk->packedPicHeight == 1920); + break; + case 1: + case 3: + case 6: + EXPECT_TRUE(dstRwpk->constituentPicMatching == 0); + EXPECT_TRUE(dstRwpk->numRegions == 8); + EXPECT_TRUE(dstRwpk->projPicWidth == 3840); + EXPECT_TRUE(dstRwpk->projPicHeight == 1920); + EXPECT_TRUE(dstRwpk->packedPicWidth == 3840); + EXPECT_TRUE(dstRwpk->packedPicHeight == 1920); + break; + case 5: + EXPECT_TRUE(dstRwpk->constituentPicMatching == 0); + EXPECT_TRUE(dstRwpk->numRegions == 10); + EXPECT_TRUE(dstRwpk->projPicWidth == 3840); + EXPECT_TRUE(dstRwpk->projPicHeight == 1920); + EXPECT_TRUE(dstRwpk->packedPicWidth == 4800); + EXPECT_TRUE(dstRwpk->packedPicHeight == 1920); + break; + default: + EXPECT_TRUE(dstRwpk->constituentPicMatching == 0); + EXPECT_TRUE(dstRwpk->numRegions == 10); + EXPECT_TRUE(dstRwpk->projPicWidth == 3840); + EXPECT_TRUE(dstRwpk->projPicHeight == 1920); + EXPECT_TRUE(dstRwpk->packedPicWidth == 2880); + EXPECT_TRUE(dstRwpk->packedPicHeight == 1920); + break; } - - for (int32_t i = 0; i < 4; i++) - { - memset(&(rwpk.rectRegionPacking[i]), 0, sizeof(RectangularRegionWisePacking)); - rwpk.rectRegionPacking[i].transformType = 0; - rwpk.rectRegionPacking[i].guardBandFlag = false; - rwpk.rectRegionPacking[i].projRegWidth = 960; - rwpk.rectRegionPacking[i].projRegHeight = 960; - if (it->first < 4) - { - if (i % 2 == 0) - { - rwpk.rectRegionPacking[i].projRegTop = 0; - } - else - { - rwpk.rectRegionPacking[i].projRegTop = 960; - } - } - else - { - if (i % 2 == 0) - { - rwpk.rectRegionPacking[i].projRegTop = 960; - } - else - { - rwpk.rectRegionPacking[i].projRegTop = 0; - } - } - if ((it->first) != 3 && (it->first) != 7) - { - if (i == 0 || i == 1) - { - rwpk.rectRegionPacking[i].projRegLeft = ((it->first) % 4) * 960; - } - else - { - rwpk.rectRegionPacking[i].projRegLeft = ((it->first) % 4) * 960 + 960; - } - } - else - { - if (i == 0 || i == 1) - { - rwpk.rectRegionPacking[i].projRegLeft = 2880; - } - else - { - rwpk.rectRegionPacking[i].projRegLeft = 0; - } - } - rwpk.rectRegionPacking[i].packedRegWidth = 960; - rwpk.rectRegionPacking[i].packedRegHeight = 960; - if (i == 0 || i == 2) - { - rwpk.rectRegionPacking[i].packedRegTop = 0; - } - else - { - rwpk.rectRegionPacking[i].packedRegTop = 960; - } - if (i == 0 || i == 1) - { - rwpk.rectRegionPacking[i].packedRegLeft = 0; - } - else - { - 
rwpk.rectRegionPacking[i].packedRegLeft = 960; - } - rwpk.rectRegionPacking[i].gbNotUsedForPredFlag = true; - } - memset(&(rwpk.rectRegionPacking[4]), 0, sizeof(RectangularRegionWisePacking)); - rwpk.rectRegionPacking[4].transformType = 0; - rwpk.rectRegionPacking[4].guardBandFlag = false; - rwpk.rectRegionPacking[4].projRegWidth = 960; - rwpk.rectRegionPacking[4].projRegHeight = 960; - rwpk.rectRegionPacking[4].projRegTop = 0; - rwpk.rectRegionPacking[4].projRegLeft = 0; - rwpk.rectRegionPacking[4].packedRegWidth = 960; - rwpk.rectRegionPacking[4].packedRegHeight = 960; - rwpk.rectRegionPacking[4].packedRegTop = 0; - rwpk.rectRegionPacking[4].packedRegLeft = 1920; - rwpk.rectRegionPacking[4].gbNotUsedForPredFlag = true; - - memset(&(rwpk.rectRegionPacking[5]), 0, sizeof(RectangularRegionWisePacking)); - rwpk.rectRegionPacking[5].transformType = 0; - rwpk.rectRegionPacking[5].guardBandFlag = false; - rwpk.rectRegionPacking[5].projRegWidth = 960; - rwpk.rectRegionPacking[5].projRegHeight = 960; - rwpk.rectRegionPacking[5].projRegTop = 0; - rwpk.rectRegionPacking[5].projRegLeft = 960; - rwpk.rectRegionPacking[5].packedRegWidth = 960; - rwpk.rectRegionPacking[5].packedRegHeight = 960; - rwpk.rectRegionPacking[5].packedRegTop = 960; - rwpk.rectRegionPacking[5].packedRegLeft = 1920; - rwpk.rectRegionPacking[5].gbNotUsedForPredFlag = true; - - EXPECT_TRUE(dstRwpk->constituentPicMatching == rwpk.constituentPicMatching); - EXPECT_TRUE(dstRwpk->numRegions == rwpk.numRegions); - EXPECT_TRUE(dstRwpk->projPicWidth == rwpk.projPicWidth); - EXPECT_TRUE(dstRwpk->projPicHeight == rwpk.projPicHeight); - EXPECT_TRUE(dstRwpk->packedPicWidth == rwpk.packedPicWidth); - EXPECT_TRUE(dstRwpk->packedPicHeight == rwpk.packedPicHeight); - - compRet = 0; - for (uint16_t idx = 0; idx < rwpk.numRegions; idx++) - { - compRet = memcmp(&(dstRwpk->rectRegionPacking[idx]), &(rwpk.rectRegionPacking[idx]), sizeof(RectangularRegionWisePacking)); - EXPECT_TRUE(compRet == 0); - } - - ContentCoverage covi; - covi.coverageShapeType = 1; - covi.numRegions = 1; - covi.viewIdcPresenceFlag = false; - covi.defaultViewIdc = 0; - covi.sphereRegions = new SphereRegion[covi.numRegions]; - EXPECT_TRUE(covi.sphereRegions != NULL); - if (!(covi.sphereRegions)) - { - fclose(fpDataOffset); - fpDataOffset = NULL; - DELETE_MEMORY(frameLowRes); - DELETE_MEMORY(frameHighRes); - DELETE_ARRAY(rwpk.rectRegionPacking); - return; - } - - for (uint16_t idx = 0; idx < covi.numRegions; idx++) - { - memset(&(covi.sphereRegions[idx]), 0, sizeof(SphereRegion)); - covi.sphereRegions[idx].viewIdc = 0; - covi.sphereRegions[idx].centreAzimuth = (int32_t)((((3840 / 2) - (float)(((it->first) % 4) * 960 + 1920 / 2)) * 360 * 65536) / 3840); - covi.sphereRegions[idx].centreElevation = (int32_t)((((1920 / 2) - (float)(((it->first) / 4) * 960 + 1920 / 2)) * 180 * 65536) / 1920); - covi.sphereRegions[idx].centreTilt = 0; - covi.sphereRegions[idx].azimuthRange = (uint32_t)((1920 * 360.f * 65536) / 3840); - covi.sphereRegions[idx].elevationRange = (uint32_t)((1920 * 180.f * 65536) / 1920); - covi.sphereRegions[idx].interpolate = 0; - } - - EXPECT_TRUE(dstCovi->coverageShapeType == covi.coverageShapeType); - EXPECT_TRUE(dstCovi->numRegions == covi.numRegions); - EXPECT_TRUE(dstCovi->viewIdcPresenceFlag == covi.viewIdcPresenceFlag); - EXPECT_TRUE(dstCovi->defaultViewIdc == covi.defaultViewIdc); - compRet = 0; - for (uint16_t idx = 0; idx < dstCovi->numRegions; idx++) - { - compRet = memcmp(&(dstCovi->sphereRegions[idx]), &(covi.sphereRegions[idx]), sizeof(SphereRegion)); - 
EXPECT_TRUE(compRet == 0); + EXPECT_TRUE(dstCovi->coverageShapeType == 1); + EXPECT_TRUE(dstCovi->numRegions == 1); + EXPECT_TRUE(dstCovi->viewIdcPresenceFlag == false); + EXPECT_TRUE(dstCovi->defaultViewIdc == 0); + uint16_t idx = 0; + + switch (viewportIdx) { + case 0: + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreAzimuth == -11796480); + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreElevation == -5898240); + EXPECT_TRUE(dstCovi->sphereRegions[idx].azimuthRange == 8192000); + EXPECT_TRUE(dstCovi->sphereRegions[idx].elevationRange == 8847360); + break; + case 1: + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreAzimuth == -11796480); + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreElevation == -2949120); + EXPECT_TRUE(dstCovi->sphereRegions[idx].azimuthRange == 8192000); + EXPECT_TRUE(dstCovi->sphereRegions[idx].elevationRange == 8847360); + break; + case 2: + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreAzimuth == -11796480); + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreElevation == 0); + EXPECT_TRUE(dstCovi->sphereRegions[idx].azimuthRange == 8192000); + EXPECT_TRUE(dstCovi->sphereRegions[idx].elevationRange == 8847360); + break; + case 3: + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreAzimuth == -11796480); + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreElevation == 2949120); + EXPECT_TRUE(dstCovi->sphereRegions[idx].azimuthRange == 8192000); + EXPECT_TRUE(dstCovi->sphereRegions[idx].elevationRange == 8847360); + break; + case 4: + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreAzimuth == -11796480); + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreElevation == 5898240); + EXPECT_TRUE(dstCovi->sphereRegions[idx].azimuthRange == 8192000); + EXPECT_TRUE(dstCovi->sphereRegions[idx].elevationRange == 8847360); + break; + case 5: + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreAzimuth == -8847360); + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreElevation == -2949120); + EXPECT_TRUE(dstCovi->sphereRegions[idx].azimuthRange == 8192000); + EXPECT_TRUE(dstCovi->sphereRegions[idx].elevationRange == 8847360); + break; + case 6: + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreAzimuth == -8847360); + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreElevation == 0); + EXPECT_TRUE(dstCovi->sphereRegions[idx].azimuthRange == 8192000); + EXPECT_TRUE(dstCovi->sphereRegions[idx].elevationRange == 8847360); + break; + default: + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreAzimuth == 5898240); + EXPECT_TRUE(dstCovi->sphereRegions[idx].centreElevation == 0); + EXPECT_TRUE(dstCovi->sphereRegions[idx].azimuthRange == 11796480); + EXPECT_TRUE(dstCovi->sphereRegions[idx].elevationRange == 11796480); + break; } EXPECT_TRUE(picResList->size() == 2); @@ -729,12 +747,8 @@ TEST_F(ExtractorTrackTest, AllProcess) fp = fopen("extractorTrack_projSEI.bin", "r"); if (!fp) { - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); - DELETE_ARRAY(rwpk.rectRegionPacking); - DELETE_ARRAY(covi.sphereRegions); return; } fseek(fp, 0L, SEEK_END); @@ -743,19 +757,16 @@ TEST_F(ExtractorTrackTest, AllProcess) uint8_t *projSEIData = new uint8_t[projSEILen]; if (!projSEIData) { - fclose(fpDataOffset); - fpDataOffset = NULL; DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); - DELETE_ARRAY(rwpk.rectRegionPacking); - DELETE_ARRAY(covi.sphereRegions); fclose(fp); fp = NULL; return; } fread(projSEIData, 1, projSEILen, fp); - compRet = memcmp(projSEI->data, projSEIData, projSEILen); + result = memcmp_s(projSEI->data, projSEILen, projSEIData, projSEILen, &compRet); + 
EXPECT_TRUE(result == EOK); EXPECT_TRUE(projSEI->dataSize == projSEILen); EXPECT_TRUE(compRet == 0); @@ -769,79 +780,178 @@ TEST_F(ExtractorTrackTest, AllProcess) EXPECT_TRUE(rwpkSEI->dataSize != 4); EXPECT_TRUE(tilesMerDir != NULL); - EXPECT_TRUE(tilesMerDir->tilesArrangeInCol.size() == 3); + int32_t tilesArrangeInColSize = tilesMerDir->tilesArrangeInCol.size(); + if ((viewportIdx == 0) || (viewportIdx == 2) || (viewportIdx == 4)) + { + EXPECT_TRUE(tilesMerDir->tilesArrangeInCol.size() == 3); + } + else if ((viewportIdx == 1) || (viewportIdx == 3) || (viewportIdx == 6)) + { + EXPECT_TRUE(tilesMerDir->tilesArrangeInCol.size() == 4); + } + else if (viewportIdx == 5) + { + EXPECT_TRUE(tilesMerDir->tilesArrangeInCol.size() == 5); + } + std::list::iterator itCol; itCol = tilesMerDir->tilesArrangeInCol.begin(); + tilesArrangeInColSize--; TilesInCol *tileCol = *itCol; - EXPECT_TRUE(tileCol->size() == 2); std::list::iterator itTile; + SingleTile *tile; + + EXPECT_TRUE(tileCol->size() == 2); itTile = tileCol->begin(); - SingleTile *tile = *itTile; + tile = *itTile; EXPECT_TRUE(tile->streamIdxInMedia == 1); - EXPECT_TRUE(tile->origTileIdx == (it->first)); EXPECT_TRUE(tile->dstCTUIndex == 0); itTile++; tile = *itTile; EXPECT_TRUE(tile->streamIdxInMedia == 1); - EXPECT_TRUE(tile->origTileIdx == ((it->first) < 4 ? (it->first) + 4 : (it->first) - 4)); - EXPECT_TRUE(tile->dstCTUIndex == 675); + + if ((viewportIdx == 0) || (viewportIdx == 2) || (viewportIdx == 4)) + { + EXPECT_TRUE(tile->dstCTUIndex == 675); + } + else if ((viewportIdx == 1) || (viewportIdx == 3) || (viewportIdx == 6)) + { + EXPECT_TRUE(tile->dstCTUIndex == 900); + } + else if (viewportIdx == 5) + { + EXPECT_TRUE(tile->dstCTUIndex == 1125); + } + itCol++; + tilesArrangeInColSize--; tileCol = *itCol; EXPECT_TRUE(tileCol->size() == 2); itTile = tileCol->begin(); tile = *itTile; EXPECT_TRUE(tile->streamIdxInMedia == 1); - if (it->first != 3 && it->first != 7) - { - EXPECT_TRUE(tile->origTileIdx == ((it->first) + 1)); - } - else - { - EXPECT_TRUE(tile->origTileIdx == ((it->first) - 3)); - } EXPECT_TRUE(tile->dstCTUIndex == 15); + itTile++; tile = *itTile; EXPECT_TRUE(tile->streamIdxInMedia == 1); - if (it->first != 3 && it->first != 7) + if ((viewportIdx == 0) || (viewportIdx == 2) || (viewportIdx == 4)) { - if (it->first < 4) - { - EXPECT_TRUE(tile->origTileIdx == ((it->first) + 5)); - } - else - { - EXPECT_TRUE(tile->origTileIdx == ((it->first) - 3)); - } + EXPECT_TRUE(tile->dstCTUIndex == 690); } - else if ((it->first) == 3) + else if ((viewportIdx == 1) || (viewportIdx == 3) || (viewportIdx == 6)) { - EXPECT_TRUE(tile->origTileIdx == 4); + EXPECT_TRUE(tile->dstCTUIndex == 915); } - else + else if (viewportIdx == 5) { - EXPECT_TRUE(tile->origTileIdx == 0); + EXPECT_TRUE(tile->dstCTUIndex == 1140); } - EXPECT_TRUE(tile->dstCTUIndex == 690); + itCol++; + tilesArrangeInColSize--; tileCol = *itCol; EXPECT_TRUE(tileCol->size() == 2); itTile = tileCol->begin(); tile = *itTile; - EXPECT_TRUE(tile->streamIdxInMedia == 0); - EXPECT_TRUE(tile->origTileIdx == 0); + switch (viewportIdx) { + case 0: + case 2: + case 4: + EXPECT_TRUE(tile->streamIdxInMedia == 0); + break; + case 1: + case 3: + case 5: + case 6: + EXPECT_TRUE(tile->streamIdxInMedia == 1); + break; + default: + EXPECT_TRUE(tile->streamIdxInMedia == 1); + break; + } EXPECT_TRUE(tile->dstCTUIndex == 30); + itTile++; tile = *itTile; - EXPECT_TRUE(tile->streamIdxInMedia == 0); - EXPECT_TRUE(tile->origTileIdx == 1); - EXPECT_TRUE(tile->dstCTUIndex == 705); - - fscanf(fpDataOffset, 
"%u,%u,%u,%u,%u,%u", &sliceHrdLen[0], &sliceHrdLen[1], &sliceHrdLen[2], &sliceHrdLen[3], &sliceHrdLen[4], &sliceHrdLen[5]); + if ((viewportIdx == 0) || (viewportIdx == 2) || (viewportIdx == 4)) + { + EXPECT_TRUE(tile->streamIdxInMedia == 0); + EXPECT_TRUE(tile->dstCTUIndex == 705); + } + else if ((viewportIdx == 1) || (viewportIdx == 3) || (viewportIdx == 6)) + { + EXPECT_TRUE(tile->streamIdxInMedia == 1); + EXPECT_TRUE(tile->dstCTUIndex == 930); + } + else if (viewportIdx == 5) + { + EXPECT_TRUE(tile->streamIdxInMedia == 1); + EXPECT_TRUE(tile->dstCTUIndex == 1155); + } + if (tilesArrangeInColSize != 0) { + if ((viewportIdx == 1) || (viewportIdx == 3) || (viewportIdx == 6)) + { + itCol++; + tileCol = *itCol; + EXPECT_TRUE(tileCol->size() == 2); + itTile = tileCol->begin(); + tile = *itTile; + EXPECT_TRUE(tile->streamIdxInMedia == 0); + EXPECT_TRUE(tile->dstCTUIndex == 45); + itTile++; + tile = *itTile; + EXPECT_TRUE(tile->streamIdxInMedia == 0); + EXPECT_TRUE(tile->dstCTUIndex == 945); + } + else if (viewportIdx == 5) + { + itCol++; + tileCol = *itCol; + EXPECT_TRUE(tileCol->size() == 2); + itTile = tileCol->begin(); + tile = *itTile; + EXPECT_TRUE(tile->streamIdxInMedia == 1); + EXPECT_TRUE(tile->dstCTUIndex == 45); + itTile++; + tile = *itTile; + EXPECT_TRUE(tile->streamIdxInMedia == 1); + EXPECT_TRUE(tile->dstCTUIndex == 1170); + } + tilesArrangeInColSize--; + } + if (tilesArrangeInColSize != 0) { + if (viewportIdx == 5) + { + itCol++; + tileCol = *itCol; + EXPECT_TRUE(tileCol->size() == 2); + itTile = tileCol->begin(); + tile = *itTile; + EXPECT_TRUE(tile->streamIdxInMedia == 0); + EXPECT_TRUE(tile->dstCTUIndex == 60); + itTile++; + tile = *itTile; + EXPECT_TRUE(tile->streamIdxInMedia == 0); + EXPECT_TRUE(tile->dstCTUIndex == 1185); + } + } extractorTrack->ConstructExtractors(); std::map *extractors = extractorTrack->GetAllExtractors(); - EXPECT_TRUE(extractors->size() == 6); + if ((viewportIdx == 0) || (viewportIdx == 2) || (viewportIdx == 4)) + { + EXPECT_TRUE(extractors->size() == 6); + } + else if ((viewportIdx == 1) || (viewportIdx == 3) || (viewportIdx == 6)) + { + EXPECT_TRUE(extractors->size() == 8); + } + else if (viewportIdx == 5) + { + EXPECT_TRUE(extractors->size() == 10); + } + std::map::iterator itExtractor; for (itExtractor = extractors->begin(); itExtractor != extractors->end(); @@ -855,24 +965,15 @@ TEST_F(ExtractorTrackTest, AllProcess) InlineConstructor *inlineCtor = *itInlineCtor; EXPECT_TRUE(inlineCtor->length != 0); EXPECT_TRUE(inlineCtor->inlineData != NULL); - std::list::iterator itSmpCtor; - itSmpCtor = extractor->sampleConstructor.begin(); - SampleConstructor *sampleCtor = *itSmpCtor; - EXPECT_TRUE(sampleCtor->dataOffset == (DASH_SAMPLELENFIELD_SIZE + HEVC_NALUHEADER_LEN + sliceHrdLen[itExtractor->first])); } ret = extractorTrack->DestroyExtractors(); EXPECT_TRUE(ret == ERROR_NONE); - - DELETE_ARRAY(rwpk.rectRegionPacking); - DELETE_ARRAY(covi.sphereRegions); } DELETE_MEMORY(frameLowRes); DELETE_MEMORY(frameHighRes); } - fclose(fpDataOffset); - fpDataOffset = NULL; } } diff --git a/src/VROmafPacking/test/testHevcNaluParser.cpp b/src/VROmafPacking/test/testHevcNaluParser.cpp index 6ba261a5..74208664 100644 --- a/src/VROmafPacking/test/testHevcNaluParser.cpp +++ b/src/VROmafPacking/test/testHevcNaluParser.cpp @@ -32,11 +32,15 @@ //! 
#include "gtest/gtest.h" -#include "../HevcNaluParser.h" +//#include "../../plugins/StreamProcess_Plugin/VideoStream_Plugin/HevcVideoStream//HevcNaluParser.h" +#include "HevcNaluParser.h" +#include "OmafPackingLog.h" -VCD_USE_VRVIDEO; +extern "C" +{ +#include "safestringlib/safe_mem_lib.h" +} -namespace { class HevcNaluParserTest : public testing::Test { public: @@ -103,10 +107,11 @@ class HevcNaluParserTest : public testing::Test return; } - memset(m_360scvpParam, 0, sizeof(param_360SCVP)); + memset_s(m_360scvpParam, sizeof(param_360SCVP), 0); m_360scvpParam->usedType = E_PARSER_ONENAL; m_360scvpParam->pInputBitstream = m_headerData; m_360scvpParam->inputBitstreamLen = m_headerSize; + m_360scvpParam->logFunction = (void*)logCallBack; m_360scvpHandle = I360SCVP_Init(m_360scvpParam); if (!m_360scvpHandle) @@ -237,9 +242,12 @@ TEST_F(HevcNaluParserTest, ParseHevcHeader) } fread(vpsData, 1, vpsLen, fp); - int32_t compRet = memcmp(vpsNalu->data, vpsData, vpsLen); + int32_t diff = 0; + errno_t compRet = EOK; + compRet = memcmp_s(vpsNalu->data, vpsLen, vpsData, vpsLen, &diff); + EXPECT_TRUE(compRet == EOK); EXPECT_TRUE(vpsNalu->dataSize == vpsLen); - EXPECT_TRUE(compRet == 0); + EXPECT_TRUE(diff == 0); EXPECT_TRUE(vpsNalu->startCodesSize == 4); EXPECT_TRUE(vpsNalu->naluType == 32); @@ -271,9 +279,10 @@ TEST_F(HevcNaluParserTest, ParseHevcHeader) } fread(spsData, 1, spsLen, fp); - compRet = memcmp(spsNalu->data, spsData, spsLen); + compRet = memcmp_s(spsNalu->data, spsLen, spsData, spsLen, &diff); + EXPECT_TRUE(compRet == EOK); EXPECT_TRUE(spsNalu->dataSize == spsLen); //includes start codes - EXPECT_TRUE(compRet == 0); + EXPECT_TRUE(diff == 0); EXPECT_TRUE(spsNalu->startCodesSize == 4); EXPECT_TRUE(spsNalu->naluType == 33); @@ -305,9 +314,10 @@ TEST_F(HevcNaluParserTest, ParseHevcHeader) } fread(ppsData, 1, ppsLen, fp); - compRet = memcmp(ppsNalu->data, ppsData, ppsLen); + compRet = memcmp_s(ppsNalu->data, ppsLen, ppsData, ppsLen, &diff); + EXPECT_TRUE(compRet == EOK); EXPECT_TRUE(ppsNalu->dataSize == ppsLen); //includes start codes - EXPECT_TRUE(compRet == 0); + EXPECT_TRUE(diff == 0); EXPECT_TRUE(ppsNalu->startCodesSize == 4); EXPECT_TRUE(ppsNalu->naluType == 34); @@ -426,4 +436,3 @@ TEST_F(HevcNaluParserTest, ParseSliceNalu) delete parser; parser = NULL; } -} diff --git a/src/VROmafPacking/test/testVideoStream.cpp b/src/VROmafPacking/test/testVideoStream.cpp index 1c0b3a6c..848e0141 100644 --- a/src/VROmafPacking/test/testVideoStream.cpp +++ b/src/VROmafPacking/test/testVideoStream.cpp @@ -31,12 +31,16 @@ //! Created on April 30, 2019, 6:04 AM //! 
+#include #include "gtest/gtest.h" -#include "../VideoStream.h" +#include "VideoStreamPluginAPI.h" +#include "error.h" +extern "C" +{ +#include "safestringlib/safe_mem_lib.h" +} -VCD_USE_VRVIDEO; -namespace { class VideoStreamTest : public testing::Test { public: @@ -231,10 +235,13 @@ class VideoStreamTest : public testing::Test return; } - memset(m_initInfo, 0, sizeof(InitialInfo)); + memset_s(m_initInfo, sizeof(InitialInfo), 0); m_initInfo->bsNumVideo = 2; m_initInfo->bsNumAudio = 0; - m_initInfo->tilesMergingType = TilesMergingType::TwoResTilesMerging; + m_initInfo->packingPluginPath = "/usr/local/lib"; + m_initInfo->packingPluginName = "HighResPlusFullLowResPacking"; + m_initInfo->videoProcessPluginPath = "/usr/local/lib"; + m_initInfo->videoProcessPluginName = "HevcVideoStreamProcess"; m_initInfo->bsBuffers = new BSBuffer[2]; if (!m_initInfo->bsBuffers) { @@ -289,7 +296,87 @@ class VideoStreamTest : public testing::Test m_initInfo->segmentationInfo->baseUrl = NULL; m_initInfo->segmentationInfo->utcTimingUrl = NULL; - m_vsLow = new VideoStream(); + m_initInfo->projType = E_SVIDEO_EQUIRECT; + + m_vsPlugin = NULL; + m_vsPlugin = dlopen("/usr/local/lib/libHevcVideoStreamProcess.so", RTLD_LAZY); + if (!m_vsPlugin) + { + fclose(m_highResFile); + m_highResFile = NULL; + fclose(m_lowResFile); + m_lowResFile = NULL; + DELETE_ARRAY(m_highResHeader); + DELETE_ARRAY(m_lowResHeader); + DELETE_ARRAY(m_totalDataLow); + DELETE_ARRAY(m_totalDataHigh); + DELETE_ARRAY(m_initInfo->bsBuffers); + DELETE_MEMORY(m_initInfo->segmentationInfo); + DELETE_MEMORY(m_initInfo); + return; + } + + CreateVideoStream* createVS = NULL; + createVS = (CreateVideoStream*)dlsym(m_vsPlugin, "Create"); + const char* dlsymErr1 = dlerror(); + if (dlsymErr1) + { + fclose(m_highResFile); + m_highResFile = NULL; + fclose(m_lowResFile); + m_lowResFile = NULL; + DELETE_ARRAY(m_highResHeader); + DELETE_ARRAY(m_lowResHeader); + DELETE_ARRAY(m_totalDataLow); + DELETE_ARRAY(m_totalDataHigh); + DELETE_ARRAY(m_initInfo->bsBuffers); + DELETE_MEMORY(m_initInfo->segmentationInfo); + DELETE_MEMORY(m_initInfo); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; + return; + } + + if (!createVS) + { + fclose(m_highResFile); + m_highResFile = NULL; + fclose(m_lowResFile); + m_lowResFile = NULL; + DELETE_ARRAY(m_highResHeader); + DELETE_ARRAY(m_lowResHeader); + DELETE_ARRAY(m_totalDataLow); + DELETE_ARRAY(m_totalDataHigh); + DELETE_ARRAY(m_initInfo->bsBuffers); + DELETE_MEMORY(m_initInfo->segmentationInfo); + DELETE_MEMORY(m_initInfo); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; + return; + } + + DestroyVideoStream* destroyVS = NULL; + destroyVS = (DestroyVideoStream*)dlsym(m_vsPlugin, "Destroy"); + const char *dlsymErr = dlerror(); + if (dlsymErr || !destroyVS) + { + fclose(m_highResFile); + m_highResFile = NULL; + fclose(m_lowResFile); + m_lowResFile = NULL; + DELETE_ARRAY(m_highResHeader); + DELETE_ARRAY(m_lowResHeader); + DELETE_ARRAY(m_totalDataLow); + DELETE_ARRAY(m_totalDataHigh); + DELETE_ARRAY(m_initInfo->bsBuffers); + DELETE_MEMORY(m_initInfo->segmentationInfo); + DELETE_MEMORY(m_initInfo); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; + return; + } + + m_vsLow = createVS(); if (!m_vsLow) { fclose(m_highResFile); @@ -303,10 +390,13 @@ class VideoStreamTest : public testing::Test DELETE_ARRAY(m_initInfo->bsBuffers); DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } ((MediaStream*)m_vsLow)->SetMediaType(VIDEOTYPE); + ((MediaStream*)m_vsLow)->SetCodecId(CODEC_ID_H265); 
int32_t ret = m_vsLow->Initialize(m_lowResStreamIdx, &(m_initInfo->bsBuffers[0]), m_initInfo); if (ret) @@ -322,12 +412,14 @@ class VideoStreamTest : public testing::Test DELETE_ARRAY(m_initInfo->bsBuffers); DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo); - DELETE_MEMORY(m_vsLow); + destroyVS((VideoStream*)(m_vsLow)); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } //Create and Initialize high resolution video stream - m_vsHigh = new VideoStream(); + m_vsHigh = createVS(); if (!m_vsHigh) { fclose(m_highResFile); @@ -341,11 +433,14 @@ class VideoStreamTest : public testing::Test DELETE_ARRAY(m_initInfo->bsBuffers); DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo); - DELETE_MEMORY(m_vsLow); + destroyVS((VideoStream*)(m_vsLow)); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } ((MediaStream*)m_vsHigh)->SetMediaType(VIDEOTYPE); + ((MediaStream*)m_vsHigh)->SetCodecId(CODEC_ID_H265); ret = m_vsHigh->Initialize(m_highResStreamIdx, &(m_initInfo->bsBuffers[1]), m_initInfo); if (ret) @@ -361,8 +456,10 @@ class VideoStreamTest : public testing::Test DELETE_ARRAY(m_initInfo->bsBuffers); DELETE_MEMORY(m_initInfo->segmentationInfo); DELETE_MEMORY(m_initInfo); - DELETE_MEMORY(m_vsLow); - DELETE_MEMORY(m_vsHigh); + destroyVS((VideoStream*)(m_vsLow)); + destroyVS((VideoStream*)(m_vsHigh)); + dlclose(m_vsPlugin); + m_vsPlugin = NULL; return; } } @@ -394,9 +491,6 @@ class VideoStreamTest : public testing::Test m_initInfo = NULL; } - DELETE_MEMORY(m_vsLow); - DELETE_MEMORY(m_vsHigh); - remove("test_vps_lowRes.bin"); remove("test_sps_lowRes.bin"); remove("test_pps_lowRes.bin"); @@ -404,6 +498,32 @@ class VideoStreamTest : public testing::Test remove("test_vps_highRes.bin"); remove("test_sps_highRes.bin"); remove("test_pps_highRes.bin"); + + if (m_vsPlugin) + { + DestroyVideoStream* destroyVS = NULL; + destroyVS = (DestroyVideoStream*)dlsym(m_vsPlugin, "Destroy"); + const char *dlsymErr = dlerror(); + if (dlsymErr) + { + return; + } + if (!destroyVS) + { + return; + } + if (m_vsLow) + { + destroyVS((VideoStream*)(m_vsLow)); + } + if (m_vsHigh) + { + destroyVS((VideoStream*)(m_vsHigh)); + } + + dlclose(m_vsPlugin); + m_vsPlugin = NULL; + } } FILE *m_highResFile; @@ -415,6 +535,7 @@ class VideoStreamTest : public testing::Test uint8_t *m_totalDataLow; uint8_t *m_totalDataHigh; InitialInfo *m_initInfo; + void *m_vsPlugin; VideoStream *m_vsLow; VideoStream *m_vsHigh; }; @@ -444,7 +565,7 @@ TEST_F(VideoStreamTest, AllProcess) uint16_t tileHeight = lowResHeight / lowTileInCol; for (uint16_t idx = 0; idx < rwpk.numRegions; idx++) { - memset(&(rwpk.rectRegionPacking[idx]), 0, sizeof(RectangularRegionWisePacking)); + memset_s(&(rwpk.rectRegionPacking[idx]), sizeof(RectangularRegionWisePacking), 0); rwpk.rectRegionPacking[idx].transformType = 0; rwpk.rectRegionPacking[idx].guardBandFlag = 0; rwpk.rectRegionPacking[idx].projRegWidth = tileWidth; @@ -475,7 +596,7 @@ TEST_F(VideoStreamTest, AllProcess) } for (uint16_t idx = 0; idx < covi.numRegions; idx++) { - memset(&(covi.sphereRegions[idx]), 0, sizeof(SphereRegion)); + memset_s(&(covi.sphereRegions[idx]), sizeof(SphereRegion), 0); covi.sphereRegions[idx].viewIdc = 0; covi.sphereRegions[idx].centreAzimuth = (int32_t)((((lowResWidth / 2) - (float)((idx % lowTileInRow) * tileWidth + tileWidth / 2)) * 360 * 65536) / lowResWidth); covi.sphereRegions[idx].centreElevation = (int32_t)((((lowResHeight / 2) - (float)((idx / lowTileInRow) * tileHeight + tileHeight / 2)) * 180 * 65536) / lowResHeight); @@ -502,7 +623,8 @@ 
TEST_F(VideoStreamTest, AllProcess) int32_t compRet = 0; for (uint16_t idx = 0; idx < rwpk.numRegions; idx++) { - compRet = memcmp(&(origRwpk->rectRegionPacking[idx]), &(rwpk.rectRegionPacking[idx]), sizeof(RectangularRegionWisePacking)); + errno_t result = memcmp_s(&(origRwpk->rectRegionPacking[idx]), sizeof(RectangularRegionWisePacking), &(rwpk.rectRegionPacking[idx]), sizeof(RectangularRegionWisePacking), &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(compRet == 0); } @@ -514,7 +636,8 @@ TEST_F(VideoStreamTest, AllProcess) EXPECT_TRUE(origCovi->defaultViewIdc == covi.defaultViewIdc); for (uint16_t idx = 0; idx < origCovi->numRegions; idx++) { - compRet = memcmp(&(origCovi->sphereRegions[idx]), &(covi.sphereRegions[idx]), sizeof(SphereRegion)); + errno_t result = memcmp_s(&(origCovi->sphereRegions[idx]), sizeof(SphereRegion), &(covi.sphereRegions[idx]), sizeof(SphereRegion), &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(compRet == 0); } @@ -547,7 +670,8 @@ TEST_F(VideoStreamTest, AllProcess) } fread(vpsData, 1, vpsLen, fp); - compRet = memcmp(vpsNalu->data, vpsData, vpsLen); + errno_t result = memcmp_s(vpsNalu->data, vpsLen, vpsData, vpsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(vpsNalu->dataSize == vpsLen); EXPECT_TRUE(compRet == 0); EXPECT_TRUE(vpsNalu->startCodesSize == 4); @@ -581,7 +705,8 @@ TEST_F(VideoStreamTest, AllProcess) } fread(spsData, 1, spsLen, fp); - compRet = memcmp(spsNalu->data, spsData, spsLen); + result = memcmp_s(spsNalu->data, spsLen, spsData, spsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(spsNalu->dataSize == spsLen); //includes start codes EXPECT_TRUE(compRet == 0); EXPECT_TRUE(spsNalu->startCodesSize == 4); @@ -615,7 +740,8 @@ TEST_F(VideoStreamTest, AllProcess) } fread(ppsData, 1, ppsLen, fp); - compRet = memcmp(ppsNalu->data, ppsData, ppsLen); + result = memcmp_s(ppsNalu->data, ppsLen, ppsData, ppsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(ppsNalu->dataSize == ppsLen); //includes start codes EXPECT_TRUE(compRet == 0); EXPECT_TRUE(ppsNalu->startCodesSize == 4); @@ -648,7 +774,7 @@ TEST_F(VideoStreamTest, AllProcess) tileHeight = highResHeight / highTileInCol; for (uint16_t idx = 0; idx < rwpk.numRegions; idx++) { - memset(&(rwpk.rectRegionPacking[idx]), 0, sizeof(RectangularRegionWisePacking)); + memset_s(&(rwpk.rectRegionPacking[idx]), sizeof(RectangularRegionWisePacking), 0); rwpk.rectRegionPacking[idx].transformType = 0; rwpk.rectRegionPacking[idx].guardBandFlag = 0; rwpk.rectRegionPacking[idx].projRegWidth = tileWidth; @@ -680,7 +806,7 @@ TEST_F(VideoStreamTest, AllProcess) } for (uint16_t idx = 0; idx < covi.numRegions; idx++) { - memset(&(covi.sphereRegions[idx]), 0, sizeof(SphereRegion)); + memset_s(&(covi.sphereRegions[idx]), sizeof(SphereRegion), 0); covi.sphereRegions[idx].viewIdc = 0; covi.sphereRegions[idx].centreAzimuth = (int32_t)((((highResWidth / 2) - (float)((idx % highTileInRow) * tileWidth + tileWidth / 2)) * 360 * 65536) / highResWidth); covi.sphereRegions[idx].centreElevation = (int32_t)((((highResHeight / 2) - (float)((idx / highTileInRow) * tileHeight + tileHeight / 2)) * 180 * 65536) / highResHeight); @@ -707,7 +833,8 @@ TEST_F(VideoStreamTest, AllProcess) compRet = 0; for (uint16_t idx = 0; idx < rwpk.numRegions; idx++) { - compRet = memcmp(&(origRwpk->rectRegionPacking[idx]), &(rwpk.rectRegionPacking[idx]), sizeof(RectangularRegionWisePacking)); + result = memcmp_s(&(origRwpk->rectRegionPacking[idx]), sizeof(RectangularRegionWisePacking), &(rwpk.rectRegionPacking[idx]), 
sizeof(RectangularRegionWisePacking), &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(compRet == 0); } @@ -719,7 +846,8 @@ TEST_F(VideoStreamTest, AllProcess) EXPECT_TRUE(origCovi->defaultViewIdc == covi.defaultViewIdc); for (uint16_t idx = 0; idx < origCovi->numRegions; idx++) { - compRet = memcmp(&(origCovi->sphereRegions[idx]), &(covi.sphereRegions[idx]), sizeof(SphereRegion)); + result = memcmp_s(&(origCovi->sphereRegions[idx]), sizeof(SphereRegion), &(covi.sphereRegions[idx]), sizeof(SphereRegion), &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(compRet == 0); } @@ -750,7 +878,8 @@ TEST_F(VideoStreamTest, AllProcess) } fread(vpsData, 1, vpsLen, fp); - compRet = memcmp(vpsNalu->data, vpsData, vpsLen); + result = memcmp_s(vpsNalu->data, vpsLen, vpsData, vpsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(vpsNalu->dataSize == vpsLen); EXPECT_TRUE(compRet == 0); EXPECT_TRUE(vpsNalu->startCodesSize == 4); @@ -779,8 +908,8 @@ TEST_F(VideoStreamTest, AllProcess) } fread(spsData, 1, spsLen, fp); - compRet = memcmp(spsNalu->data, spsData, spsLen); - + result = memcmp_s(spsNalu->data, spsLen, spsData, spsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(spsNalu->dataSize == spsLen); //includes start codes EXPECT_TRUE(compRet == 0); EXPECT_TRUE(spsNalu->startCodesSize == 4); @@ -809,7 +938,8 @@ TEST_F(VideoStreamTest, AllProcess) } fread(ppsData, 1, ppsLen, fp); - compRet = memcmp(ppsNalu->data, ppsData, ppsLen); + result = memcmp_s(ppsNalu->data, ppsLen, ppsData, ppsLen, &compRet); + EXPECT_TRUE(result == EOK); EXPECT_TRUE(ppsNalu->dataSize == ppsLen); //includes start codes EXPECT_TRUE(compRet == 0); EXPECT_TRUE(ppsNalu->startCodesSize == 4); @@ -845,8 +975,8 @@ TEST_F(VideoStreamTest, AllProcess) fp = NULL; return; } - memset(frameData, 0, frameSize[idx]); - memcpy(frameData, m_totalDataLow+offset, frameSize[idx]); + memset_s(frameData, frameSize[idx], 0); + memcpy_s(frameData, frameSize[idx], m_totalDataLow+offset, frameSize[idx]); offset += frameSize[idx]; FrameBSInfo *frameInfo = new FrameBSInfo; @@ -921,8 +1051,8 @@ TEST_F(VideoStreamTest, AllProcess) return; } - memset(frameData, 0, frameSize1[idx]); - memcpy(frameData, m_totalDataHigh+offset, frameSize1[idx]); + memset_s(frameData, frameSize1[idx], 0); + memcpy_s(frameData, frameSize1[idx], m_totalDataHigh+offset, frameSize1[idx]); offset += frameSize1[idx]; FrameBSInfo *frameInfo = new FrameBSInfo; @@ -976,4 +1106,3 @@ TEST_F(VideoStreamTest, AllProcess) fclose(fp); fp = NULL; } -} diff --git a/src/doc/Immersive_Video_Delivery_360SCVP.md b/src/doc/Immersive_Video_Delivery_360SCVP.md index a9cba77c..2e29be0e 100644 --- a/src/doc/Immersive_Video_Delivery_360SCVP.md +++ b/src/doc/Immersive_Video_Delivery_360SCVP.md @@ -1,9 +1,11 @@ -# Immersive Video Delivery 360SCVP (Stream Concatenation Video Processing) Library +# Immersive Video Delivery 360SCVP (Stream Concatenation and Viewport Processing) Library ## Introduction 360SCVP is a common library, which provides some basic functions for stream and view port. So, the 360SCVP is the abbreviation of stream concatenation and view port about 360 videos. -360SCVP library is used in the server side (encoder and packing) and client side(player) of the OMAF compliance solution. Besides this, the library can be used in the 360 low-latency solution. So, 360SCVP is scalable, which can be easily extend to more functions according to users’ requirements. 
+360SCVP library is used in the server side (encoder and packing) and client side(dashaccess and player) of the OMAF compliance solution. Besides this, the library can be used in the 360 low-latency solution. So, 360SCVP is scalable, which can be easily extend to more functions according to users’ requirements. +For now, both equirect and cubemap geometry type are supported. The below diagram shows the four use types and the basic functions, which are supported currently. Whether the functions or the use types can be extended in the future. +In the latest version, a fast viewport calculation algorithm is proposed by using a look-up table, which effectively reduced the calculation time and thus optimized the E2E latency. @@ -13,6 +15,7 @@ The below table shows the five key data structures, which are param_360SCVP, par - E_MERGE_AND_VIEWPORT, when given the view port information and one frame bitstream, the library can calculate the viewport area in the frame and output the corresponding bitstream; additionally, if given two frame bitstreams e.g. one is high resolution and the other is low resolution, the library can calculate the viewport area and merge the corresponding two bitstreams into one bitstream. - E_PARSER_ONENAL, when given one NAL bitstream (SPS, PPS, SLICE, etc.), the library can parse the bitstream and return some key variables that users are interested in. If give some key variables, the library can generate its corresponding NAL bitstream. - E_PARSER_FOR_CLIENT, the library can parse some feedback information in client, currently the RWPK parser is supported. +- E_VIEWPORT_ONLY, when getting viewport information from Omaf Dash Access library, it can calculate content coverage of FOV and generate related information of selected tiles. | **Fields** | **Description** | | --- | --- | @@ -21,6 +24,22 @@ The below table shows the five key data structures, which are param_360SCVP, par | param_streamStitchInfo | The structure provides the input bitstream clips and some frame level parameters. It is input parameter when the use type is E_STREAM_STITCH_ONLY. For each bitstream clips, there is a structure param_oneStream_info to provide its information. | | param_oneStream_info | The structure provides the horizontal and vertical tile number, tile index, bitstream data buffer and data length in each input bitstream. It is input parameter when the use type is E_STREAM_STITCH_ONLY | | param_360SCVP | The structure is the main data structure, which provides all of the input and output parameters used in the library. So, it contains the above four data structures | +| Param_VideoFPStruct | The structure describes the face property of the input geometry, including tile row/col number, width/height of each face, etc. | +| PluginDef | The structure is the plugin definition which provides plugin information. | + +## Cubemap support +- The following picture and face index table[1](#refer-anchor) show the coordinates definition for cubemap. 
+ + + +| **Face index** | **Face label** | **Notes** | +| --- | --- | --- | +| 0 | PX | Front face with positive X axis value | +| 1 | NX | Back face with negative X axis value | +| 2 | PY | Top face with positive Y axis value | +| 3 | NY | Bottom face with negative Y axis value | +| 4 | PZ | Right face with positive Z axis value | +| 5 | NZ | Left face with negative Z axis value | ## Call Sequences - The following diagram shows how to initialize the library: @@ -30,3 +49,8 @@ The below table shows the five key data structures, which are param_360SCVP, par - After initialization, you can call library as following workflow: + +
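The call-sequence diagrams referenced above are not reproduced in this patch. The flow they depict is: fill a param_360SCVP structure for the chosen use type, initialize a library instance with it, update the viewport and process data frame by frame, then tear the instance down. The sketch below spells that order out; the I360SCVP_* entry-point names and signatures are assumptions here and should be checked against 360SCVPAPI.h.

```cpp
#include "360SCVPAPI.h"  // param_360SCVP plus the library entry points (names assumed below)

// Hypothetical call-order sketch for the E_MERGE_AND_VIEWPORT use type.
int RunViewportMerge(param_360SCVP *param)
{
    // 1. The caller fills *param for the chosen use type: input high/low resolution
    //    bitstream buffers, an output buffer, picture/tile geometry and viewport FOV.
    void *instance = I360SCVP_Init(param);            // assumed init entry point
    if (!instance)
        return -1;

    // 2. Per frame: update the viewport, then process; the merged, decodable
    //    bitstream and its length come back through *param.
    I360SCVP_setViewPort(instance, 0.0f /*yaw*/, 0.0f /*pitch*/);
    int ret = I360SCVP_process(param, instance);

    // 3. Tear down when the session ends.
    I360SCVP_unInit(instance);
    return ret;
}
```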
+ +## Reference +[1] Yuwen He, Bharath Vishwanath, Xiaoyu Xiu, Yan Ye. AHG8: Algorithm description of InterDigital’s projection format conversion tool (PCT36i0), JVET-D0090, Chengdu, CN, 15–21 October 2016. diff --git a/src/doc/Immersive_Video_Delivery_Architecture.md b/src/doc/Immersive_Video_Delivery_Architecture.md index c2bee9fc..cad47862 100644 --- a/src/doc/Immersive_Video_Delivery_Architecture.md +++ b/src/doc/Immersive_Video_Delivery_Architecture.md @@ -4,15 +4,15 @@ There are two major components in server side to have tile-based 360 Video encoding and OMAF-Compliant packing. - Tile-based Encoding: SVT-HEVC can support MCTS which is a must feature to support tiled 360 video streaming; it is the encoder library to support the whole solution. Projection/Rotation/rwpk and reletive SEI information should be included in encoded bitstream. -- OMAF DASH Packing: according to processing mode, the component can accept multiple streams, and then packing the streams' into tiled-base mp4 segmentations: normal tile video segmentation and extractor segementation. Extractor segmentations creation is decided by input streams' SEI information, and the Packing library will generate special rwpk/srqr information for each extractor segmentation. +- OMAF DASH Packing: according to the processing mode, the component can accept multiple streams, and then packing the streams' into tiled-base mp4 segmentations: normal tile video segmentation and extractor segementation. Extractor segmentations creation is decided by input streams' SEI information, and the Packing library will generate special rwpk/srqr information for each extractor segmentation. As for client side, there are two components in this solution too. -- OMAF Dash Access Library: the library provide basic functions to parse mpd file, selected extractor track based on current viewport, download relative tile segments, parse the extractor and tile segments, and then output a standard HEVC NAL frame with RWPK information for decoding +- OMAF Dash Access Library: the library provides basic functions to parse mpd file, selectes extractor track based on current viewport, downloads relative tile segments, parses the extractor and tile segments, and then output a standard HEVC NAL frame with RWPK information for decoding - Reference OMAF player: the refernce player gets aggregated video bitstream based on OMAF Dash Access library, decodes the bitstream with/without HW acceleration, and then render the picture to sphere/cube mesh based on rwpk of each tile in the frame. Beside the components in Server/Client, there is a common library which is used to do basic process such as: -- Provide a unify interface to process tile-based HEVC bitstream processing and viewport-based Content Processing; -- Support HEVC bitstream processing: VPS/SPS/PPS parsing and generating, 360Video-relative SEI Generating and parsing, HEVC tile-based bitstream aggregation; +- Provide a unify interface to process tile-based HEVC bitstream processing and viewport-based content processing; +- Support HEVC bitstream processing: VPS/SPS/PPS parsing and generating, 360Video-relative SEI generating and parsing, HEVC tile-based bitstream aggregation; - Support Viewport generation, viewport-based tile selection and extractor selection based on content coverage; -FFMPEG Plugins in this solution provide a quick way for user to try this tiled 360 Video streaming solution using common ffmpeg command line. 
\ No newline at end of file +FFMPEG Plugins in this solution provide a quick way for user to try this tiled 360 Video streaming solution using common ffmpeg command line. diff --git a/src/doc/Immersive_Video_Delivery_CubemapTransformType.md b/src/doc/Immersive_Video_Delivery_CubemapTransformType.md new file mode 100644 index 00000000..990c4cb4 --- /dev/null +++ b/src/doc/Immersive_Video_Delivery_CubemapTransformType.md @@ -0,0 +1,13 @@ +# Immersive Video Delivery Cubemap Transform Type + +It is assumed that the scenes that are shot in 360 degree real world are defined as NO_TRANSFORM. And in cubemap projection, the six faces are defined in the following figure. + + + +In particularly, in face NZ bottom/PZ top, the layout that can be stitched continuously with PX front is defined as NO_TRANSFORM. + +To figure out how to define transform type of the input cube-map source, we should know that there are total 8 transform types of rotation, which are shown in the following figure. + + + +The transform type is to specify the rotation and mirroring to be applied to the i-th packed region to remap to the i-th projected region. This parameter is required as an input to describe the input cube-map source in server end. diff --git a/src/doc/Immersive_Video_Delivery_DashAccess.md b/src/doc/Immersive_Video_Delivery_DashAccess.md index dffc2bea..6c59e5aa 100644 --- a/src/doc/Immersive_Video_Delivery_DashAccess.md +++ b/src/doc/Immersive_Video_Delivery_DashAccess.md @@ -6,10 +6,16 @@ Dash Access library is a client library for accessing the tiled-based viewport-d - Tile-based MP4 segmentation downloading and parsing - Viewport-based Tile-set Selection - OMAF-Compliant Metadata parsing +- Tiles stitching to reconstruct multiple video streams based on viewport when there is no extractor track enabled (later binding mode). +- Support tiles stitched video streams split based on maximum decodable picture width and height limitation (later binding mode). +- Support equirectangular projected media content in both extractor track mode and later binding mode. +- Support cube-map projected media content in both extractor track mode and later binding mode. +- Support AAC audio DASH MPD file parsing, segments downloading and parsing +- Support planar projected media content in later binding mode. -So far, the library is Linux-based version, but it can be ported to Android and relative client platform. +Linux-based version and Android-based version of the library are both supported. ## API Call Sequence @@ -19,5 +25,5 @@ Dash Access Library provides C APIs for user; the call sequence is as follows: Before calling any other APIs in the library, you should call OmafAccess_Init to get the Handler for further usage. - OmafAccess_OpenMedia is used to open a url which is compliant to OMAF DASH specification, and the MPD file will be downloaded and parsed. Then you can use OmafAccess_GetMediaInfo to get relative A/V information in the stream. - OmafAccess_SetupHeadSetInfo is used to set the initial head position of the user, and it will be used to select the initial viewport information and relative tile-set; -- OmafAccess_GetPacket is the function used to get well-aggregated video stream based on viewport and can be decoded by general decoder for rendering; with the API, you can also get the tile RWPK (Regin-Wised Packing) information for current viewport tile set. 
With/without the same thread, you can call OmafAccess_ChangeViewport to change viewport, the function will re-choose the Tile Set based on input pose Information, and it will decide what packing will be get in next segment. +- OmafAccess_GetPacket is the function used to get well-aggregated video streams based on viewport and maximum decodable picture width and height (later binding mode), and can be decoded by general decoder for rendering; with the API, you can also get the video stream RWPK (Region-Wise Packing) information for each output video stream, the total number of output video streams and informaiton of each output video stream, like resolution. With/without the same thread, you can call OmafAccess_ChangeViewport to change viewport, the function will re-choose the Tile Set based on input pose Information, and it will decide what packed content will be get in next segment. - After all media is played out, you can call OmafAccess_CloseMedia and OmafAccess_Close to end the using of the library. diff --git a/src/doc/Immersive_Video_Delivery_FFmpeg_usage.md b/src/doc/Immersive_Video_Delivery_FFmpeg_usage.md new file mode 100644 index 00000000..07dd4a28 --- /dev/null +++ b/src/doc/Immersive_Video_Delivery_FFmpeg_usage.md @@ -0,0 +1,124 @@ +# Immersive Video Delivery FFmpeg Plugins + +## Introduction +- There are 3 FFmpeg plugins provided in the project: OMAF Packing, Distribute Encoder, and OMAF Dash Access. The implementations of plugins provide a quick way to build tiled-based 360 Video delivery pipeline from server transcoding to client playback based on the libraries built from the project. At the same time, the plugins are also the sample code indicating how to use relative libraries. +- To build a E2E 360 Video delivery pipeline, the FFmpeg plugins provide key functions and relative build-in or external modules of FFmpeg are also needed to get the best performance, such as HEVC/H264 decoder, scaling filter, demuxer and so on. To achieve the best performance, parameters tuning of each component is necessary. +- The FFmpeg patches based on specific code base are provided in the project to enable these plugins. +- The sample command for "decoder + encoder + packing": +```bash + ffmpeg -i [file] -input_type 1 \ + -c:v:0 distributed_encoder -s:0 3840x1920 -tile_row:0 6 -tile_column:0 10 \ + -config_file:0 config_high.txt -g:0 15 -b:0 30M -map 0:v \ + -c:v:1 distributed_encoder -s:1 1024x640 -tile_row:1 2 -tile_column:1 4 \ + -config_file:1 config_low.txt -g:1 15 -b:1 5M -map 0:v \ + -f omaf_packing -is_live 0 -split_tile 1 -seg_duration 1 -window_size 20 -extra_window_size 30 \ + -base_url http://[server ip]/OMAFStatic_4k/ -out_name Test /usr/local/nginx/html/OMAFStatic_4k/ +``` + +## OMAF Packing Plugin +OMAF Packing Plugin is a multiplexer in the pipeline to use OMAF packing library to packing input bitstream to generate OMAF-compliant DASH content. 
Plugin name is "omaf_packing", options available for this plugin are listed as below: + +| **Parameters** | **Descriptions** | **Type** | **Default Value** | **Range** | **Must-Have** | +| --- | --- | --- | --- | --- | --- | +| packing_proj_type | Input source projection type | string | "ERP" | "ERP" or "Cubemap" | NO | +| cubemap_face_file | Configure input cubemap face relation to face layout defined in OMAF for cube-3x2 | string | N/A | N/A | NO | +| viewport_w | Set viewport width | int | 1024 | N/A | NO | +| viewport_h | Set viewport height | int | 1024 | N/A | NO | +| viewport_yaw | Set viewport yaw angle, which is the angle around y axis | float | 90 | [0.0, 180.0] | NO | +| viewport_pitch | Set viewport pitch angle, which is the angle around x axis | float | 0 | [0.0, 100.0] | NO | +| viewport_fov_hor | Set horizontal angle of field of view (FOV) | float | 80 | [0.0, 180.0] | NO | +| viewport_fov_ver | Set vertical angle of field of view (FOV) | float | 90 | [0.0, 100.0] | NO | +| window_size | Number of segments kept in the manifest | int | 5 | N/A | NO | +| extra_window_size | Number of segments kept outside of the manifest before removing from disk | int | 15 | N/A | NO | +| split_tile | Need split the stream to tiles if input is tile-based hevc stream | int | 0 | 0, 1 | NO | +| seg_duration | Segment duration (in u seconds, fractional value can be set) | int | 5000000 | N/A | NO | +| remove_at_exit | Remove all segments when finished | int | 0 | 0, 1 | NO | +| use_template | Use SegmentTemplate instead of SegmentList | bool | 0 | 0, 1 | NO | +| use_timeline | Use SegmentTimeline in SegmentTemplate | int | 0 | 0, 1 | NO | +| utc_timing_url | URL of the page that will return the UTC timestamp in ISO format | string | N/A | N/A | NO | +| is_live | Enable/Disable streaming mode of output. 
Each frame will be moof fragment | bool | 0 | 0, 1 | YES | +| base_url | MPD BaseURL, it can be the the url of generated segmentatio and MPD files | string | N/A | N/A | YES | +| out_name | Name prefix for all dash output files | string | N/A | "dash-stream" | NO | +| need_buffered_frames | Needed buffered frames number before packing starts | int | 15 | N/A | NO | +| extractors_per_thread | Extractor tracks per segmentation thread | int | 0 | N/A | NO | +| has_extractor | Enable/Disable OMAF extractor tracks| int | 1 | 0, 1 | NO | +| packing_plugin_path | OMAF Packing plugin path | string | N/A | "/usr/local/lib" | NO | +| packing_plugin_name | OMAF Packing plugin name | string | N/A | "HighResPlusFullLowResPacking" | NO | +| video_plugin_path | Video stream process plugin path | string | N/A | "/usr/local/lib" | NO | +| video_plugin_name | Video stream process plugin name | string | N/A | "HevcVideoStreamProcess" | NO | +| audio_plugin_path | Audio stream process plugin path | string | N/A | "null" | NO | +| audio_plugin_name | Audio stream process plugin name | string | N/A | "null" | NO | +| fixed_extractors_res | Whether extractor track needs the fixed resolution | bool | 0 | 0, 1 | NO | +| need_external_log | Whether external log callback is needed | bool | 0 | 0, 1 | NO | +| min_log_level | Minimal log level of output [0: INFO, 1: WARNING, 2: ERROR, 3: FATAL] | int | 2 | [0, 3] | NO | + +Sample command line for cube-map projected input source is as follows: +``` bash + numactl -c 1 ./ffmpeg -stream_loop -1 -i [file] -input_type 1 -proj_type Cubemap -rc 1 \ + -c:v:0 distributed_encoder -s:0 5760x3840 -tile_row:0 6 -tile_column:0 9 \ + -config_file:0 config_high.txt -la_depth:0 25 -r:0 25 -g:0 25 -b:0 80M -map 0:v \ + -c:v:1 distributed_encoder -s:1 960x640 -sws_flags neighbor -tile_row:1 2 -tile_column:1 3 \ + -config_file:1 config_low.txt -la_depth:1 25 -r:1 25 -g:1 25 -b:1 1500K -map 0:v \ + -vframes 3000 -f omaf_packing -packing_proj_type Cubemap -cubemap_face_file 6kcube_face_info.txt \ + -is_live 0 -split_tile 1 -seg_duration 1 -has_extractor 0 \ + -base_url http://[server ip]:8080/8kcubevod/ -out_name Test /usr/local/nginx/html/8kcubevod/ +``` +The file "6kcube_face_info.txt" is to configure input cube-map face relation to face layout defined in OMAF spec for cube-3x2. +The content of "6kcube_face_info.txt" is as follows: +``` +NY NO_TRANSFORM +PY NO_TRANSFORM +PZ NO_TRANSFORM +NZ NO_TRANSFORM +PX NO_TRANSFORM +NX NO_TRANSFORM +``` +NY/PY/PZ/NZ/PX/NX mean corresponding faces location in face layout defined in OMAF spec for cube-3x2 of faces in input cube-map projected source in raster scanning sequence. +NO_TRANSFORM means there is no additional transform of input faces, like rotation. For more transform type definitions, please refer to following picture, note that this feature is only supported for cube-3x2: + + +## Distribute Encoder Plugin +Distribute Encoder Plugin is using DistributeEncoder library to do SVT-based HEVC Encoding. Plugin name is "distributed_encoder", options available for this plugin are listed as below: + +First, `config_file` is a must-have parameter containing "ip" and "port" pair, corresponding to IP address and the port of object machine to deploy sub-encoder. Replace IP address like "127.0.0.1" with "local" to start local mode, deploy sub-encoder on host machine directly. And "numa" node is required in 8K input for better FPS performance. 
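As a concrete illustration, using the same single-line format that the Getting Started guide later in this patch writes with `echo`, a local-mode config file for one sub-encoder can be as small as:
```
ip local port 9090
```
For 8K input append the NUMA node, for example `ip local port 9090 numa 1`, and to deploy the sub-encoder on another machine replace `local` with that worker's IP address.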
+ +| **Parameters** | **Descriptions** | **Type** | **Default Value** | **Range** | **Must-Have** | +| --- | --- | --- | --- | --- | --- | +| config_file | Configure file path for workers information | string | N/A | N/A | YES | +| proj_type | Input source projection type, ERP or Cubemap | string | "ERP" | "ERP" or "Cubemap" | NO | +| input_type | Input stream type, 0 - encoded, 1 - raw | int | 0 | 0, 1 | NO | +| input_codec | Input bitstream type, only work when input type is 0-encoded, 1-yuv | int | 0 | 0, 1 | NO | +| vui | Enable vui info | int | 0 | 0, 1 | NO | +| aud | Include AUD | int | 0 | 0, 1 | NO | +| hielevel | Hierarchical prediction levels setting [0: flat, 1: 2level, 2: 3level, 3: 4level] | int | 3 | [0, 4] | NO | +| la_depth | Look ahead distance | int | -1 | [-1, 256] | NO | +| preset | Encoding preset (e,g, for subjective quality tuning mode and >=4k resolution), [0, 10] (for >= 1080p resolution), [0, 9] (for all resolution and modes) | int | 9 | [0, 12] | NO | +| profile | Profile setting, Main Still Picture Profile not supported | int | 2 | [1, 4] | NO | +| tier | Set tier (general_tier_flag), 0: main, 1: high| int | 0 | 0, 1 | NO | +| level | Set level (level_idc) | int | 0 | [0, 255] | NO | +| rc | Bit rate control mode, 0:cqp, 1:vbr | int | 0 | 0, 1 | NO | +| qp | QP value for intra frames | int | 32 | 0, 51 | NO | +| sc_detection | Scene change detection | int | 0 | 0, 1 | NO | +| tune | Quality tuning mode, 0: sq, 1:oq, 2:vmaf | int | 1 | [0, 2] | NO | +| bl_mode | Random Access Prediction Structure type setting | int | 0 | 0, 1 | NO | +| hdr | High dynamic range input | int | 0 | 0, 1 | NO | +| asm_type | Assembly instruction set type [0: C Only, 1: Auto] | int | 0 | 0, 1 | NO | +| tile_column | Tile column count number | int | 1 | [1, 256] | NO | +| tile_row | Tile row count number | int | 1 | [1, 256] | NO | +| in_parallel | Multiple encoders running in parallel [0: Off, 1: On] | int | 0 | 0,1 | NO | +| need_external_log | Whether external log callback is needed | bool | 0 | 0, 1 | NO | +| min_log_level | Minimal log level of output [0: INFO, 1: WARNING, 2: ERROR, 3: FATAL] | int | 2 | [0, 3] | NO | + +## OMAF Dash Access Plugin +OMAF Dash Access Plugin is a FFmpeg demux used to access OMAF Dash Content by DashAccessLibrary. The demux name is "tiled_dash_demuxer". it is used to playback the content for test purpose. 
The options available for this plugin are listed as below: + +| **Parameters** | **Descriptions** | **Type** | **Default Value** | **Range** | **Must-Have** | +| --- | --- | --- | --- | --- | --- | +| allowed_extensions | List of file extensions that dash is allowed to access | string | "mpd" | N/A | YES | +| cache_path | The specific path of cache folder | string | "/home" | N/A | YES | +| enable_extractor | Whether to enable extractor track in OMAF Dash Access engine | int | 1 | 0, 1 | YES | + +Sample Command: +```bash + ffplay -allowed_extensions mpd -enable_extractor 0 [mpd url] +``` diff --git a/src/doc/Immersive_Video_Delivery_OMAF_Packing.md b/src/doc/Immersive_Video_Delivery_OMAF_Packing.md index 47fc07cc..c7ffc71f 100644 --- a/src/doc/Immersive_Video_Delivery_OMAF_Packing.md +++ b/src/doc/Immersive_Video_Delivery_OMAF_Packing.md @@ -11,19 +11,42 @@ The main OMAF compliant features in this library are listed as follows: - Signal sub-picture region wise packing information in MPD file by adding an EssentialProperty element with a @schemeIdUri attribute equal to "urn:mpeg:mpegI:omaf:2017:rwpk" and in ISOBMFF segment files by adding a new box ‘rwpk’. - Signal sub-picture content coverage information in MPD file by adding a SupplementalProperty element with a @schemeIdUri attribute equal to “urn:mpeg:mpegI:omaf:2017:cc” and in ISOBMFF segment files by adding a new box ‘covi’. - Support DASH track based on HEVC tile. -- Support OMAF compliant extractor track to organize relative HEVC tiles from high quality bitstream and low quality bitstream to construct sub-picture which will be played based on user’s orientation. +- Support OMAF compliant extractor track to organize relative HEVC tiles from high quality bitstream and low quality bitstream to construct sub-picture which will be played based on user’s orientation. - Support both static mode and dynamic (live) mode DASH. +- Support option to appoint whether OMAF compliant extractor track will be generated, that is, extractor track mode or later binding mode. +- Support customized plugin to generate sub-picture region wise packing information when extractor track will be generated. +- Support equirectangular projection format in both extractor track mode and later binding mode. +- Support cube-map projection format in both extractor track mode and later binding mode. + When the input source is cube-map projected, input cube-map face layout and corresponding transform type need to be specified. + Firstly, specify the face index for each face which is defined in OMAF spec for cube-3x2. + PY 0 + PX 1 + NY 2 + NZ 3 + NX 4 + PZ 5 + Secondly, specify the transform type of the input face. The transform type is depicted in below image: + + Now only cube-3x2 is supported. +- Support both fixed sub-picture resolution and dynamic sub-picture resolution in extractor track. + When fixed sub-picture resolution is required, each extractor track has the same sub-picture resolution, and it is the resolution from the largest selected tiles number. + When dynamic sub-picture resolution is required, the resolution of tiles stitched picture in extractor track comes from the actual selected tiles number. +- Support AAC audio DASH track. +- Support planar projection format in later binding mode. + When the input sources come from planar projectin, the main Adaptation Set item in MPD will give the all video information, like resolution and tile slize. 
## API Call Sequence - Call VROmafPackingInit API to create and initialize VROmafPacking library instance - Call VROmafPackingWriteSegment API to write one frame from one video to segment file. This API is called one time one frame. - Call VROmafPackingEndStreams API to stop segmentation process. -- Call VROmafPackingClose API to free VROmafPacking library related resource. +- Call VROmafPackingClose API to free VROmafPacking library related resource. Input Bitstream Requirements There are below requirements about input video bitstream to VROmafPacking library: - The input video bitstream must be HEVC compliant stream. That is, bitstream must conform with HEVC syntax and semantics. -- The first 4 NAL units in video bitstream must be VPS, SPS, PPS and Projection type SEI. And the Projection type SEI payload type must be 150, which is for equirectangular projection type. -- Tiles encoding must be enabled in input video stream and tiles split must be uniform, that is, syntax element ‘tiles_enabled_flag’ in PPS is 1 and ‘uniform_spacing_flag’ in PPS is also 1. In addition, MCTS (motion-constrained tile set) also needs to be enabled for packed picture quality. When this feature is enabled, CU at the edge of tile will not use padded pixels out of tile itself for motion estimation, that is, motion vectors for these CUs only point to pixels or sub-pixels inside tile. Then tiles will not depend on other tiles in encoding and decoding at all. There will be no mismatch between encoding and decoding. So, artifacts, like mosaics, will not appear in packed picture. +- The first 4 NAL units in video bitstream must be VPS, SPS, PPS and Projection type SEI. And the Projection type SEI payload type must be 150 or 151, which are for equirectangular projection type and cube-map projection type respectively. +- When the video source is from planar projection, no Projection type SEI is needed. That is, before the tile NAL unit, the first 3 NAL units in video bitstream must be VPS, SPS and PPS. +- Tiles encoding must be enabled in input video stream and tiles split must be uniform, that is, syntax element ‘tiles_enabled_flag’ in PPS is 1 and ‘uniform_spacing_flag’ in PPS is also 1. In addition, MCTS (motion-constrained tile set) also needs to be enabled for packed picture quality. When this feature is enabled, CU at the edge of tile will not use padded pixels out of tile itself for motion estimation, that is, motion vectors for these CUs only point to pixels or sub-pixels inside tile. Then tiles will not depend on other tiles in encoding and decoding at all. There will be no mismatch between encoding and decoding. So, artifacts, like mosaics, will not appear in packed picture. - The GOP size of input video stream is suggested to correspond to frame rate to make sure the first frame in segment file is I frame. GOP size could equal to one reasonable divisor of frame rate. For example, if frame rate is 30, then GOP size could be 15 or 30. -- The number of input video streams can only be 1 or 2. +- If the video sources are from equirectangular projection type or cube-map projection type, the number of input video streams can only be 2. +- If the video sources are from planar projection type, the number of input video streams can be more than 2. 
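A minimal caller of the four-step sequence above could look like the sketch below. Only the four API names come from this document; the header name, the Handler type and the argument lists are assumptions, loosely borrowed from how the unit test earlier in this patch fills InitialInfo and FrameBSInfo.

```cpp
#include "VROmafPacking.h"  // assumed header exposing the four APIs, InitialInfo, FrameBSInfo

// Hypothetical sketch of the documented call order for two video streams.
int PackOneFramePerStream(InitialInfo *info,        // filled as in VideoStreamTest::SetUp
                          FrameBSInfo *lowResFrame,
                          FrameBSInfo *highResFrame)
{
    Handler packer = VROmafPackingInit(info);        // create and initialize the instance
    if (!packer)
        return -1;

    // One call per frame per video stream; the stream index follows info->bsBuffers order.
    VROmafPackingWriteSegment(packer, 0, lowResFrame);
    VROmafPackingWriteSegment(packer, 1, highResFrame);

    VROmafPackingEndStreams(packer);                 // stop the segmentation process
    VROmafPackingClose(packer);                      // release library resources
    return 0;
}
```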
diff --git a/src/doc/Immersive_Video_Delivery_Plugins.md b/src/doc/Immersive_Video_Delivery_Plugins.md new file mode 100644 index 00000000..eb8f1e4d --- /dev/null +++ b/src/doc/Immersive_Video_Delivery_Plugins.md @@ -0,0 +1,54 @@ +# Immersive Video Delivery Plugins + +## Introduction +Immersive Video Delivery libraries provide plugin mechanism for customized process. +Now there are totally four types plugins supported: +- ViewportPredict_Plugin : plugin for predicting user viewport in next segment to reduce the latency of motion to high quality in OmafDashAccess library. +- OMAFPacking_Plugin : plugin for region-wise packing information generation for extractor track used in VROmafPacking library. +- StreamProcess_Plugin : plugin for media stream process, including both video stream and audio strem, also used in VROmafPacking library. +- 360SCVP_Plugin/TileSelection_Plugins: plugins with multiple tile selection methods for 2D/3D videos under different using scenarios in 360SCVP library. + +## ViewportPredict_Plugin +The main function of ViewportPredict_Plugin is to predict viewport angles with linear regression model using trajectory feedback in real time. +The plugin provides C APIs for user. +- `ViewportPredict_Init`: It is the initialization function with input parameter `PredictOption`. It is called only once in initialization process. +- `ViewportPredict_SetViewport`: It need to be called to set viewport each frame. The structure of viewport information is `ViewportAngle`, including (yaw, pitch, roll, pts, priority); +- `ViewportPredict_PredictPose`: It is called before timely downloading segment to obtain the predicted pose. Linear regression model is applied in the plugin, and an adaptive correction based on real-time feedback of the viewing trajectory is adopted to further improve the accuracy of prediction. +- `ViewportPredict_unInit`: It is the uninitialization function to be called in the end of the process. + +## OMAFPacking_Plugin +The main function of OMAFPacking_Plugin is to generate HEVC tiles layout for each specific extractor track and then generate the region-wise packing information for tiles stitched sub-picture. +- Firstly, the API Initialize should be called with all input video streams information defined by structure 'VideoStreamInfo', video streams index, selected tiles number in the extractor trak, the maximum selected tiles number in all extractor tracks and the external log callback. +- Then, the API GenerateMergedTilesArrange is to generate HEVC tiles layout which is defined by structure 'TileArrangement' in sub-picture for extractor track. +- After tiles arrangement is generated, API GenerateDstRwpk is called to construct region-wise packing information for tiles stitched sub-picture. +- Next, the API GenerateTilesMergeDirection is called to generate further detailed tiles merging layout defined by structure 'TilesMergeDirectionInCol' which will include below information for each selected tile : original video stream index, original tile index according to the order of raster scannig, and destination position in tiles merged sub-picture. These information will be used to calculate each extractor in extractor track. +- In addition, in order to construct new video stream from selected tiles stitching, APIs GetPackedPicWidth, GetPackedPicHeight, and GetMergedTilesArrange also need to be called to generate new SPS/PPS/Slice header. 
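The method names listed above already imply a fixed call order, which the sketch below spells out. The interface type, return types and argument lists are hypothetical stand-ins; only the method names and the 'VideoStreamInfo'/'TileArrangement' structure names come from the text, so treat this as pseudocode against the real plugin header.

```cpp
#include <cstdint>

// Hypothetical slice of the packing-plugin interface, just enough to show call order.
struct VideoStreamInfo;       // declared by the real plugin API
struct TileArrangement;       // declared by the real plugin API
struct RegionWisePacking;     // assumed name of the RWPK structure
class OMAFPackingPluginSketch
{
public:
    virtual int32_t Initialize(VideoStreamInfo **streams, uint8_t streamCount,
                               int32_t selectedTilesNum, int32_t maxSelectedTilesNum,
                               void *logCallback) = 0;
    virtual TileArrangement*   GenerateMergedTilesArrange() = 0;
    virtual RegionWisePacking* GenerateDstRwpk() = 0;
    virtual int32_t            GenerateTilesMergeDirection() = 0;
    virtual uint32_t           GetPackedPicWidth() = 0;
    virtual uint32_t           GetPackedPicHeight() = 0;
    virtual TileArrangement*   GetMergedTilesArrange() = 0;
    virtual ~OMAFPackingPluginSketch() {}
};

// Call order for one extractor track, as described in the list above.
RegionWisePacking* BuildOneExtractorTrack(OMAFPackingPluginSketch *plugin,
                                          VideoStreamInfo **streams, uint8_t streamCount,
                                          int32_t selectedTilesNum, int32_t maxSelectedTilesNum)
{
    if (plugin->Initialize(streams, streamCount, selectedTilesNum, maxSelectedTilesNum, nullptr))
        return nullptr;
    TileArrangement *layout = plugin->GenerateMergedTilesArrange(); // tiles layout in the sub-picture
    if (!layout)
        return nullptr;
    RegionWisePacking *rwpk = plugin->GenerateDstRwpk();            // RWPK for the stitched sub-picture
    plugin->GenerateTilesMergeDirection();                          // per-tile source/destination mapping
    // Width/height and the merged arrangement feed new SPS/PPS/slice header generation.
    uint32_t w = plugin->GetPackedPicWidth();
    uint32_t h = plugin->GetPackedPicHeight();
    (void)w; (void)h; (void)plugin->GetMergedTilesArrange();
    return rwpk;
}
```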
+ +## StreamProcess_Plugin +The main function of StreamProcess_Plugin is to get media stream information by parsing stream headers, and handle frames to be written into DASH segments. +The base class 'MediaStream' is for both video stream process plugin and audio stream process plugin and it will specify the basic attributes 'MediaType' and 'CodecId'. + +VideoStream_Plugin +This plugin is used to parse video stream headers (VPS/SPS/PPS/SEI) and then get the detailed video stream informaiton, like resolution, tile split and so on. In addition, it will manage input video frames for DASH segmentation process. +- Firstly, the API Initialize will be called with the video stream index, basic video stream encoding configuration and video bitstream headers data defined by structure 'BSBuffer', and the initial information for VROmafPacking library. After getting video bitstream headers data, this plugin will begin parsing VPS/SPS/PPS/SEI. Then, most of video information can be get, like resolution, tile split, projection type and so on, by APIs GetSrcWidth, GetSrcHeight, GetTileInRow, GetTileInCol and GetProjType. In addition, the region-wise packing information for input video stream should also be generated during initialization by parsing SEI or other calculation methods. +- Secondly, when one frame from one video stream comes, API AddFrameInfo should be called to copy this frame information defined by structure 'FrameBSInfo' and then add new created frame information into frames list managed inside this plugin prepared for DASH segmentation. +- Then, in DASH segmentation process which may be in another thread, APIs SetCurrFrameInfo is called firstly to set the front frame in frames list as the current frame to be processed, then API GetCurrFrameInfo is used to get it in segmentation process thread. After getting new frame, call UpdateTilesNalu to update tile NALU data defined by structures 'Nalu' and 'TileInfo'. Lower library for segmentation will generate one segment when all frames included in the segment are ready, so API AddFrameToSegment should be called to hold the frame before the segment is generated. And after the segment is written, API DestroyCurrSegmentFrames will be called to release all frames data in this segment. +- At last, when all video frames have been written into segments, call API SetEOS to set EOS status, and DASH segmentation process will get this statue by API GetEOS, then to stop the process. + +AudioStream_Plugin +This plugin is used to parse auido stream headers, like ADTS header for AAC audio stream, and then get the detailed audio stream information, like sample rate, channel number and so on. In addition, it will also manage input audio frames for DASH segmentation process. +- Firstly, the API Initialize will be called with the audio stream index, basic audio stream encoding configuration and the first audio frame bitstream data defined by structure 'BSBuffer', and the initial information for VROmafPacking library. After getting the first auido frame data, whether header data is included will be checked. If there is header data, like ADTS header, then parse header to get basic audio stream information, like sample rate, if not, the basic audio information must be set in input structure 'BSBuffer'. Then, APIs GetSampleRate, GetChannelNum and so on can be called to get these information. 
+- Secondly, when one frame from one audio stream comes, API AddFrameInfo should be called to copy this frame information defined by structure 'FrameBSInfo' and then add new created frame information into frames list managed inside the plugin prepared for DASH segmentation. +- Then, in DASH segmentation process which may be in another thread, APIs SetCurrFrameInfo is called firstly to set the front frame in frames list as the current frame to be processed, then API GetCurrFrameInfo is used to get it in segmentation process thread. After getting new frame, call GetHeaderDataSize to update and get header size for the frame, then raw audio data in this frame can be obtained. Like video stream process plugin, lower library for segmentation will generate one segment when all frames included in the segment are ready, so API AddFrameToSegment should be called to hold the frame before the segment is generated. And after the segment has been written, API DestroyCurrSegmentFrames will be called to release all frames data. +- At last, when all audio frames have been written into segments, call API SetEOS to set EOS status, and DASH segmentation process will get this status by API GetEOS, then to stop the process. + +## 360SCVP_Plugins +The 360SCVP plugins will provide mulitiple functions on different types of videos. Though we support tile selection for multiple now, it is able to extend to other functions. +There is a structure for plugin definition which is defined in 360SCVPAPI.h. The PluginDef has three element for user to define their plugin type, format and library path. + +TileSelection_Plugins +This is one kind of 360SCVP Plugins which can support 2D Planar video but also could be easy extended to 3D projection videos like ERP or cubemap. The TileSelectionPlugins_API.h has defined a base class 'TileSelection' and users can define their own child-class in the implementation. Below is an example process for Tile Selection on planar videos. +- Firstly, The base class has a constructor, a deconstructor and an initilaization function. When users want to execute tile selection, the initialization function should be called with proper configuration in the param_360SCVP struct. The param 'sourceResolutionNum' is the number of the high resolution streams. The struct 'pStreamInfo' is used to set the resolution and tile width and height of each stream. Do not forget to set the 'PluginDef' with the plugin type, format and library path. +- Then the API 'SetViewportInfo' is called to pass down the headpose information, which includes the viewport center point coordinates, the tile selection mode, and the moving direction and speed. +- Now the API 'GetTilesInViewport' can be called to get tile selection results based on the initial configurations and headpose information. The output struct array element stores the selected tile information including the upleft point coordinate, the streamId which indicates the resolution this tile is. +- When the playback finished, the unInit API will be called to release internal dynamic memory and reset internal variables. diff --git a/src/doc/Immersive_Video_Delivery_RefPlayer.md b/src/doc/Immersive_Video_Delivery_RefPlayer.md index fd1cb537..b00954ee 100644 --- a/src/doc/Immersive_Video_Delivery_RefPlayer.md +++ b/src/doc/Immersive_Video_Delivery_RefPlayer.md @@ -1,22 +1,42 @@ # Immersive Video Delivery Reference Player ## Introduction -The reference 360 player is used to play the mixed-resolution stream video transmitted from the server. 
Based on the current viewport, corresponding regions are rendered on the window. The workflow is as follows: +The reference 360 player is used to play the mixed-resolution stream video transmitted from the server. Linux and Android platform are both supported. + +Based on the current viewport, corresponding regions are rendered on the window. The workflow is as follows: -The 360 player supports streams in ERP format for now, and soon Cubemap format will be supported as well. After getting encoded packet from Dash Access Library, FFmpeg software decoder is used to decode frames. Decoded frame is bind to a 2D texture and the texture would be updated every frame. And then according to Region-wise Packing information, there exists tiles copy between packed frame buffer and output frame buffer. The last step is to render the output frame buffer to sphere. +The 360 player supports streams in both Equirectangle and Cubemap geometry. In Linux player, after getting encoded packet from Dash Access Library, FFmpeg software decoder is used to decode frames. Decoded frame is bind to a 2D texture and the texture would be updated every frame. And then according to Region-wise Packing information, there exists tiles copy between packed frame buffer and output frame buffer. The last step is to render the output frame buffer to sphere or skybox. + +In Android player, MediaCodec decoder is ultilized to decode packets from Dash Access Library and deliver the output to decoded surfaces. And then, Draw tile by tile from decoded surfaces to display surface according to Region-wise Packing information. The last step is to render the output frame buffer to sphere or skybox. + +Except for 3D projection media contents, planar format is supported in late binding mode. -The key technical step in render is how to correctly remap the mixed-resolution decoded frame to the sphere texture in space. Region-wise Packing (RWPK) information would be obtained from Omaf Dash Access library together with an encoded packet, which represents the mapping space relationship between decoded frame and sphere texture. The RWPK schematic diagram is shown as follows: +Extractor-track and later-binding strategy are both supported. Thus, there may be multiple videos of different quality rankings in one input stream. Decoder manager is created to support multi-decoder process. Decoders will be destroyed, restarted or reset if the number of videos, resolution or codec format changes. - +The very important step in render is how to correctly remap the mixed-resolution decoded frame to the display texture in space. Region-wise Packing (RWPK) information would be obtained from Omaf Dash Access library together with an encoded packet, which represents the mapping space relationship between decoded frame and sphere texture. The RWPK schematic diagram is shown as follows: + + In the specific implementation process, the texture that decoded frame is bind and full texture both are attached to the corresponding frame buffer object. The following figure shows the remapping operation. It is a tile copy process which can be implemented in GPU memory using OpenGL APIs. +### Rotation in render +As shown in following figure, there exist different face descriptions in OMAF spec and OpenGL, so it’s required to align them. +In CubeMapRenderTarget class, mOMAF2GLFaceIDMap is a map that defines the projection relationship between them. 
+ + + +Second, in Create function in CubeMapMesh class, skyboxVertices is defined to describe the six face vertices without transform type information, which is shown in following figure. + + + +And in Bind function in CubeMapMesh class, the vertices in each face would be transformed according to m_transformType data. The transform action here is reverse to the action described in m_transformType, which is to rotate the scene back to NO_TRANSFORM. + ## Configuration -The configuration file, config.xml, is used to config parameters for 360 player, as shown in the following Table: +The configuration file, config.xml, is used to config parameters for 360 linux player, as shown in the following Table: | **Parameters** | **Descriptions** | **examples** | | --- | --- | --- | @@ -24,13 +44,15 @@ The configuration file, config.xml, is used to config parameters for 360 player, | windowHeight | The height of render window | 960 for 4k, 1920 for 8k | | url | The resource URL path | Remote URL | | sourceType | Source type | 0 is for Dash Source | -| decoderType | FFmpeg Decoder type | 0 is for software decoder | -| contextType | OpenGL context type | 0 is for glfw | -| useDMABuffer | DMA Buffer flag | 0 means no DMA buffer | +| enableExtractor | extractor track path or later binding path | 1 is for extractor track and 0 is for later binding | +| StreamDumpedOption | dump packet streams or not | 0 for false, 1 for true | | viewportHFOV | Viewport horizon FOV degree | 80 | | viewportVFOV | Viewport vertical FOV degree | 80 | | viewportWidth | Viewport width | 960 for 4k, 1920 for 8k | | viewportHeight | Viewport height | 960 for 4k, 1920 for 8k | -| cachePath | Cache path | /home/media/cache | - -**Note**: So far, some parameters settings are limited. URL need to be a remote dash source URL. The parameter sourceType must set to 0, which represents dash source. The parameter decoderType must set to 0, which stands for FFmpeg software decoder. The parameter contextType need to be 0, which represents glfw context. And useDMABuffer flag should be set to 0. +| cachePath | Cache path | /tmp/cache | +| minLogLevel | min log level | INFO / WARNING / ERROR / FATAL | +| maxVideoDecodeWidth | max video decoded width | decoded width that is supported | +| maxVideoDecodeHeight | max video decoded height | decoded height that is supported | +| predict | viewport prediction plugin | 0 is disable and 1 is enable | +| PathOf360SCVPPlugins | path of 360SCVP plugins | needed for planar format rendering | diff --git a/src/doc/Immersive_Video_Getting_Started_Guide.md b/src/doc/Immersive_Video_Getting_Started_Guide.md index c11b4461..07d4c494 100644 --- a/src/doc/Immersive_Video_Getting_Started_Guide.md +++ b/src/doc/Immersive_Video_Getting_Started_Guide.md @@ -8,61 +8,49 @@ * [Build Client Components](#build-client-preresuisties) * [Quick Run](#quick-run) -This document describes how to build OMAF-Compliant Immsersive Video Delivery components and how to run sample tests. +This document describes how to build OMAF-Compliant Immersive Video Delivery components and how to run sample tests. -Intel Immersive Video Delivery solution provides basic components for OMAF-Compliant Tiled 360 Video Delivery, including MCTS-based HEVC transcoding, OMAF-compliant streaming packing, client OMAF dash access library; and FFMPEG plugins for quick trial for these components to setup E2E OMAF-Compliant 360 video streaming. Both VOD and Live streaming can be supported with the solution. 
The solution only supports one video processing mode which is not in ISO/IEC 23090-2 Annex D; and it provides an common interface for developer to create new video processing mode. +Intel Immersive Video Delivery solution provides the basic components for OMAF-Compliant Tiled 360 Video Delivery, including MCTS-based HEVC transcoding, OMAF-compliant streaming packing, client OMAF dash access library, and FFMPEG plugins for quick trial for these components to setup E2E OMAF-Compliant 360 video streaming. Both VOD and Live streaming are supported by the solution. The solution only supports one video processing mode which is not in ISO/IEC 23090-2 Annex D; also provides an common interface for developer to create new video processing mode. # Introduction Intel VCD Immersive Video Delivery solution provides basic components for OMAF-Compliant Tiled 360 Video Delivery, including MCTS-based HEVC transcoding, OMAF-compliant streaming packing, client OMAF dash access library; and FFMPEG plugins for quick trial for these components to setup E2E OMAF-Compliant 360 video streaming. -please refer to [wiki]() for more information. +# Prerequisites -# Prerequistes -To build the whole solution, there are some prerequistes must be ready. +[Build](#Build) process and [Quick Run](#Quick Run) are verified on *CentOS 7.6(server)* and *Ubuntu 18.04(client)* + +To build the whole solution, there are some prerequisites must be ready. ```bash gcc >= 6.3.1 g++ >= 6.3.1 cmake >= 3.12.4 ``` -You can use the following command to install relative dependency in server/client side: -```bash -sudo yum install pcre-devel openssl openssl-devel -sudo yum install devtoolset-6-gcc devtoolset-6-gcc-c++ -``` - # Build ## Build Server Components ```bash # Make sure using gcc >= 6.3.1 -git clone https://github.com/OpenVisualCloud/ImmersiveVideo -cd ImmersiveVideo/src/external -mkdir -p ../build/server -cd ../build/server -export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH -cmake -DCMAKE_BUILD_TYPE=Release -DTARGET=server .. -make -j `nproc` -sudo make install +git clone https://github.com/OpenVisualCloud/Immersive-Video-Sample.git +cd Immersive-Video-Sample/src/external +./build.sh server y # Install dependency before the first build +# ./build.sh server n # Build without dependency installation ``` ## Build Client Components ```bash # Make sure using gcc >= 6.3.1 -git clone https://github.com/OpenVisualCloud/ImmersiveVideo -cd ImmersiveVideo/src/external -mkdir -p ../build/client -cd ../build/client -export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH -cmake -DCMAKE_BUILD_TYPE=Release -DTARGET=client .. -make -j `nproc` -sudo make install +git clone https://github.com/OpenVisualCloud/Immersive-Video-Sample.git +cd Immersive-Video-Sample/src/external +./build.sh client y # Install dependency before the first build +# ./build.sh client n # Build without dependency installation ``` # Quick Run -To run sample test, Ngnix server should be installed in server side. please refer to [ngnix setup](ngnix_setup.md). +To run sample tests, Ngnix server have to be installed on server side, please refer to the example in [ngnix setup](ngnix_setup.md). +For details of parameters of FFmpeg plugins, please refer to the [FFmpeg usage doc](Immersive_Video_Delivery_FFmpeg_usage.md). -## Server Side +## Server Side Set up RSA if HTTPS is used. 
```bash @@ -72,33 +60,91 @@ sudo ssh-copy-id root@ ### Live Streaming - Test Command for 4K Video: - ```bash - ffmpeg -i [rtmp://localhost/demo/1.flv] -input_type 1 -c:v:0 distributed_encoder -s:0 3840x1920 -tile_row:0 6 -tile_column:0 10 -config_file:0 config_high.txt -g:0 15 -b:0 30M -map 0:v -c:v:1 distributed_encoder -s:1 1024x640 -tile_row:1 2 -tile_column:1 4 -config_file:1 config_low.txt -g:1 15 -b:1 5M -map 0:v -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -window_size 20 -extra_window_size 30 -base_url http://[ServerIP]/OMAFLive_4k/ -out_name Test /usr/local/nginx/html/OMAFLive_4k/ + cd /path/to/Immersive-Video-Sample/src/build/server/ffmpeg + echo "ip local port 9090" > config_high.txt + echo "ip local port 9089" > config_low.txt + ffmpeg -re -i [rtmp://localhost/demo/1.flv] -input_type 1 -rc 1 \ + -c:v:0 distributed_encoder -s:0 3840x1920 -tile_row:0 6 -tile_column:0 10 \ + -config_file:0 config_high.txt -g:0 15 -b:0 30M -map 0:v \ + -c:v:1 distributed_encoder -s:1 1024x640 -sws_flags neighbor -tile_row:1 2 -tile_column:1 4 \ + -config_file:1 config_low.txt -g:1 15 -b:1 5M -map 0:v \ + -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -window_size 20 -extra_window_size 30 \ + -base_url http://[ServerIP]/OMAFLive_4k/ -out_name Test /usr/local/nginx/html/OMAFLive_4k/ ``` - Test Command for 8K Video: ```bash - numactl -c 1 ffmpeg -re -i [rtmp://192.168.1.10:1935/live/video] -input_type 1 -rc 1 -c:v:0 distributed_encoder -s:0 7680x3840 -g:0 25 -tile_row:0 6 -tile_column:0 12 -la_depth:0 0 -config_file:0 config_high.txt -b:0 50M -map 0:v -c:v:1 distributed_encoder -s:1 1280x1280 -sws_flags neighbor -g:1 25 -tile_row:1 2 -tile_column:1 2 -la_depth:1 0 -config_file:1 config_low.txt -b:1 2M -map 0:v -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -extractors_per_thread 4 -base_url http://[ServerIP]/OMAFLive_8k/ -out_name Test /usr/local/nginx/html/OMAFLive_8k/ + cd /path/to/Immersive-Video-Sample/src/build/server/ffmpeg + echo "ip local port 9090 numa 1" > config_high.txt + echo "ip local port 9089 numa 2" > config_low.txt + numactl -c 1 ffmpeg -re -i [rtmp://192.168.1.10:1935/live/video] -input_type 1 -rc 1 \ + -c:v:0 distributed_encoder -s:0 7680x3840 -g:0 25 -tile_row:0 6 -tile_column:0 12 \ + -la_depth:0 0 -config_file:0 config_high.txt -b:0 50M -map 0:v \ + -c:v:1 distributed_encoder -s:1 1280x1280 -sws_flags neighbor -g:1 25 -tile_row:1 2 -tile_column:1 2 \ + -la_depth:1 0 -config_file:1 config_low.txt -b:1 2M -map 0:v \ + -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -extractors_per_thread 4 \ + -base_url http://[ServerIP]/OMAFLive_8k/ -out_name Test /usr/local/nginx/html/OMAFLive_8k/ ``` ### Tiled Content Generation for VOD - Test Command for 4K Video: -```bash - ffmpeg -i [file] -input_type 1 -c:v:0 distributed_encoder -s:0 3840x1920 -tile_row:0 6 -tile_column:0 10 -config_file:0 config_high.txt -g:0 15 -b:0 30M -map 0:v -c:v:1 distributed_encoder -s:1 1024x640 -tile_row:1 2 -tile_column:1 4 -config_file:1 config_low.txt -g:1 15 -b:1 5M -map 0:v -f omaf_packing -is_live 0 -split_tile 1 -seg_duration 1 -window_size 20 -extra_window_size 30 -base_url http://[server ip]]/OMAFStatic_4k/ -out_name Test /usr/local/nginx/html/OMAFStatic_4k/ +```bash + cd /path/to/Immersive-Video-Sample/src/build/server/ffmpeg + echo "ip local port 9090" > config_high.txt + echo "ip local port 9089" > config_low.txt + ffmpeg -i [file] -input_type 1 -rc 1 \ + -c:v:0 distributed_encoder -s:0 3840x1920 -tile_row:0 6 -tile_column:0 10 \ + -config_file:0 config_high.txt 
-g:0 15 -b:0 30M -map 0:v \ + -c:v:1 distributed_encoder -s:1 1024x640 -sws_flags neighbor -tile_row:1 2 -tile_column:1 4 \ + -config_file:1 config_low.txt -g:1 15 -b:1 5M -map 0:v \ + -f omaf_packing -is_live 0 -split_tile 1 -seg_duration 1 -window_size 20 -extra_window_size 30 \ + -base_url http://[ServerIP]/OMAFStatic_4k/ -out_name Test /usr/local/nginx/html/OMAFStatic_4k/ ``` - Test Command for 8K Video: ```bash - numactl -c 1 ffmpeg -re -i [rtmp://192.168.1.10:1935/live/video] -input_type 1 -rc 1 -c:v:0 distributed_encoder -s:0 7680x3840 -g:0 25 -tile_row:0 6 -tile_column:0 12 -la_depth:0 0 -config_file:0 config_high.txt -b:0 50M -map 0:v -c:v:1 distributed_encoder -s:1 1280x1280 -sws_flags neighbor -g:1 25 -tile_row:1 2 -tile_column:1 2 -la_depth:1 0 -config_file:1 config_low.txt -b:1 2M -map 0:v -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -extractors_per_thread 4 -base_url http://[ServerIP]/OMAFStatic_8k/ -out_name Test /usr/local/nginx/html/OMAFStatic_8k/ + cd /path/to/Immersive-Video-Sample/src/build/server/ffmpeg + echo "ip local port 9090 numa 1" > config_high.txt + echo "ip local port 9089 numa 2" > config_low.txt + numactl -c 1 ffmpeg -i [file] -input_type 1 -rc 1 \ + -c:v:0 distributed_encoder -s:0 7680x3840 -g:0 25 -tile_row:0 6 -tile_column:0 12 \ + -la_depth:0 0 -config_file:0 config_high.txt -b:0 50M -map 0:v \ + -c:v:1 distributed_encoder -s:1 1280x1280 -sws_flags neighbor -g:1 25 -tile_row:1 2 -tile_column:1 2 \ + -la_depth:1 0 -config_file:1 config_low.txt -b:1 2M -map 0:v \ + -f omaf_packing -is_live 0 -split_tile 1 -seg_duration 1 -extractors_per_thread 4 \ + -base_url http://[ServerIP]/OMAFStatic_8k/ -out_name Test /usr/local/nginx/html/OMAFStatic_8k/ ``` ## Client Side -- modify the config.xml; please refer to [Reference Player Configuration](Immersive_Video_Delivery_RefPlayer.md) for detail information - +Modify the config.xml; please refer to [Reference Player Configuration](Immersive_Video_Delivery_RefPlayer.md) for detailed information. ```bash -- run ./render -- type 's' to start playing.
+cd /path/to/Immersive-Video-Sample/src/build/client/player +export LD_LIBRARY_PATH=/usr/local/lib/:$LD_LIBRARY_PATH +vim config.xml # Set up configuration, details in the following table +./render # Located in the same path as config.xml above ``` +**Config.xml** + +| **Parameters** | **Description** | **Example** | +| --- | --- | --- | +| windowWidth | The width of the render window | 960 for 4K, 1920 for 8K | +| windowHeight | The height of the render window | 960 for 4K, 1920 for 8K | +| url | The resource URL path | Remote URL | +| sourceType | Source type | 0 for DASH source | +| enableExtractor | Extractor track path or later-binding path | 1 for extractor track, 0 for later binding | +| StreamDumpedOption | Whether to dump packet streams | 0 for false, 1 for true | +| viewportHFOV | Viewport horizontal FOV in degrees | 80 | +| viewportVFOV | Viewport vertical FOV in degrees | 80 | +| viewportWidth | Viewport width | 960 for 4K, 1920 for 8K | +| viewportHeight | Viewport height | 960 for 4K, 1920 for 8K | +| cachePath | Cache path | /tmp/cache | +| minLogLevel | Minimum log level | INFO / WARNING / ERROR / FATAL | +| maxVideoDecodeWidth | Maximum video decode width | A decoded width that is supported | +| maxVideoDecodeHeight | Maximum video decode height | A decoded height that is supported | +| predict | Viewport prediction plugin | 0 to disable, 1 to enable | +| PathOf360SCVPPlugins | Path of the 360SCVP plugins | Needed for planar format rendering | + + - **Note** : So far, some parameter settings are restricted. The `url` must be a remote DASH source URL, for example `http://xxx.xxx.xxx.xxx:8080/OMAFLive_4k/Test.mpd`. The parameter `sourceType` must be set to 0, which represents a DASH source. The parameter `decoderType` must be set to 0, which stands for the FFmpeg software decoder. The parameter `contextType` must be set to 0, which represents the GLFW context. The `useDMABuffer` flag should be set to 0.
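+ - **Tip** : Before launching the player, it can save time to confirm that the MPD referenced by `url` is actually reachable from the client machine. The check below is a minimal sketch that only assumes `curl` is available on the client; the address and stream name are the placeholder values from the note above and should be replaced with the real server address.
+```bash
+# Request only the headers of the DASH manifest that the player will fetch;
+# an HTTP 200 response means the URL configured in config.xml is reachable.
+curl -I http://xxx.xxx.xxx.xxx:8080/OMAFLive_4k/Test.mpd
+# If the server is published over HTTPS with a self-signed certificate,
+# add -k to skip certificate verification, for example:
+# curl -k -I https://xxx.xxx.xxx.xxx:<mapped-https-port>/OMAFLive_4k/Test.mpd
+```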
diff --git a/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp.png b/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp.png index db6ba51e..c7f8f5ee 100644 Binary files a/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp.png and b/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp.png differ diff --git a/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp_cubemap.png b/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp_cubemap.png new file mode 100644 index 00000000..a6a58b9a Binary files /dev/null and b/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp_cubemap.png differ diff --git a/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp_init.png b/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp_init.png index a0e21055..a6e1218d 100644 Binary files a/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp_init.png and b/src/doc/img/OMAF_Compliant-Video-Delivery-360scvp_init.png differ diff --git a/src/doc/img/OMAF_Compliant-Video-Delivery-cubemap_coord.png b/src/doc/img/OMAF_Compliant-Video-Delivery-cubemap_coord.png new file mode 100644 index 00000000..c5fdb463 Binary files /dev/null and b/src/doc/img/OMAF_Compliant-Video-Delivery-cubemap_coord.png differ diff --git a/src/doc/img/OMAF_Compliant-Video-Delivery-face_desc_in_OMAF_OpenGL.png b/src/doc/img/OMAF_Compliant-Video-Delivery-face_desc_in_OMAF_OpenGL.png new file mode 100644 index 00000000..873983c2 Binary files /dev/null and b/src/doc/img/OMAF_Compliant-Video-Delivery-face_desc_in_OMAF_OpenGL.png differ diff --git a/src/doc/img/OMAF_Compliant-Video-Delivery-skybox_vertices.png b/src/doc/img/OMAF_Compliant-Video-Delivery-skybox_vertices.png new file mode 100644 index 00000000..a4fee4c9 Binary files /dev/null and b/src/doc/img/OMAF_Compliant-Video-Delivery-skybox_vertices.png differ diff --git a/src/doc/img/OMAF_Compliant-Video-Delivery-transform_type.png b/src/doc/img/OMAF_Compliant-Video-Delivery-transform_type.png new file mode 100644 index 00000000..c809041e Binary files /dev/null and b/src/doc/img/OMAF_Compliant-Video-Delivery-transform_type.png differ diff --git a/src/doc/ngnix_setup.md b/src/doc/ngnix_setup.md new file mode 100644 index 00000000..be4ed1e5 --- /dev/null +++ b/src/doc/ngnix_setup.md @@ -0,0 +1,9 @@ +# Build Nginx + +``` +wget -O - http://nginx.org/download/nginx-1.13.1.tar.gz | tar xz +cd nginx-1.13.1 +./configure --with-http_ssl_module +make -j $(nproc) +sudo make install +``` diff --git a/src/external/0001-Add-avcodec_receive_frame2-for-vaapi-hardware-decodi.patch b/src/external/0001-Add-avcodec_receive_frame2-for-vaapi-hardware-decodi.patch deleted file mode 100644 index 02235a87..00000000 --- a/src/external/0001-Add-avcodec_receive_frame2-for-vaapi-hardware-decodi.patch +++ /dev/null @@ -1,78 +0,0 @@ -From 3563bed5fbaa0e9332b805404f52a08a37508d72 Mon Sep 17 00:00:00 2001 -From: Hai Lan -Date: Mon, 19 Aug 2019 14:49:25 +0800 -Subject: [PATCH] Add avcodec_receive_frame2 for vaapi hardware decoding - -Signed-off-by: Hai Lan ---- - libavcodec/avcodec.h | 3 +++ - libavcodec/decode.c | 39 +++++++++++++++++++++++++++++++++++++++ - 2 files changed, 42 insertions(+) - -diff --git a/libavcodec/avcodec.h b/libavcodec/avcodec.h -index bee2234..e13d2cd 100644 ---- a/libavcodec/avcodec.h -+++ b/libavcodec/avcodec.h -@@ -4875,6 +4875,9 @@ int avcodec_send_packet(AVCodecContext *avctx, const AVPacket *avpkt); - */ - int avcodec_receive_frame(AVCodecContext *avctx, AVFrame *frame); - -+#include -+int avcodec_receive_frame2(AVCodecContext *avctx, AVFrame *frame, VADisplay *display); -+ - /** - * Supply a raw video or audio frame to the 
encoder. Use avcodec_receive_packet() - * to retrieve buffered output packets. -diff --git a/libavcodec/decode.c b/libavcodec/decode.c -index 160d42d..63fda0b 100644 ---- a/libavcodec/decode.c -+++ b/libavcodec/decode.c -@@ -768,6 +768,45 @@ int attribute_align_arg avcodec_receive_frame(AVCodecContext *avctx, AVFrame *fr - return 0; - } - -+#include -+#include "vaapi_decode.h" -+ -+int attribute_align_arg avcodec_receive_frame2(AVCodecContext *avctx, AVFrame *frame, VADisplay *display) -+{ -+ AVCodecInternal *avci = avctx->internal; -+ int ret; -+ -+ av_frame_unref(frame); -+ -+ if (!avcodec_is_open(avctx) || !av_codec_is_decoder(avctx->codec)) -+ return AVERROR(EINVAL); -+ -+ if (avci->buffer_frame->buf[0]) { -+ av_frame_move_ref(frame, avci->buffer_frame); -+ } else { -+ ret = decode_receive_frame_internal(avctx, frame); -+ if (ret < 0) -+ return ret; -+ } -+ -+ if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) { -+ ret = apply_cropping(avctx, frame); -+ if (ret < 0) { -+ av_frame_unref(frame); -+ return ret; -+ } -+ } -+ -+ avctx->frame_number++; -+ -+ VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data; -+ //VADISPLAY *display = ctx->hwctx->display; -+ if(ctx->hwctx) -+ *display = ctx->hwctx->display; -+ -+ return 0; -+} -+ - static int compat_decode(AVCodecContext *avctx, AVFrame *frame, - int *got_frame, const AVPacket *pkt) - { --- -1.8.3.1 - diff --git a/src/external/0001-SSL-Modify-Makefile.patch b/src/external/0001-SSL-Modify-Makefile.patch new file mode 100644 index 00000000..accdbeea --- /dev/null +++ b/src/external/0001-SSL-Modify-Makefile.patch @@ -0,0 +1,70 @@ +From 2ab81eaaa7c8782e48d979ae3e513cbe77c120f9 Mon Sep 17 00:00:00 2001 +From: "Luo, Ying" +Date: Wed, 26 Feb 2020 09:59:38 +0800 +Subject: [PATCH] Modify Makefile. + +Signed-off-by: Luo, Ying +--- + Makefile | 20 ++++++++++++-------- + 1 file changed, 12 insertions(+), 8 deletions(-) + +diff --git a/Makefile b/Makefile +index 4722321..077e3c9 100644 +--- a/Makefile ++++ b/Makefile +@@ -20,8 +20,10 @@ SHLIB_MINOR=1 + SHLIB_TARGET=linux-shared + + LIBS=libcrypto.a libssl.a +-SHLIBS=libcrypto.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) libssl.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) +-SHLIB_INFO="libcrypto.so.$(SHLIB_MAJOR).$(SHLIB_MINOR);libcrypto.so" "libssl.so.$(SHLIB_MAJOR).$(SHLIB_MINOR);libssl.so" ++#SHLIBS=libcrypto.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) libssl.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) ++#SHLIB_INFO="libcrypto.so.$(SHLIB_MAJOR).$(SHLIB_MINOR);libcrypto.so" "libssl.so.$(SHLIB_MAJOR).$(SHLIB_MINOR);libssl.so" ++SHLIBS=libcrypto.so libssl.so ++SHLIB_INFO="libcrypto.so" "libssl.so" + ENGINES=engines/capi.so engines/dasync.so engines/ossltest.so engines/padlock.so + PROGRAMS=apps/openssl fuzz/asn1-test fuzz/asn1parse-test fuzz/bignum-test fuzz/bndiv-test fuzz/cms-test fuzz/conf-test fuzz/crl-test fuzz/ct-test fuzz/server-test fuzz/x509-test test/aborttest test/afalgtest test/asynciotest test/asynctest test/bad_dtls_test test/bftest test/bio_enc_test test/bioprinttest test/bntest test/buildtest_aes test/buildtest_asn1 test/buildtest_asn1t test/buildtest_async test/buildtest_bio test/buildtest_blowfish test/buildtest_bn test/buildtest_buffer test/buildtest_camellia test/buildtest_cast test/buildtest_cmac test/buildtest_cms test/buildtest_conf test/buildtest_conf_api test/buildtest_crypto test/buildtest_ct test/buildtest_des test/buildtest_dh test/buildtest_dsa test/buildtest_dtls1 test/buildtest_e_os2 test/buildtest_ebcdic test/buildtest_ec test/buildtest_ecdh test/buildtest_ecdsa test/buildtest_engine test/buildtest_err 
test/buildtest_evp test/buildtest_hmac test/buildtest_idea test/buildtest_kdf test/buildtest_lhash test/buildtest_md4 test/buildtest_md5 test/buildtest_mdc2 test/buildtest_modes test/buildtest_obj_mac test/buildtest_objects test/buildtest_ocsp test/buildtest_opensslv test/buildtest_ossl_typ test/buildtest_pem test/buildtest_pem2 test/buildtest_pkcs12 test/buildtest_pkcs7 test/buildtest_rand test/buildtest_rc2 test/buildtest_rc4 test/buildtest_ripemd test/buildtest_rsa test/buildtest_safestack test/buildtest_seed test/buildtest_sha test/buildtest_srp test/buildtest_srtp test/buildtest_ssl test/buildtest_ssl2 test/buildtest_stack test/buildtest_symhacks test/buildtest_tls1 test/buildtest_ts test/buildtest_txt_db test/buildtest_ui test/buildtest_whrlpool test/buildtest_x509 test/buildtest_x509_vfy test/buildtest_x509v3 test/casttest test/cipherlist_test test/clienthellotest test/constant_time_test test/crltest test/ct_test test/d2i_test test/danetest test/destest test/dhtest test/dsatest test/dtlstest test/dtlsv1listentest test/ecdsatest test/ectest test/enginetest test/evp_extra_test test/evp_test test/exdatatest test/exptest test/gmdifftest test/heartbeat_test test/hmactest test/ideatest test/igetest test/md2test test/md4test test/md5test test/mdc2test test/memleaktest test/p5_crpt2_test test/packettest test/pbelutest test/randtest test/rc2test test/rc4test test/rc5test test/rmdtest test/rsa_test test/sanitytest test/secmemtest test/sha1test test/sha256t test/sha512t test/shlibloadtest test/srptest test/ssl_test test/ssl_test_ctx_test test/sslapitest test/sslcorrupttest test/ssltest_old test/threadstest test/v3ext test/v3nametest test/verify_extra_test test/wp_test test/x509aux + SCRIPTS=apps/CA.pl apps/tsget tools/c_rehash util/shlib_wrap.sh +@@ -30,8 +32,10 @@ GENERATED_MANDATORY=crypto/include/internal/bn_conf.h crypto/include/internal/ds + GENERATED=test/buildtest_ebcdic.c test/buildtest_ocsp.c test/buildtest_ct.c test/buildtest_crypto.c crypto/aes/aesv8-armx.s test/buildtest_pkcs7.c test/buildtest_cast.c test/buildtest_ecdh.c test/buildtest_asn1t.c test/buildtest_dtls1.c test/buildtest_x509v3.c crypto/ec/ecp_nistz256-armv8.s test/buildtest_hmac.c test/buildtest_srtp.c test/buildtest_sha.c test/buildtest_ts.c test/buildtest_whrlpool.c test/buildtest_ssl.c test/buildtest_bio.c crypto/chacha/chacha-armv8.s test/buildtest_aes.c test/buildtest_rc4.c test/buildtest_camellia.c test/buildtest_stack.c test/buildtest_pem2.c crypto/arm64cpuid.s test/buildtest_dh.c crypto/aes/vpaes-armv8.s test/buildtest_cmac.c test/buildtest_async.c crypto/bn/armv8-mont.s test/buildtest_buffer.c test/buildtest_tls1.c test/buildtest_ripemd.c crypto/modes/ghashv8-armx.s test/buildtest_opensslv.c test/buildtest_ssl2.c test/buildtest_rsa.c test/buildtest_conf_api.c test/buildtest_conf.c test/buildtest_asn1.c test/buildtest_x509.c crypto/sha/sha512-armv8.s test/buildtest_seed.c test/buildtest_ui.c test/buildtest_cms.c test/buildtest_ecdsa.c test/buildtest_pem.c test/buildtest_ec.c test/buildtest_obj_mac.c test/buildtest_des.c test/buildtest_e_os2.c test/buildtest_objects.c test/buildtest_modes.c test/buildtest_rand.c test/buildtest_engine.c test/buildtest_pkcs12.c test/buildtest_srp.c test/buildtest_symhacks.c test/buildtest_dsa.c test/buildtest_idea.c test/buildtest_ossl_typ.c test/buildtest_x509_vfy.c test/buildtest_kdf.c test/buildtest_bn.c test/buildtest_md4.c test/buildtest_mdc2.c test/buildtest_evp.c crypto/sha/sha256-armv8.s test/buildtest_blowfish.c test/buildtest_err.c crypto/sha/sha1-armv8.s 
test/buildtest_md5.c test/buildtest_lhash.c test/buildtest_safestack.c crypto/poly1305/poly1305-armv8.s test/buildtest_txt_db.c test/buildtest_rc2.c crypto/buildinf.h include/openssl/opensslconf.h crypto/include/internal/bn_conf.h crypto/include/internal/dso_conf.h + + INSTALL_LIBS=libcrypto.a libssl.a +-INSTALL_SHLIBS=libcrypto.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) libssl.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) +-INSTALL_SHLIB_INFO="libcrypto.so.$(SHLIB_MAJOR).$(SHLIB_MINOR);libcrypto.so" "libssl.so.$(SHLIB_MAJOR).$(SHLIB_MINOR);libssl.so" ++#INSTALL_SHLIBS=libcrypto.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) libssl.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) ++#INSTALL_SHLIB_INFO="libcrypto.so.$(SHLIB_MAJOR).$(SHLIB_MINOR);libcrypto.so" "libssl.so.$(SHLIB_MAJOR).$(SHLIB_MINOR);libssl.so" ++INSTALL_SHLIBS=libcrypto.so libssl.so ++INSTALL_SHLIB_INFO="libcrypto.so" "libssl.so" + INSTALL_ENGINES=engines/capi.so engines/padlock.so + INSTALL_PROGRAMS=apps/openssl + +@@ -625,11 +629,11 @@ libcrypto.so: libcrypto.a ../util/libcrypto.num + PERL="$(PERL)" SRCDIR='$(SRCDIR)' DSTDIR="." \ + INSTALLTOP='$(INSTALLTOP)' LIBDIR='$(LIBDIR)' \ + LIBDEPS='$(PLIB_LDFLAGS) '""' $(EX_LIBS)' \ +- LIBNAME=crypto LIBVERSION=$(SHLIB_MAJOR).$(SHLIB_MINOR) \ ++ LIBNAME=crypto LIBVERSION= \ + LIBCOMPATVERSIONS=';$(SHLIB_VERSION_HISTORY)' \ + CC='$(CC)' CFLAGS='$(CFLAGS) $(LIB_CFLAGS)' \ + LDFLAGS='$(LDFLAGS)' \ +- SHARED_LDFLAGS='$(LIB_LDFLAGS)' SHLIB_EXT=.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) \ ++ SHARED_LDFLAGS='$(LIB_LDFLAGS)' SHLIB_EXT=.so \ + RC='$(RC)' SHARED_RCFLAGS='$(RCFLAGS)' \ + link_shlib.linux-shared + libcrypto.a: crypto/aes/aes_cbc.o crypto/aes/aes_cfb.o crypto/aes/aes_core.o crypto/aes/aes_ecb.o crypto/aes/aes_ige.o crypto/aes/aes_misc.o crypto/aes/aes_ofb.o crypto/aes/aes_wrap.o crypto/aes/aesv8-armx.o crypto/aes/vpaes-armv8.o crypto/arm64cpuid.o crypto/armcap.o crypto/asn1/a_bitstr.o crypto/asn1/a_d2i_fp.o crypto/asn1/a_digest.o crypto/asn1/a_dup.o crypto/asn1/a_gentm.o crypto/asn1/a_i2d_fp.o crypto/asn1/a_int.o crypto/asn1/a_mbstr.o crypto/asn1/a_object.o crypto/asn1/a_octet.o crypto/asn1/a_print.o crypto/asn1/a_sign.o crypto/asn1/a_strex.o crypto/asn1/a_strnid.o crypto/asn1/a_time.o crypto/asn1/a_type.o crypto/asn1/a_utctm.o crypto/asn1/a_utf8.o crypto/asn1/a_verify.o crypto/asn1/ameth_lib.o crypto/asn1/asn1_err.o crypto/asn1/asn1_gen.o crypto/asn1/asn1_lib.o crypto/asn1/asn1_par.o crypto/asn1/asn_mime.o crypto/asn1/asn_moid.o crypto/asn1/asn_mstbl.o crypto/asn1/asn_pack.o crypto/asn1/bio_asn1.o crypto/asn1/bio_ndef.o crypto/asn1/d2i_pr.o crypto/asn1/d2i_pu.o crypto/asn1/evp_asn1.o crypto/asn1/f_int.o crypto/asn1/f_string.o crypto/asn1/i2d_pr.o crypto/asn1/i2d_pu.o crypto/asn1/n_pkey.o crypto/asn1/nsseq.o crypto/asn1/p5_pbe.o crypto/asn1/p5_pbev2.o crypto/asn1/p5_scrypt.o crypto/asn1/p8_pkey.o crypto/asn1/t_bitst.o crypto/asn1/t_pkey.o crypto/asn1/t_spki.o crypto/asn1/tasn_dec.o crypto/asn1/tasn_enc.o crypto/asn1/tasn_fre.o crypto/asn1/tasn_new.o crypto/asn1/tasn_prn.o crypto/asn1/tasn_scn.o crypto/asn1/tasn_typ.o crypto/asn1/tasn_utl.o crypto/asn1/x_algor.o crypto/asn1/x_bignum.o crypto/asn1/x_info.o crypto/asn1/x_int64.o crypto/asn1/x_long.o crypto/asn1/x_pkey.o crypto/asn1/x_sig.o crypto/asn1/x_spki.o crypto/asn1/x_val.o crypto/async/arch/async_null.o crypto/async/arch/async_posix.o crypto/async/arch/async_win.o crypto/async/async.o crypto/async/async_err.o crypto/async/async_wait.o crypto/bf/bf_cfb64.o crypto/bf/bf_ecb.o crypto/bf/bf_enc.o crypto/bf/bf_ofb64.o crypto/bf/bf_skey.o crypto/bio/b_addr.o crypto/bio/b_dump.o 
crypto/bio/b_print.o crypto/bio/b_sock.o crypto/bio/b_sock2.o crypto/bio/bf_buff.o crypto/bio/bf_lbuf.o crypto/bio/bf_nbio.o crypto/bio/bf_null.o crypto/bio/bio_cb.o crypto/bio/bio_err.o crypto/bio/bio_lib.o crypto/bio/bio_meth.o crypto/bio/bss_acpt.o crypto/bio/bss_bio.o crypto/bio/bss_conn.o crypto/bio/bss_dgram.o crypto/bio/bss_fd.o crypto/bio/bss_file.o crypto/bio/bss_log.o crypto/bio/bss_mem.o crypto/bio/bss_null.o crypto/bio/bss_sock.o crypto/blake2/blake2b.o crypto/blake2/blake2s.o crypto/blake2/m_blake2b.o crypto/blake2/m_blake2s.o crypto/bn/armv8-mont.o crypto/bn/bn_add.o crypto/bn/bn_asm.o crypto/bn/bn_blind.o crypto/bn/bn_const.o crypto/bn/bn_ctx.o crypto/bn/bn_depr.o crypto/bn/bn_dh.o crypto/bn/bn_div.o crypto/bn/bn_err.o crypto/bn/bn_exp.o crypto/bn/bn_exp2.o crypto/bn/bn_gcd.o crypto/bn/bn_gf2m.o crypto/bn/bn_intern.o crypto/bn/bn_kron.o crypto/bn/bn_lib.o crypto/bn/bn_mod.o crypto/bn/bn_mont.o crypto/bn/bn_mpi.o crypto/bn/bn_mul.o crypto/bn/bn_nist.o crypto/bn/bn_prime.o crypto/bn/bn_print.o crypto/bn/bn_rand.o crypto/bn/bn_recp.o crypto/bn/bn_shift.o crypto/bn/bn_sqr.o crypto/bn/bn_sqrt.o crypto/bn/bn_srp.o crypto/bn/bn_word.o crypto/bn/bn_x931p.o crypto/buffer/buf_err.o crypto/buffer/buffer.o crypto/camellia/camellia.o crypto/camellia/cmll_cbc.o crypto/camellia/cmll_cfb.o crypto/camellia/cmll_ctr.o crypto/camellia/cmll_ecb.o crypto/camellia/cmll_misc.o crypto/camellia/cmll_ofb.o crypto/cast/c_cfb64.o crypto/cast/c_ecb.o crypto/cast/c_enc.o crypto/cast/c_ofb64.o crypto/cast/c_skey.o crypto/chacha/chacha-armv8.o crypto/cmac/cm_ameth.o crypto/cmac/cm_pmeth.o crypto/cmac/cmac.o crypto/cms/cms_asn1.o crypto/cms/cms_att.o crypto/cms/cms_cd.o crypto/cms/cms_dd.o crypto/cms/cms_enc.o crypto/cms/cms_env.o crypto/cms/cms_err.o crypto/cms/cms_ess.o crypto/cms/cms_io.o crypto/cms/cms_kari.o crypto/cms/cms_lib.o crypto/cms/cms_pwri.o crypto/cms/cms_sd.o crypto/cms/cms_smime.o crypto/conf/conf_api.o crypto/conf/conf_def.o crypto/conf/conf_err.o crypto/conf/conf_lib.o crypto/conf/conf_mall.o crypto/conf/conf_mod.o crypto/conf/conf_sap.o crypto/cpt_err.o crypto/cryptlib.o crypto/ct/ct_b64.o crypto/ct/ct_err.o crypto/ct/ct_log.o crypto/ct/ct_oct.o crypto/ct/ct_policy.o crypto/ct/ct_prn.o crypto/ct/ct_sct.o crypto/ct/ct_sct_ctx.o crypto/ct/ct_vfy.o crypto/ct/ct_x509v3.o crypto/cversion.o crypto/des/cbc_cksm.o crypto/des/cbc_enc.o crypto/des/cfb64ede.o crypto/des/cfb64enc.o crypto/des/cfb_enc.o crypto/des/des_enc.o crypto/des/ecb3_enc.o crypto/des/ecb_enc.o crypto/des/fcrypt.o crypto/des/fcrypt_b.o crypto/des/ofb64ede.o crypto/des/ofb64enc.o crypto/des/ofb_enc.o crypto/des/pcbc_enc.o crypto/des/qud_cksm.o crypto/des/rand_key.o crypto/des/rpc_enc.o crypto/des/set_key.o crypto/des/str2key.o crypto/des/xcbc_enc.o crypto/dh/dh_ameth.o crypto/dh/dh_asn1.o crypto/dh/dh_check.o crypto/dh/dh_depr.o crypto/dh/dh_err.o crypto/dh/dh_gen.o crypto/dh/dh_kdf.o crypto/dh/dh_key.o crypto/dh/dh_lib.o crypto/dh/dh_meth.o crypto/dh/dh_pmeth.o crypto/dh/dh_prn.o crypto/dh/dh_rfc5114.o crypto/dsa/dsa_ameth.o crypto/dsa/dsa_asn1.o crypto/dsa/dsa_depr.o crypto/dsa/dsa_err.o crypto/dsa/dsa_gen.o crypto/dsa/dsa_key.o crypto/dsa/dsa_lib.o crypto/dsa/dsa_meth.o crypto/dsa/dsa_ossl.o crypto/dsa/dsa_pmeth.o crypto/dsa/dsa_prn.o crypto/dsa/dsa_sign.o crypto/dsa/dsa_vrf.o crypto/dso/dso_dl.o crypto/dso/dso_dlfcn.o crypto/dso/dso_err.o crypto/dso/dso_lib.o crypto/dso/dso_openssl.o crypto/dso/dso_vms.o crypto/dso/dso_win32.o crypto/ebcdic.o crypto/ec/curve25519.o crypto/ec/ec2_mult.o crypto/ec/ec2_oct.o crypto/ec/ec2_smpl.o 
crypto/ec/ec_ameth.o crypto/ec/ec_asn1.o crypto/ec/ec_check.o crypto/ec/ec_curve.o crypto/ec/ec_cvt.o crypto/ec/ec_err.o crypto/ec/ec_key.o crypto/ec/ec_kmeth.o crypto/ec/ec_lib.o crypto/ec/ec_mult.o crypto/ec/ec_oct.o crypto/ec/ec_pmeth.o crypto/ec/ec_print.o crypto/ec/ecdh_kdf.o crypto/ec/ecdh_ossl.o crypto/ec/ecdsa_ossl.o crypto/ec/ecdsa_sign.o crypto/ec/ecdsa_vrf.o crypto/ec/eck_prn.o crypto/ec/ecp_mont.o crypto/ec/ecp_nist.o crypto/ec/ecp_nistp224.o crypto/ec/ecp_nistp256.o crypto/ec/ecp_nistp521.o crypto/ec/ecp_nistputil.o crypto/ec/ecp_nistz256-armv8.o crypto/ec/ecp_nistz256.o crypto/ec/ecp_oct.o crypto/ec/ecp_smpl.o crypto/ec/ecx_meth.o crypto/engine/eng_all.o crypto/engine/eng_cnf.o crypto/engine/eng_cryptodev.o crypto/engine/eng_ctrl.o crypto/engine/eng_dyn.o crypto/engine/eng_err.o crypto/engine/eng_fat.o crypto/engine/eng_init.o crypto/engine/eng_lib.o crypto/engine/eng_list.o crypto/engine/eng_openssl.o crypto/engine/eng_pkey.o crypto/engine/eng_rdrand.o crypto/engine/eng_table.o crypto/engine/tb_asnmth.o crypto/engine/tb_cipher.o crypto/engine/tb_dh.o crypto/engine/tb_digest.o crypto/engine/tb_dsa.o crypto/engine/tb_eckey.o crypto/engine/tb_pkmeth.o crypto/engine/tb_rand.o crypto/engine/tb_rsa.o crypto/err/err.o crypto/err/err_all.o crypto/err/err_prn.o crypto/evp/bio_b64.o crypto/evp/bio_enc.o crypto/evp/bio_md.o crypto/evp/bio_ok.o crypto/evp/c_allc.o crypto/evp/c_alld.o crypto/evp/cmeth_lib.o crypto/evp/digest.o crypto/evp/e_aes.o crypto/evp/e_aes_cbc_hmac_sha1.o crypto/evp/e_aes_cbc_hmac_sha256.o crypto/evp/e_bf.o crypto/evp/e_camellia.o crypto/evp/e_cast.o crypto/evp/e_chacha20_poly1305.o crypto/evp/e_des.o crypto/evp/e_des3.o crypto/evp/e_idea.o crypto/evp/e_null.o crypto/evp/e_old.o crypto/evp/e_rc2.o crypto/evp/e_rc4.o crypto/evp/e_rc4_hmac_md5.o crypto/evp/e_rc5.o crypto/evp/e_seed.o crypto/evp/e_xcbc_d.o crypto/evp/encode.o crypto/evp/evp_cnf.o crypto/evp/evp_enc.o crypto/evp/evp_err.o crypto/evp/evp_key.o crypto/evp/evp_lib.o crypto/evp/evp_pbe.o crypto/evp/evp_pkey.o crypto/evp/m_md2.o crypto/evp/m_md4.o crypto/evp/m_md5.o crypto/evp/m_md5_sha1.o crypto/evp/m_mdc2.o crypto/evp/m_null.o crypto/evp/m_ripemd.o crypto/evp/m_sha1.o crypto/evp/m_sigver.o crypto/evp/m_wp.o crypto/evp/names.o crypto/evp/p5_crpt.o crypto/evp/p5_crpt2.o crypto/evp/p_dec.o crypto/evp/p_enc.o crypto/evp/p_lib.o crypto/evp/p_open.o crypto/evp/p_seal.o crypto/evp/p_sign.o crypto/evp/p_verify.o crypto/evp/pmeth_fn.o crypto/evp/pmeth_gn.o crypto/evp/pmeth_lib.o crypto/evp/scrypt.o crypto/ex_data.o crypto/hmac/hm_ameth.o crypto/hmac/hm_pmeth.o crypto/hmac/hmac.o crypto/idea/i_cbc.o crypto/idea/i_cfb64.o crypto/idea/i_ecb.o crypto/idea/i_ofb64.o crypto/idea/i_skey.o crypto/init.o crypto/kdf/hkdf.o crypto/kdf/kdf_err.o crypto/kdf/tls1_prf.o crypto/lhash/lh_stats.o crypto/lhash/lhash.o crypto/md4/md4_dgst.o crypto/md4/md4_one.o crypto/md5/md5_dgst.o crypto/md5/md5_one.o crypto/mdc2/mdc2_one.o crypto/mdc2/mdc2dgst.o crypto/mem.o crypto/mem_dbg.o crypto/mem_sec.o crypto/modes/cbc128.o crypto/modes/ccm128.o crypto/modes/cfb128.o crypto/modes/ctr128.o crypto/modes/cts128.o crypto/modes/gcm128.o crypto/modes/ghashv8-armx.o crypto/modes/ocb128.o crypto/modes/ofb128.o crypto/modes/wrap128.o crypto/modes/xts128.o crypto/o_dir.o crypto/o_fips.o crypto/o_fopen.o crypto/o_init.o crypto/o_str.o crypto/o_time.o crypto/objects/o_names.o crypto/objects/obj_dat.o crypto/objects/obj_err.o crypto/objects/obj_lib.o crypto/objects/obj_xref.o crypto/ocsp/ocsp_asn.o crypto/ocsp/ocsp_cl.o crypto/ocsp/ocsp_err.o 
crypto/ocsp/ocsp_ext.o crypto/ocsp/ocsp_ht.o crypto/ocsp/ocsp_lib.o crypto/ocsp/ocsp_prn.o crypto/ocsp/ocsp_srv.o crypto/ocsp/ocsp_vfy.o crypto/ocsp/v3_ocsp.o crypto/pem/pem_all.o crypto/pem/pem_err.o crypto/pem/pem_info.o crypto/pem/pem_lib.o crypto/pem/pem_oth.o crypto/pem/pem_pk8.o crypto/pem/pem_pkey.o crypto/pem/pem_sign.o crypto/pem/pem_x509.o crypto/pem/pem_xaux.o crypto/pem/pvkfmt.o crypto/pkcs12/p12_add.o crypto/pkcs12/p12_asn.o crypto/pkcs12/p12_attr.o crypto/pkcs12/p12_crpt.o crypto/pkcs12/p12_crt.o crypto/pkcs12/p12_decr.o crypto/pkcs12/p12_init.o crypto/pkcs12/p12_key.o crypto/pkcs12/p12_kiss.o crypto/pkcs12/p12_mutl.o crypto/pkcs12/p12_npas.o crypto/pkcs12/p12_p8d.o crypto/pkcs12/p12_p8e.o crypto/pkcs12/p12_sbag.o crypto/pkcs12/p12_utl.o crypto/pkcs12/pk12err.o crypto/pkcs7/bio_pk7.o crypto/pkcs7/pk7_asn1.o crypto/pkcs7/pk7_attr.o crypto/pkcs7/pk7_doit.o crypto/pkcs7/pk7_lib.o crypto/pkcs7/pk7_mime.o crypto/pkcs7/pk7_smime.o crypto/pkcs7/pkcs7err.o crypto/poly1305/poly1305-armv8.o crypto/poly1305/poly1305.o crypto/rand/md_rand.o crypto/rand/rand_egd.o crypto/rand/rand_err.o crypto/rand/rand_lib.o crypto/rand/rand_unix.o crypto/rand/rand_vms.o crypto/rand/rand_win.o crypto/rand/randfile.o crypto/rc2/rc2_cbc.o crypto/rc2/rc2_ecb.o crypto/rc2/rc2_skey.o crypto/rc2/rc2cfb64.o crypto/rc2/rc2ofb64.o crypto/rc4/rc4_enc.o crypto/rc4/rc4_skey.o crypto/ripemd/rmd_dgst.o crypto/ripemd/rmd_one.o crypto/rsa/rsa_ameth.o crypto/rsa/rsa_asn1.o crypto/rsa/rsa_chk.o crypto/rsa/rsa_crpt.o crypto/rsa/rsa_depr.o crypto/rsa/rsa_err.o crypto/rsa/rsa_gen.o crypto/rsa/rsa_lib.o crypto/rsa/rsa_meth.o crypto/rsa/rsa_none.o crypto/rsa/rsa_null.o crypto/rsa/rsa_oaep.o crypto/rsa/rsa_ossl.o crypto/rsa/rsa_pk1.o crypto/rsa/rsa_pmeth.o crypto/rsa/rsa_prn.o crypto/rsa/rsa_pss.o crypto/rsa/rsa_saos.o crypto/rsa/rsa_sign.o crypto/rsa/rsa_ssl.o crypto/rsa/rsa_x931.o crypto/rsa/rsa_x931g.o crypto/seed/seed.o crypto/seed/seed_cbc.o crypto/seed/seed_cfb.o crypto/seed/seed_ecb.o crypto/seed/seed_ofb.o crypto/sha/sha1-armv8.o crypto/sha/sha1_one.o crypto/sha/sha1dgst.o crypto/sha/sha256-armv8.o crypto/sha/sha256.o crypto/sha/sha512-armv8.o crypto/sha/sha512.o crypto/srp/srp_lib.o crypto/srp/srp_vfy.o crypto/stack/stack.o crypto/threads_none.o crypto/threads_pthread.o crypto/threads_win.o crypto/ts/ts_asn1.o crypto/ts/ts_conf.o crypto/ts/ts_err.o crypto/ts/ts_lib.o crypto/ts/ts_req_print.o crypto/ts/ts_req_utils.o crypto/ts/ts_rsp_print.o crypto/ts/ts_rsp_sign.o crypto/ts/ts_rsp_utils.o crypto/ts/ts_rsp_verify.o crypto/ts/ts_verify_ctx.o crypto/txt_db/txt_db.o crypto/ui/ui_err.o crypto/ui/ui_lib.o crypto/ui/ui_openssl.o crypto/ui/ui_util.o crypto/uid.o crypto/whrlpool/wp_block.o crypto/whrlpool/wp_dgst.o crypto/x509/by_dir.o crypto/x509/by_file.o crypto/x509/t_crl.o crypto/x509/t_req.o crypto/x509/t_x509.o crypto/x509/x509_att.o crypto/x509/x509_cmp.o crypto/x509/x509_d2.o crypto/x509/x509_def.o crypto/x509/x509_err.o crypto/x509/x509_ext.o crypto/x509/x509_lu.o crypto/x509/x509_obj.o crypto/x509/x509_r2x.o crypto/x509/x509_req.o crypto/x509/x509_set.o crypto/x509/x509_trs.o crypto/x509/x509_txt.o crypto/x509/x509_v3.o crypto/x509/x509_vfy.o crypto/x509/x509_vpm.o crypto/x509/x509cset.o crypto/x509/x509name.o crypto/x509/x509rset.o crypto/x509/x509spki.o crypto/x509/x509type.o crypto/x509/x_all.o crypto/x509/x_attrib.o crypto/x509/x_crl.o crypto/x509/x_exten.o crypto/x509/x_name.o crypto/x509/x_pubkey.o crypto/x509/x_req.o crypto/x509/x_x509.o crypto/x509/x_x509a.o crypto/x509v3/pcy_cache.o crypto/x509v3/pcy_data.o 
crypto/x509v3/pcy_lib.o crypto/x509v3/pcy_map.o crypto/x509v3/pcy_node.o crypto/x509v3/pcy_tree.o crypto/x509v3/v3_addr.o crypto/x509v3/v3_akey.o crypto/x509v3/v3_akeya.o crypto/x509v3/v3_alt.o crypto/x509v3/v3_asid.o crypto/x509v3/v3_bcons.o crypto/x509v3/v3_bitst.o crypto/x509v3/v3_conf.o crypto/x509v3/v3_cpols.o crypto/x509v3/v3_crld.o crypto/x509v3/v3_enum.o crypto/x509v3/v3_extku.o crypto/x509v3/v3_genn.o crypto/x509v3/v3_ia5.o crypto/x509v3/v3_info.o crypto/x509v3/v3_int.o crypto/x509v3/v3_lib.o crypto/x509v3/v3_ncons.o crypto/x509v3/v3_pci.o crypto/x509v3/v3_pcia.o crypto/x509v3/v3_pcons.o crypto/x509v3/v3_pku.o crypto/x509v3/v3_pmaps.o crypto/x509v3/v3_prn.o crypto/x509v3/v3_purp.o crypto/x509v3/v3_skey.o crypto/x509v3/v3_sxnet.o crypto/x509v3/v3_tlsf.o crypto/x509v3/v3_utl.o crypto/x509v3/v3err.o +@@ -1934,11 +1938,11 @@ libssl.so: libssl.a libcrypto.so ../util/libssl.num + PERL="$(PERL)" SRCDIR='$(SRCDIR)' DSTDIR="." \ + INSTALLTOP='$(INSTALLTOP)' LIBDIR='$(LIBDIR)' \ + LIBDEPS='$(PLIB_LDFLAGS) '" -L. -lcrypto"' $(EX_LIBS)' \ +- LIBNAME=ssl LIBVERSION=$(SHLIB_MAJOR).$(SHLIB_MINOR) \ ++ LIBNAME=ssl LIBVERSION= \ + LIBCOMPATVERSIONS=';$(SHLIB_VERSION_HISTORY)' \ + CC='$(CC)' CFLAGS='$(CFLAGS) $(LIB_CFLAGS)' \ + LDFLAGS='$(LDFLAGS)' \ +- SHARED_LDFLAGS='$(LIB_LDFLAGS)' SHLIB_EXT=.so.$(SHLIB_MAJOR).$(SHLIB_MINOR) \ ++ SHARED_LDFLAGS='$(LIB_LDFLAGS)' SHLIB_EXT=.so \ + RC='$(RC)' SHARED_RCFLAGS='$(RCFLAGS)' \ + link_shlib.linux-shared + libssl.a: ssl/bio_ssl.o ssl/d1_lib.o ssl/d1_msg.o ssl/d1_srtp.o ssl/methods.o ssl/pqueue.o ssl/record/dtls1_bitmap.o ssl/record/rec_layer_d1.o ssl/record/rec_layer_s3.o ssl/record/ssl3_buffer.o ssl/record/ssl3_record.o ssl/s3_cbc.o ssl/s3_enc.o ssl/s3_lib.o ssl/s3_msg.o ssl/ssl_asn1.o ssl/ssl_cert.o ssl/ssl_ciph.o ssl/ssl_conf.o ssl/ssl_err.o ssl/ssl_init.o ssl/ssl_lib.o ssl/ssl_mcnf.o ssl/ssl_rsa.o ssl/ssl_sess.o ssl/ssl_stat.o ssl/ssl_txt.o ssl/ssl_utst.o ssl/statem/statem.o ssl/statem/statem_clnt.o ssl/statem/statem_dtls.o ssl/statem/statem_lib.o ssl/statem/statem_srvr.o ssl/t1_enc.o ssl/t1_ext.o ssl/t1_lib.o ssl/t1_reneg.o ssl/t1_trce.o ssl/tls_srp.o +-- +2.17.1 diff --git a/src/external/0001-Update-glob-header-cpp-file-for-android-ndk-build.patch b/src/external/0001-Update-glob-header-cpp-file-for-android-ndk-build.patch new file mode 100644 index 00000000..8645bbe3 --- /dev/null +++ b/src/external/0001-Update-glob-header-cpp-file-for-android-ndk-build.patch @@ -0,0 +1,954 @@ +From 9c3180b0cb6333a4e494f380f7351233a7217b6c Mon Sep 17 00:00:00 2001 +From: "Luo, Ying" +Date: Mon, 10 Feb 2020 07:54:52 +0800 +Subject: [PATCH] Update glob header/cpp file for android ndk build. 
+ +Signed-off-by: Luo, Ying +--- + CMakeLists.txt | 1 + + src/glob.c | 786 ++++++++++++++++++++++++++++++++++++++++ + src/glob.h | 113 ++++++ + src/logging_unittest.cc | 2 +- + 4 files changed, 901 insertions(+), 1 deletion(-) + create mode 100644 src/glob.c + create mode 100644 src/glob.h + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 7415eab..862a840 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -464,6 +464,7 @@ endif (UNIX OR (APPLE AND HAVE_DLADDR)) + if (BUILD_TESTING) + add_executable (logging_unittest + src/logging_unittest.cc ++ src/glob.c + ) + + target_link_libraries (logging_unittest PRIVATE glog) +diff --git a/src/glob.c b/src/glob.c +new file mode 100644 +index 0000000..f36b1ec +--- /dev/null ++++ b/src/glob.c +@@ -0,0 +1,786 @@ ++/* ++*Modified for the Android NDK by Gabor Cselle, http://www.gaborcselle.com/ ++*Tested with Android NDK version 5b: http://developer.android.com/sdk/ndk/index.html ++*Last modified: March 3 2011 ++* ++*Copyright (c) 1989, 1993 ++* The Regents of the University of California. All rights reserved. ++* ++*This code is derived from software contributed to Berkeley by ++*Guido van Rossum. ++* ++*Redistribution and use in source and binary forms, with or without ++*modification, are permitted provided that the following conditions ++*are met: ++*1. Redistributions of source code must retain the above copyright ++* notice, this list of conditions and the following disclaimer. ++*2. Redistributions in binary form must reproduce the above copyright ++* notice, this list of conditions and the following disclaimer in the ++* documentation and/or other materials provided with the distribution. ++*4. Neither the name of the University nor the names of its contributors ++* may be used to endorse or promote products derived from this software ++* without specific prior written permission. ++* ++*THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ++*ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE ++*IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ++*ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE ++*FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL ++*DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS ++*OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++*HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT ++*LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY ++*OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF ++*SUCH DAMAGE. ++*/ ++ ++#if defined(LIBC_SCCS) && !defined(lint) ++static char sccsid[] = "@(#)glob.c 8.3 (Berkeley) 10/13/93"; ++#endif /* LIBC_SCCS and not lint */ ++#include ++ ++ ++/* ++*glob(3) -- a superset of the one defined in POSIX 1003.2. ++* ++*The [!...] convention to negate a range is supported (SysV, Posix, ksh). ++* ++*Optional extra services, controlled by flags not defined by POSIX: ++* ++*GLOB_QUOTE: ++* Escaping convention: \ inhibits any special meaning the following ++* character might have (except \ at end of string is retained). ++*GLOB_MAGCHAR: ++* Set in gl_flags if pattern contained a globbing character. ++*GLOB_NOMAGIC: ++* Same as GLOB_NOCHECK, but it will only append pattern if it did ++* not contain any magic characters. [Used in csh style globbing] ++*GLOB_ALTDIRFUNC: ++* Use alternately specified directory access functions. 
++*GLOB_TILDE: ++* expand ~user/foo to the /home/dir/of/user/foo ++*GLOB_BRACE: ++* expand {1,2}{a,b} to 1a 1b 2a 2b ++*gl_matchc: ++* Number of matches in the current invocation of glob. ++*/ ++ ++/* ++*Some notes on multibyte character support: ++*1. Patterns with illegal byte sequences match nothing - even if ++* GLOB_NOCHECK is specified. ++*2. Illegal byte sequences in filenames are handled by treating them as ++* single-byte characters with a value of the first byte of the sequence ++* cast to wchar_t. ++*3. State-dependent encodings are not currently supported. ++*/ ++ ++#include ++#include ++ ++#include ++#include ++#include ++#include "glob.h" ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++ ++//#include "collate.h" - NOTE(gabor): I took this out because it's not available for Android ++// and collate is only used once for string comparisons. As a side-effect, you might not ++// be able to match non-ASCII filenames. ++ ++#define DOLLAR '$' ++#define DOT '.' ++#define EOS '\0' ++#define LBRACKET '[' ++#define NOT '!' ++#define QUESTION '?' ++#define QUOTE '\\' ++#define RANGE '-' ++#define RBRACKET ']' ++#define SEP '/' ++#define STAR '*' ++#define TILDE '~' ++#define UNDERSCORE '_' ++#define LBRACE '{' ++#define RBRACE '}' ++#define SLASH '/' ++#define COMMA ',' ++ ++#define M_PROTECT 0x40 ++#define M_MASK 0xff ++ ++ ++#define M_ALL '*' ++#define M_END ']' ++#define M_NOT '!' ++#define M_ONE '?' ++#define M_RNG '-' ++#define M_SET '[' ++ ++ ++static int g_stat(char *, struct stat *, glob_t *); ++static int g_lstat(char *, struct stat *, glob_t *); ++static DIR *g_opendir(char *, glob_t *); ++static int compare(const void *, const void *); ++static int glob0(const char *, glob_t *, size_t *); ++static int glob1(char *, glob_t *, size_t *); ++static int glob2(char *, char *, char *, char *, glob_t *, size_t *); ++static int glob3(char *, char *, char *, char *, char *, glob_t *, size_t *); ++static int globextend(const char *, glob_t *, size_t *); ++static const char * ++ globtilde(const char *, char *, size_t, glob_t *); ++static int globexp1(const char *, glob_t *, size_t *); ++static int globexp2(const char *, const char *, glob_t *, int *, size_t *); ++static int match(char *, char *, char *); ++ ++int ismeta(char c) { ++ return c == M_ALL || c == M_END || c == M_NOT || c == M_ONE || c == M_RNG || c == M_SET; ++} ++ ++int ++glob(const char *pattern, int flags, int (*errfunc)(const char *, int), glob_t *pglob) ++{ ++ const char *patnext; ++ size_t limit; ++ char *bufnext, *bufend, patbuf[MAXPATHLEN], prot; ++ ++ patnext = pattern; ++ if (!(flags & GLOB_APPEND)) { ++ pglob->gl_pathc = 0; ++ pglob->gl_pathv = NULL; ++ if (!(flags & GLOB_DOOFFS)) ++ pglob->gl_offs = 0; ++ } ++ if (flags & GLOB_LIMIT) { ++ limit = pglob->gl_matchc; ++ if (limit == 0) ++ limit = 131072; ++ } else ++ limit = 0; ++ pglob->gl_flags = flags & ~GLOB_MAGCHAR; ++ pglob->gl_errfunc = errfunc; ++ pglob->gl_matchc = 0; ++ ++ bufnext = patbuf; ++ bufend = bufnext + MAXPATHLEN - 1; ++ if (flags & GLOB_NOESCAPE) { ++ strncpy(bufnext, patnext, sizeof(patbuf)); ++ } else { ++ /* Protect the quoted characters. 
*/ ++ while (bufend >= bufnext && *patnext != EOS) { ++ if (*patnext == QUOTE) { ++ if (*++patnext == EOS) { ++ *bufnext++ = QUOTE | M_PROTECT; ++ continue; ++ } ++ prot = M_PROTECT; ++ } else ++ prot = 0; ++ *bufnext++ = *patnext; ++ patnext++; ++ } ++ } ++ *bufnext = EOS; ++ ++ if (flags & GLOB_BRACE) ++ return globexp1(patbuf, pglob, &limit); ++ else ++ return glob0(patbuf, pglob, &limit); ++} ++ ++/* ++*Expand recursively a glob {} pattern. When there is no more expansion ++*invoke the standard globbing routine to glob the rest of the magic ++*characters ++*/ ++static int ++globexp1(const char *pattern, glob_t *pglob, size_t *limit) ++{ ++ const char* ptr = pattern; ++ int rv; ++ ++ /* Protect a single {}, for find(1), like csh */ ++ if (pattern[0] == LBRACE && pattern[1] == RBRACE && pattern[2] == EOS) ++ return glob0(pattern, pglob, limit); ++ ++ while ((ptr = strchr(ptr, LBRACE)) != NULL) ++ if (!globexp2(ptr, pattern, pglob, &rv, limit)) ++ return rv; ++ ++ return glob0(pattern, pglob, limit); ++} ++ ++ ++/* ++*Recursive brace globbing helper. Tries to expand a single brace. ++*If it succeeds then it invokes globexp1 with the new pattern. ++*If it fails then it tries to glob the rest of the pattern and returns. ++*/ ++static int ++globexp2(const char *ptr, const char *pattern, glob_t *pglob, int *rv, size_t *limit) ++{ ++ int i; ++ char *lm, *ls; ++ const char *pe, *pm, *pm1, *pl; ++ char patbuf[MAXPATHLEN]; ++ ++ /* copy part up to the brace */ ++ for (lm = patbuf, pm = pattern; pm != ptr; *lm++ = *pm++) ++ continue; ++ *lm = EOS; ++ ls = lm; ++ ++ /* Find the balanced brace */ ++ for (i = 0, pe = ++ptr; *pe; pe++) ++ if (*pe == LBRACKET) { ++ /* Ignore everything between [] */ ++ for (pm = pe++; *pe != RBRACKET && *pe != EOS; pe++) ++ continue; ++ if (*pe == EOS) { ++ /* ++ * We could not find a matching RBRACKET. ++ * Ignore and just look for RBRACE ++ */ ++ pe = pm; ++ } ++ } ++ else if (*pe == LBRACE) ++ i++; ++ else if (*pe == RBRACE) { ++ if (i == 0) ++ break; ++ i--; ++ } ++ ++ /* Non matching braces; just glob the pattern */ ++ if (i != 0 || *pe == EOS) { ++ *rv = glob0(patbuf, pglob, limit); ++ return 0; ++ } ++ ++ for (i = 0, pl = pm = ptr; pm <= pe; pm++) ++ switch (*pm) { ++ case LBRACKET: ++ /* Ignore everything between [] */ ++ for (pm1 = pm++; *pm != RBRACKET && *pm != EOS; pm++) ++ continue; ++ if (*pm == EOS) { ++ /* ++ * We could not find a matching RBRACKET. ++ * Ignore and just look for RBRACE ++ */ ++ pm = pm1; ++ } ++ break; ++ ++ case LBRACE: ++ i++; ++ break; ++ ++ case RBRACE: ++ if (i) { ++ i--; ++ break; ++ } ++ /* FALLTHROUGH */ ++ case COMMA: ++ if (i && *pm == COMMA) ++ break; ++ else { ++ /* Append the current string */ ++ for (lm = ls; (pl < pm); *lm++ = *pl++) ++ continue; ++ /* ++ * Append the rest of the pattern after the ++ * closing brace ++ */ ++ for (pl = pe + 1; (*lm++ = *pl++) != EOS;) ++ continue; ++ ++ /* Expand the current pattern */ ++ *rv = globexp1(patbuf, pglob, limit); ++ ++ /* move after the comma, to the next string */ ++ pl = pm + 1; ++ } ++ break; ++ ++ default: ++ break; ++ } ++ *rv = 0; ++ return 0; ++} ++ ++ ++ ++/* ++*expand tilde from the passwd file. 
++*/ ++static const char * ++globtilde(const char *pattern, char *patbuf, size_t patbuf_len, glob_t *pglob) ++{ ++ struct passwd *pwd; ++ char *h; ++ const char *p; ++ char *b, *eb; ++ ++ if (*pattern != TILDE || !(pglob->gl_flags & GLOB_TILDE)) ++ return pattern; ++ ++ /* ++ * Copy up to the end of the string or / ++ */ ++ eb = &patbuf[patbuf_len - 1]; ++ for (p = pattern + 1, h = (char *) patbuf; ++ h < (char *)eb && *p && *p != SLASH; *h++ = *p++) ++ continue; ++ ++ *h = EOS; ++ ++ if (((char *) patbuf)[0] == EOS) { ++ /* ++ * handle a plain ~ or ~/ by expanding $HOME first (iff ++ * we're not running setuid or setgid) and then trying ++ * the password file ++ */ ++// #ifndef __GLIBC__ ++// if (issetugid() != 0 || ++// (h = getenv("HOME")) == NULL) { ++// if (((h = getlogin()) != NULL && ++// (pwd = getpwnam(h)) != NULL) || ++// (pwd = getpwuid(getuid())) != NULL) ++// h = pwd->pw_dir; ++// else ++// return pattern; ++// } ++// #endif ++ } ++ else { ++ /* ++ * Expand a ~user ++ */ ++ if ((pwd = getpwnam((char*) patbuf)) == NULL) ++ return pattern; ++ else ++ h = pwd->pw_dir; ++ } ++ ++ /* Copy the home directory */ ++ for (b = patbuf; b < eb && *h; *b++ = *h++) ++ continue; ++ ++ /* Append the rest of the pattern */ ++ while (b < eb && (*b++ = *p++) != EOS) ++ continue; ++ *b = EOS; ++ ++ return patbuf; ++} ++ ++ ++/* ++*The main glob() routine: compiles the pattern (optionally processing ++*quotes), calls glob1() to do the real pattern matching, and finally ++*sorts the list (unless unsorted operation is requested). Returns 0 ++*if things went well, nonzero if errors occurred. ++*/ ++static int ++glob0(const char *pattern, glob_t *pglob, size_t *limit) ++{ ++ const char *qpatnext; ++ int err; ++ size_t oldpathc; ++ char *bufnext, c, patbuf[MAXPATHLEN]; ++ ++ qpatnext = globtilde(pattern, patbuf, MAXPATHLEN, pglob); ++ oldpathc = pglob->gl_pathc; ++ bufnext = patbuf; ++ ++ /* We don't need to check for buffer overflow any more. */ ++ while ((c = *qpatnext++) != EOS) { ++ switch (c) { ++ case LBRACKET: ++ c = *qpatnext; ++ if (c == NOT) ++ ++qpatnext; ++ if (*qpatnext == EOS || ++ strchr(qpatnext+1, RBRACKET) == NULL) { ++ *bufnext++ = LBRACKET; ++ if (c == NOT) ++ --qpatnext; ++ break; ++ } ++ *bufnext++ = M_SET; ++ if (c == NOT) ++ *bufnext++ = M_NOT; ++ c = *qpatnext++; ++ do { ++ *bufnext++ = c; ++ if (*qpatnext == RANGE && ++ (c = qpatnext[1]) != RBRACKET) { ++ *bufnext++ = M_RNG; ++ *bufnext++ = c; ++ qpatnext += 2; ++ } ++ } while ((c = *qpatnext++) != RBRACKET); ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ *bufnext++ = M_END; ++ break; ++ case QUESTION: ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ *bufnext++ = M_ONE; ++ break; ++ case STAR: ++ pglob->gl_flags |= GLOB_MAGCHAR; ++ /* collapse adjacent stars to one, ++ * to avoid exponential behavior ++ */ ++ if (bufnext == patbuf || bufnext[-1] != M_ALL) ++ *bufnext++ = M_ALL; ++ break; ++ default: ++ *bufnext++ = c; ++ break; ++ } ++ } ++ *bufnext = EOS; ++ ++ if ((err = glob1(patbuf, pglob, limit)) != 0) ++ return(err); ++ ++ /* ++ * If there was no match we are going to append the pattern ++ * if GLOB_NOCHECK was specified or if GLOB_NOMAGIC was specified ++ * and the pattern did not contain any magic characters ++ * GLOB_NOMAGIC is there just for compatibility with csh. 
++ */ ++ if (pglob->gl_pathc == oldpathc) { ++ if (((pglob->gl_flags & GLOB_NOCHECK) || ++ ((pglob->gl_flags & GLOB_NOMAGIC) && ++ !(pglob->gl_flags & GLOB_MAGCHAR)))) ++ return(globextend(pattern, pglob, limit)); ++ else ++ return(GLOB_NOMATCH); ++ } ++ if (!(pglob->gl_flags & GLOB_NOSORT)) ++ qsort(pglob->gl_pathv + pglob->gl_offs + oldpathc, ++ pglob->gl_pathc - oldpathc, sizeof(char *), compare); ++ return(0); ++} ++ ++static int ++compare(const void *p, const void *q) ++{ ++ return(strcmp(*(char **)p, *(char **)q)); ++} ++ ++static int ++glob1(char *pattern, glob_t *pglob, size_t *limit) ++{ ++ char pathbuf[MAXPATHLEN]; ++ ++ /* A null pathname is invalid -- POSIX 1003.1 sect. 2.4. */ ++ if (*pattern == EOS) ++ return(0); ++ return(glob2(pathbuf, pathbuf, pathbuf + MAXPATHLEN - 1, ++ pattern, pglob, limit)); ++} ++ ++/* ++*The functions glob2 and glob3 are mutually recursive; there is one level ++*of recursion for each segment in the pattern that contains one or more ++*meta characters. ++*/ ++static int ++glob2(char *pathbuf, char *pathend, char *pathend_last, char *pattern, ++ glob_t *pglob, size_t *limit) ++{ ++ struct stat sb; ++ char *p, *q; ++ int anymeta; ++ ++ /* ++ * Loop over pattern segments until end of pattern or until ++ * segment with meta character found. ++ */ ++ for (anymeta = 0;;) { ++ if (*pattern == EOS) { /* End of pattern? */ ++ *pathend = EOS; ++ if (g_lstat(pathbuf, &sb, pglob)) ++ return(0); ++ ++ if (((pglob->gl_flags & GLOB_MARK) && ++ pathend[-1] != SEP) && (S_ISDIR(sb.st_mode) ++ || (S_ISLNK(sb.st_mode) && ++ (g_stat(pathbuf, &sb, pglob) == 0) && ++ S_ISDIR(sb.st_mode)))) { ++ if (pathend + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend++ = SEP; ++ *pathend = EOS; ++ } ++ ++pglob->gl_matchc; ++ return(globextend(pathbuf, pglob, limit)); ++ } ++ ++ /* Find end of next segment, copy tentatively to pathend. */ ++ q = pathend; ++ p = pattern; ++ while (*p != EOS && *p != SEP) { ++ if (ismeta(*p)) ++ anymeta = 1; ++ if (q + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *q++ = *p++; ++ } ++ ++ if (!anymeta) { /* No expansion, do next segment. */ ++ pathend = q; ++ pattern = p; ++ while (*pattern == SEP) { ++ if (pathend + 1 > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend++ = *pattern++; ++ } ++ } else /* Need expansion, recurse. */ ++ return(glob3(pathbuf, pathend, pathend_last, pattern, p, ++ pglob, limit)); ++ } ++ /* NOTREACHED */ ++} ++ ++static int ++glob3(char *pathbuf, char *pathend, char *pathend_last, ++ char *pattern, char *restpattern, ++ glob_t *pglob, size_t *limit) ++{ ++ struct dirent *dp; ++ DIR *dirp; ++ int err; ++ char buf[MAXPATHLEN]; ++ ++ /* ++ * The readdirfunc declaration can't be prototyped, because it is ++ * assigned, below, to two functions which are prototyped in glob.h ++ * and dirent.h as taking pointers to differently typed opaque ++ * structures. ++ */ ++ struct dirent *(*readdirfunc)(); ++ ++ if (pathend > pathend_last) ++ return (GLOB_ABORTED); ++ *pathend = EOS; ++ errno = 0; ++ ++ if ((dirp = g_opendir(pathbuf, pglob)) == NULL) { ++ /* TODO: don't call for ENOENT or ENOTDIR? */ ++ if (pglob->gl_errfunc) { ++ if (pglob->gl_errfunc(buf, errno) || ++ pglob->gl_flags & GLOB_ERR) ++ return (GLOB_ABORTED); ++ } ++ return(0); ++ } ++ ++ err = 0; ++ ++ /* Search directory for matching names. */ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ readdirfunc = pglob->gl_readdir; ++ else ++ readdirfunc = readdir; ++ while ((dp = (*readdirfunc)(dirp))) { ++ char *sc; ++ char *dc; ++ ++ /* Initial DOT must be matched literally. 
*/ ++ if (dp->d_name[0] == DOT && *pattern != DOT) ++ continue; ++ dc = pathend; ++ sc = dp->d_name; ++ while (dc < pathend_last) { ++ if ((*dc++ = *sc) == EOS) ++ break; ++ sc++; ++ } ++ if (!match(pathend, pattern, restpattern)) { ++ *pathend = EOS; ++ continue; ++ } ++ err = glob2(pathbuf, --dc, pathend_last, restpattern, ++ pglob, limit); ++ if (err) ++ break; ++ } ++ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ (*pglob->gl_closedir)(dirp); ++ else ++ closedir(dirp); ++ return(err); ++} ++ ++ ++/* ++*Extend the gl_pathv member of a glob_t structure to accomodate a new item, ++*add the new item, and update gl_pathc. ++* ++*This assumes the BSD realloc, which only copies the block when its size ++*crosses a power-of-two boundary; for v7 realloc, this would cause quadratic ++*behavior. ++* ++*Return 0 if new item added, error code if memory couldn't be allocated. ++* ++*Invariant of the glob_t structure: ++* Either gl_pathc is zero and gl_pathv is NULL; or gl_pathc > 0 and ++* gl_pathv points to (gl_offs + gl_pathc + 1) items. ++*/ ++static int ++globextend(const char *path, glob_t *pglob, size_t *limit) ++{ ++ char **pathv; ++ size_t i, newsize, len; ++ char *copy; ++ const char *p; ++ ++ if (*limit && pglob->gl_pathc > *limit) { ++ errno = 0; ++ return (GLOB_NOSPACE); ++ } ++ ++ newsize = sizeof(*pathv) * (2 + pglob->gl_pathc + pglob->gl_offs); ++ pathv = pglob->gl_pathv ? ++ realloc((char *)pglob->gl_pathv, newsize) : ++ malloc(newsize); ++ if (pathv == NULL) { ++ if (pglob->gl_pathv) { ++ free(pglob->gl_pathv); ++ pglob->gl_pathv = NULL; ++ } ++ return(GLOB_NOSPACE); ++ } ++ ++ if (pglob->gl_pathv == NULL && pglob->gl_offs > 0) { ++ /* first time around -- clear initial gl_offs items */ ++ pathv += pglob->gl_offs; ++ for (i = pglob->gl_offs + 1; --i > 0; ) ++ *--pathv = NULL; ++ } ++ pglob->gl_pathv = pathv; ++ ++ for (p = path; *p++;) ++ continue; ++ len = (size_t)(p - path); /* XXX overallocation */ ++ if ((copy = malloc(len)) != NULL) { ++ strncpy(copy, path, len); ++ pathv[pglob->gl_offs + pglob->gl_pathc++] = copy; ++ } ++ pathv[pglob->gl_offs + pglob->gl_pathc] = NULL; ++ return(copy == NULL ? GLOB_NOSPACE : 0); ++} ++ ++/* ++*pattern matching function for filenames. Each occurrence of the * ++*pattern causes a recursion level. ++*/ ++static int ++match(char *name, char *pat, char *patend) ++{ ++ int ok, negate_range; ++ char c, k; ++ ++ while (pat < patend) { ++ c = *pat++; ++ switch (c & M_MASK) { ++ case M_ALL: ++ if (pat == patend) ++ return(1); ++ do ++ if (match(name, pat, patend)) ++ return(1); ++ while (*name++ != EOS); ++ return(0); ++ case M_ONE: ++ if (*name++ == EOS) ++ return(0); ++ break; ++ case M_SET: ++ ok = 0; ++ if ((k = *name++) == EOS) ++ return(0); ++ if ((negate_range = ((*pat & M_MASK) == M_NOT)) != EOS) ++ ++pat; ++ while (((c = *pat++) & M_MASK) != M_END) ++ if ((*pat & M_MASK) == M_RNG) { ++ // NOTE(gabor): This used to be as below, but I took out the collate.h ++ // if (__collate_load_error ? ++ // CHAR(c) <= CHAR(k) && CHAR(k) <= CHAR(pat[1]) : ++ // __collate_range_cmp(CHAR(c), CHAR(k)) <= 0 ++ // && __collate_range_cmp(CHAR(k), CHAR(pat[1])) <= 0 ++ // ) ++ ++ if (c <= k && k <= pat[1]) ++ ok = 1; ++ pat += 2; ++ } else if (c == k) ++ ok = 1; ++ if (ok == negate_range) ++ return(0); ++ break; ++ default: ++ if (*name++ != c) ++ return(0); ++ break; ++ } ++ } ++ return(*name == EOS); ++} ++ ++/* Free allocated data belonging to a glob_t structure. 
*/ ++void ++globfree(glob_t *pglob) ++{ ++ size_t i; ++ char **pp; ++ ++ if (pglob->gl_pathv != NULL) { ++ pp = pglob->gl_pathv + pglob->gl_offs; ++ for (i = pglob->gl_pathc; i--; ++pp) ++ if (*pp) ++ free(*pp); ++ free(pglob->gl_pathv); ++ pglob->gl_pathv = NULL; ++ } ++} ++ ++static int ++g_stat(char *fn, struct stat *sb, glob_t *pglob) ++{ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_stat)(fn, sb)); ++ return(stat(fn, sb)); ++} ++ ++static DIR * ++g_opendir(char *str, glob_t *pglob) ++{ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_opendir)(str)); ++ ++ return(opendir(str)); ++} ++ ++static int ++g_lstat(char *fn, struct stat *sb, glob_t *pglob) ++{ ++ if (pglob->gl_flags & GLOB_ALTDIRFUNC) ++ return((*pglob->gl_lstat)(fn, sb)); ++ return(lstat(fn, sb)); ++} +\ No newline at end of file +diff --git a/src/glob.h b/src/glob.h +new file mode 100644 +index 0000000..6d2ece5 +--- /dev/null ++++ b/src/glob.h +@@ -0,0 +1,113 @@ ++/* ++* Modified for the Android NDK by Gabor Cselle, http://www.gaborcselle.com/ ++* Tested with Android NDK version 5b: http://developer.android.com/sdk/ndk/index.html ++* Last modified: March 3 2011 ++* ++* Copyright (c) 1989, 1993 ++* The Regents of the University of California. All rights reserved. ++* ++* This code is derived from software contributed to Berkeley by ++* Guido van Rossum. ++* ++* Redistribution and use in source and binary forms, with or without ++* modification, are permitted provided that the following conditions ++* are met: ++* 1. Redistributions of source code must retain the above copyright ++* notice, this list of conditions and the following disclaimer. ++* 2. Redistributions in binary form must reproduce the above copyright ++* notice, this list of conditions and the following disclaimer in the ++* documentation and/or other materials provided with the distribution. ++* 3. All advertising materials mentioning features or use of this software ++* must display the following acknowledgement: ++* This product includes software developed by the University of ++* California, Berkeley and its contributors. ++* 4. Neither the name of the University nor the names of its contributors ++* may be used to endorse or promote products derived from this software ++* without specific prior written permission. ++* ++* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ++* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE ++* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ++* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE ++* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL ++* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS ++* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT ++* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY ++* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF ++* SUCH DAMAGE. ++* ++* @(#)glob.h 8.1 (Berkeley) 6/2/93 ++* $FreeBSD$ ++*/ ++ ++#ifndef _GLOB_H_ ++#define _GLOB_H_ ++ ++#include ++//#include ++ ++#ifndef _SIZE_T_DECLARED ++#define __size_t size_t ++#define _SIZE_T_DECLARED ++#endif ++ ++struct stat; ++typedef struct { ++ size_t gl_pathc; /* Count of total paths so far. */ ++ size_t gl_matchc; /* Count of paths matching pattern. */ ++ size_t gl_offs; /* Reserved at beginning of gl_pathv. 
*/ ++ int gl_flags; /* Copy of flags parameter to glob. */ ++ char **gl_pathv; /* List of paths matching pattern. */ ++ /* Copy of errfunc parameter to glob. */ ++ int (*gl_errfunc)(const char *, int); ++ ++ /* ++ * Alternate filesystem access methods for glob; replacement ++ * versions of closedir(3), readdir(3), opendir(3), stat(2) ++ * and lstat(2). ++ */ ++ void (*gl_closedir)(void *); ++ struct dirent *(*gl_readdir)(void *); ++ void *(*gl_opendir)(const char *); ++ int (*gl_lstat)(const char *, struct stat *); ++ int (*gl_stat)(const char *, struct stat *); ++} glob_t; ++ ++// #if __POSIX_VISIBLE >= 199209 ++/* Believed to have been introduced in 1003.2-1992 */ ++#define GLOB_APPEND 0x0001 /* Append to output from previous call. */ ++#define GLOB_DOOFFS 0x0002 /* Use gl_offs. */ ++#define GLOB_ERR 0x0004 /* Return on error. */ ++#define GLOB_MARK 0x0008 /* Append / to matching directories. */ ++#define GLOB_NOCHECK 0x0010 /* Return pattern itself if nothing matches. */ ++#define GLOB_NOSORT 0x0020 /* Don't sort. */ ++#define GLOB_NOESCAPE 0x2000 /* Disable backslash escaping. */ ++ ++/* Error values returned by glob(3) */ ++#define GLOB_NOSPACE (-1) /* Malloc call failed. */ ++#define GLOB_ABORTED (-2) /* Unignored error. */ ++#define GLOB_NOMATCH (-3) /* No match and GLOB_NOCHECK was not set. */ ++#define GLOB_NOSYS (-4) /* Obsolete: source comptability only. */ ++// #endif /* __POSIX_VISIBLE >= 199209 */ ++ ++// #if __BSD_VISIBLE ++#define GLOB_ALTDIRFUNC 0x0040 /* Use alternately specified directory funcs. */ ++#define GLOB_BRACE 0x0080 /* Expand braces ala csh. */ ++#define GLOB_MAGCHAR 0x0100 /* Pattern had globbing characters. */ ++#define GLOB_NOMAGIC 0x0200 /* GLOB_NOCHECK without magic chars (csh). */ ++#define GLOB_QUOTE 0x0400 /* Quote special chars with \. */ ++#define GLOB_TILDE 0x0800 /* Expand tilde names from the passwd file. 
*/ ++#define GLOB_LIMIT 0x1000 /* limit number of returned paths */ ++ ++/* source compatibility, these are the old names */ ++#define GLOB_MAXPATH GLOB_LIMIT ++#define GLOB_ABEND GLOB_ABORTED ++// #endif /* __BSD_VISIBLE */ ++ ++__BEGIN_DECLS ++int glob(const char *, int, int (*)(const char *, int), glob_t *); ++void globfree(glob_t *); ++__END_DECLS ++ ++#endif /* !_GLOB_H_ */ +diff --git a/src/logging_unittest.cc b/src/logging_unittest.cc +index b886222..9b14911 100644 +--- a/src/logging_unittest.cc ++++ b/src/logging_unittest.cc +@@ -34,7 +34,7 @@ + + #include + #ifdef HAVE_GLOB_H +-# include ++# include "glob.h" + #endif + #include + #ifdef HAVE_UNISTD_H +-- +2.17.1 diff --git a/src/external/Update-buffer-operation-and-fix-stream-loop-coredump.patch b/src/external/Update-buffer-operation-and-fix-stream-loop-coredump.patch new file mode 100644 index 00000000..e54b9e6e --- /dev/null +++ b/src/external/Update-buffer-operation-and-fix-stream-loop-coredump.patch @@ -0,0 +1,496 @@ +From 7054a4c28a11a7fdd69ebbd5d25cad28f87805fc Mon Sep 17 00:00:00 2001 +From: "Kang, Xin1" +Date: Tue, 10 Nov 2020 14:22:06 +0800 +Subject: [PATCH] Update buffer operation and fix stream loop coredump issue + +--- + libavcodec/decode.c | 13 ++- + libavcodec/internal.h | 4 + + libavcodec/pthread_slice.c | 4 +- + libavcodec/utils.c | 33 ++++++-- + libavutil/ffversion.h | 2 +- + libopenhevc/openhevc.c | 198 ++++++++++++++++++++++++++++++++++++++++++++- + libopenhevc/openhevc.h | 15 ++++ + 7 files changed, 256 insertions(+), 13 deletions(-) + +diff --git a/libavcodec/decode.c b/libavcodec/decode.c +index 052f93d..97eee2e 100644 +--- a/libavcodec/decode.c ++++ b/libavcodec/decode.c +@@ -139,7 +139,11 @@ static int unrefcount_frame(AVCodecInternal *avci, AVFrame *frame) + int ret; + + /* move the original frame to our backup */ ++ avci->to_free = avci->to_free_list[avci->to_free_idx]; ++ avci->to_free_idx = (avci->to_free_idx + 1) % LIST_SIZE; + av_frame_unref(avci->to_free); ++ if(!avci->to_free) ++ return 0; + av_frame_move_ref(avci->to_free, frame); + + /* now copy everything except the AVBufferRefs back +@@ -1825,7 +1829,14 @@ void avcodec_flush_buffers(AVCodecContext *avctx) + ff_decode_bsfs_uninit(avctx); + + if (!avctx->refcounted_frames) +- av_frame_unref(avctx->internal->to_free); ++ { ++ //av_frame_unref(avctx->internal->to_free); ++ for(int i = 0; i < LIST_SIZE; i++) ++ { ++ av_frame_free(&avctx->internal->to_free_list[i]); ++ } ++ av_freep(&avctx->internal->to_free_list); ++ } + } + + void ff_decode_bsfs_uninit(AVCodecContext *avctx) +diff --git a/libavcodec/internal.h b/libavcodec/internal.h +index d344277..ce227cb 100644 +--- a/libavcodec/internal.h ++++ b/libavcodec/internal.h +@@ -33,6 +33,8 @@ + #include "avcodec.h" + #include "config.h" + ++#define LIST_SIZE 50 ++ + /** + * The codec does not modify any global variables in the init function, + * allowing to call the init function without locking any global mutexes. 
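
The decode.c and internal.h hunks above replace the single AVCodecInternal to_free backup frame with a ring of LIST_SIZE (50) frames cycled through to_free_idx, apparently so that data still referenced from earlier returned frames stays valid for up to LIST_SIZE further decode calls in unrefcounted mode. A standalone sketch of that circular-index pattern; FrameRing and frame_ring_next() are illustrative names, not FFmpeg API:

```c
/* Standalone sketch of the circular backup list the patch introduces.
 * FrameRing and frame_ring_next() are illustrative names, not FFmpeg API. */
#define RING_SIZE 50                 /* mirrors LIST_SIZE in internal.h */

typedef struct FrameRing {
    void *slot[RING_SIZE];           /* stands in for the AVFrame* entries */
    int   idx;                       /* next slot to recycle */
} FrameRing;

/* Hand out the next backup slot and advance the index modulo RING_SIZE,
 * so a given slot is only reused after RING_SIZE further calls. */
static void *frame_ring_next(FrameRing *ring)
{
    void *slot = ring->slot[ring->idx];
    ring->idx = (ring->idx + 1) % RING_SIZE;
    return slot;
}
```

The remaining hunks are the bookkeeping that follows from this list: avcodec_open2() allocates all LIST_SIZE frames up front, and avcodec_flush_buffers() and avcodec_close() free the whole to_free_list instead of a single frame.
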
+@@ -148,6 +150,8 @@ typedef struct AVCodecInternal { + int last_audio_frame; + + AVFrame *to_free; ++ AVFrame *to_free_list[LIST_SIZE]; ++ int to_free_idx; + + FramePool *pool; + +diff --git a/libavcodec/pthread_slice.c b/libavcodec/pthread_slice.c +index 353b996..d227f53 100644 +--- a/libavcodec/pthread_slice.c ++++ b/libavcodec/pthread_slice.c +@@ -117,7 +117,6 @@ void ff_slice_thread_free(AVCodecContext *avctx) + + for (i=0; ithread_count; i++) + pthread_join(c->workers[i], NULL); +- + pthread_mutex_destroy(&c->current_job_lock); + pthread_cond_destroy(&c->current_job_cond); + pthread_cond_destroy(&c->last_job_cond); +@@ -269,9 +268,10 @@ int ff_alloc_entries(AVCodecContext *avctx, int count) + + if (avctx->active_thread_type & FF_THREAD_SLICE) { + SliceThreadContext *p = avctx->internal->thread_ctx; ++ if (p->entries) ++ return 0; + p->thread_count = avctx->thread_count; + p->entries = av_mallocz_array(count, sizeof(int)); +- + p->progress_mutex = av_malloc_array(p->thread_count, sizeof(pthread_mutex_t)); + p->progress_cond = av_malloc_array(p->thread_count, sizeof(pthread_cond_t)); + +diff --git a/libavcodec/utils.c b/libavcodec/utils.c +index 1336e92..19febe5 100644 +--- a/libavcodec/utils.c ++++ b/libavcodec/utils.c +@@ -666,12 +666,23 @@ int attribute_align_arg avcodec_open2(AVCodecContext *avctx, const AVCodec *code + ret = AVERROR(ENOMEM); + goto free_and_end; + } +- ++#if 0 + avctx->internal->to_free = av_frame_alloc(); + if (!avctx->internal->to_free) { + ret = AVERROR(ENOMEM); + goto free_and_end; + } ++#endif ++ avctx->internal->to_free_idx = 0; ++ for(int i = 0; i < LIST_SIZE; i++) ++ { ++ avctx->internal->to_free_list[i] = av_frame_alloc(); ++ if (!avctx->internal->to_free_list[i]) ++ { ++ ret = AVERROR(ENOMEM); ++ goto free_and_end; ++ } ++ } + + avctx->internal->compat_decode_frame = av_frame_alloc(); + if (!avctx->internal->compat_decode_frame) { +@@ -1126,7 +1137,13 @@ FF_ENABLE_DEPRECATION_WARNINGS + av_dict_free(&tmp); + av_freep(&avctx->priv_data); + if (avctx->internal) { +- av_frame_free(&avctx->internal->to_free); ++ //av_frame_free(&avctx->internal->to_free); ++ for(int i = 0; i < LIST_SIZE; i++) ++ { ++ av_frame_free(&avctx->internal->to_free_list[i]); ++ } ++ av_freep(&avctx->internal->to_free_list); ++ + av_frame_free(&avctx->internal->compat_decode_frame); + av_frame_free(&avctx->internal->buffer_frame); + av_packet_free(&avctx->internal->buffer_pkt); +@@ -1173,13 +1190,19 @@ av_cold int avcodec_close(AVCodecContext *avctx) + avctx->internal->frame_thread_encoder && avctx->thread_count > 1) { + ff_frame_thread_encoder_free(avctx); + } +- if (HAVE_THREADS && avctx->internal->thread_ctx) ++ if (HAVE_THREADS && (avctx->internal->thread_ctx || avctx->internal->thread_ctx_frame)) + ff_thread_free(avctx); + if (avctx->codec && avctx->codec->close) + avctx->codec->close(avctx); + avctx->internal->byte_buffer_size = 0; + av_freep(&avctx->internal->byte_buffer); +- av_frame_free(&avctx->internal->to_free); ++ //av_frame_free(&avctx->internal->to_free); ++ for(int i = 0; i < LIST_SIZE; i++) ++ { ++ av_frame_free(&avctx->internal->to_free_list[i]); ++ } ++ av_freep(&avctx->internal->to_free_list); ++ + av_frame_free(&avctx->internal->compat_decode_frame); + av_frame_free(&avctx->internal->buffer_frame); + av_packet_free(&avctx->internal->buffer_pkt); +@@ -1213,13 +1236,13 @@ av_cold int avcodec_close(AVCodecContext *avctx) + av_opt_free(avctx); + av_freep(&avctx->priv_data); + if (av_codec_is_encoder(avctx->codec)) { +- av_freep(&avctx->extradata); + #if 
FF_API_CODED_FRAME + FF_DISABLE_DEPRECATION_WARNINGS + av_frame_free(&avctx->coded_frame); + FF_ENABLE_DEPRECATION_WARNINGS + #endif + } ++ av_freep(&avctx->extradata); + avctx->codec = NULL; + avctx->active_thread_type = 0; + +diff --git a/libavutil/ffversion.h b/libavutil/ffversion.h +index 7bafd1a..a54948f 100644 +--- a/libavutil/ffversion.h ++++ b/libavutil/ffversion.h +@@ -1,5 +1,5 @@ + /* Automatically generated by version.sh, do not manually edit! */ + #ifndef AVUTIL_FFVERSION_H + #define AVUTIL_FFVERSION_H +-#define FFMPEG_VERSION "openhevc-2.0-181-g5e7d0585" ++#define FFMPEG_VERSION "openhevc-2.0-184-g3dd964d" + #endif /* AVUTIL_FFVERSION_H */ +diff --git a/libopenhevc/openhevc.c b/libopenhevc/openhevc.c +index e544f0e..3d9ec64 100644 +--- a/libopenhevc/openhevc.c ++++ b/libopenhevc/openhevc.c +@@ -30,12 +30,19 @@ + + #define MAX_DECODERS 3 + ++typedef struct DecodePicture { ++ AVFrame * picture; ++ int free; ++} DecodePicture; + + typedef struct OHDecoderCtx { + AVCodec *codec; + AVCodecContext *codec_ctx; + AVCodecParserContext *parser_ctx; + AVFrame *picture; ++ DecodePicture *picture_pool; ++ pthread_mutex_t pool_lock; ///< mutex to protect the pool ++ int pool_size; + AVPacket avpkt; + } OHDecoderCtx; + +@@ -70,7 +77,7 @@ static void init_oh_threads(OHDecoderCtx *oh_ctx, int nb_pthreads, + av_opt_set_int(oh_ctx->codec_ctx, "threads", nb_pthreads, 0); + } + +-OHHandle oh_init(int nb_pthreads, int thread_type) ++OHHandle oh_init_async(int nb_pthreads, int thread_type, int pool_size) + { + int i; + OHContext *oh_ctx = av_mallocz(sizeof(OHContext)); +@@ -102,8 +109,14 @@ OHHandle oh_init(int nb_pthreads, int thread_type) + + oh_decoder_ctx->parser_ctx = av_parser_init( oh_decoder_ctx->codec->id ); + oh_decoder_ctx->codec_ctx = avcodec_alloc_context3(oh_decoder_ctx->codec); +- oh_decoder_ctx->picture = av_frame_alloc(); +- ++ oh_decoder_ctx->picture_pool = av_mallocz_array(pool_size, sizeof(DecodePicture)); ++ for (int i = 0; i < pool_size; i++) ++ { ++ oh_decoder_ctx->picture_pool[i].picture = av_frame_alloc(); ++ oh_decoder_ctx->picture_pool[i].free = 1; ++ } ++ oh_decoder_ctx->pool_size = pool_size; ++ oh_decoder_ctx->picture = NULL; + oh_decoder_ctx->codec_ctx->flags |= AV_CODEC_FLAG_UNALIGNED; + + //FIXME OpenHEVC does not seem to use AV_CODEC_CAP_TRUNCATED +@@ -111,9 +124,58 @@ OHHandle oh_init(int nb_pthreads, int thread_type) + oh_decoder_ctx->codec_ctx->flags |= AV_CODEC_FLAG_TRUNCATED; /* we do not send complete frames */ + + init_oh_threads(oh_decoder_ctx, nb_pthreads, thread_type); ++ av_opt_set_int(oh_decoder_ctx->codec_ctx, "refcounted_frames", 1, 0); ++ av_opt_set_int(oh_decoder_ctx->codec_ctx->priv_data, "decoder-id", i, 0); ++ pthread_mutex_init(&oh_decoder_ctx->pool_lock ,NULL); ++ } ++ ++ return (OHHandle) oh_ctx; ++} ++ ++ ++OHHandle oh_init(int nb_pthreads, int thread_type) ++{ ++ int i; ++ OHContext *oh_ctx = av_mallocz(sizeof(OHContext)); ++ OHDecoderCtx *oh_decoder_ctx; ++ ++ av_log(NULL,AV_LOG_DEBUG, "INIT openHEVC context\n"); ++ ++ avcodec_register_all(); ++ ++ oh_ctx->nb_decoders = MAX_DECODERS; ++ oh_ctx->target_active_layer = MAX_DECODERS-1; ++ oh_ctx->target_display_layer = MAX_DECODERS-1; ++ ++ oh_ctx->ctx_list = av_malloc(sizeof(OHDecoderCtx*)*oh_ctx->nb_decoders); ++ ++ pthread_mutex_init(&oh_ctx->layer_switch ,NULL); ++ ++ for( i = 0; i < oh_ctx->nb_decoders; i++){ ++ oh_decoder_ctx = oh_ctx->ctx_list[i] = av_malloc(sizeof(OHDecoderCtx)); ++ av_init_packet(&oh_decoder_ctx->avpkt); ++ ++ oh_decoder_ctx->codec = 
avcodec_find_decoder(AV_CODEC_ID_HEVC); + ++ if (!oh_decoder_ctx->codec) { ++ av_log(NULL, AV_LOG_ERROR, ++ "OpenHEVC could not find a suitable codec for hevc stream\n"); ++ return NULL; ++ } ++ ++ oh_decoder_ctx->parser_ctx = av_parser_init( oh_decoder_ctx->codec->id ); ++ oh_decoder_ctx->codec_ctx = avcodec_alloc_context3(oh_decoder_ctx->codec); ++ oh_decoder_ctx->picture = av_frame_alloc(); ++ oh_decoder_ctx->codec_ctx->flags |= AV_CODEC_FLAG_UNALIGNED; ++ ++ //FIXME OpenHEVC does not seem to use AV_CODEC_CAP_TRUNCATED ++ if(oh_decoder_ctx->codec->capabilities & AV_CODEC_CAP_TRUNCATED) ++ oh_decoder_ctx->codec_ctx->flags |= AV_CODEC_FLAG_TRUNCATED; /* we do not send complete frames */ ++ ++ init_oh_threads(oh_decoder_ctx, nb_pthreads, thread_type); + av_opt_set_int(oh_decoder_ctx->codec_ctx->priv_data, "decoder-id", i, 0); + } ++ + return (OHHandle) oh_ctx; + } + +@@ -237,6 +299,55 @@ int oh_start(OHHandle openHevcHandle) + return 1; + } + ++void oh_release_picture(OHHandle openHevcHandle, const unsigned char *dataY) ++{ ++ OHContext *oh_ctx = (OHContext *)openHevcHandle; ++ OHDecoderCtx *oh_decoder_ctx; ++ for (int i = 0; i < MAX_DECODERS - 1; i++) ++ { ++ oh_decoder_ctx = oh_ctx->ctx_list[i]; ++ if (i <= oh_ctx->target_active_layer) ++ { ++ pthread_mutex_lock(&oh_decoder_ctx->pool_lock); ++ for (int j = 0; j < oh_decoder_ctx->pool_size; j++) ++ { ++ if (!oh_decoder_ctx->picture_pool[j].free && oh_decoder_ctx->picture_pool[j].picture->data[0] == dataY) ++ { ++ av_frame_unref(oh_decoder_ctx->picture_pool[j].picture); ++ oh_decoder_ctx->picture_pool[j].free = 1; ++ } ++ } ++ pthread_mutex_unlock(&oh_decoder_ctx->pool_lock); ++ } ++ } ++} ++ ++int oh_acquire_picture(OHHandle openHevcHandle) ++{ ++ OHContext *oh_ctx = (OHContext *)openHevcHandle; ++ OHDecoderCtx *oh_decoder_ctx; ++ int found = 0; ++ for (int i = 0; i < MAX_DECODERS - 1; i++) ++ { ++ oh_decoder_ctx = oh_ctx->ctx_list[i]; ++ if (i <= oh_ctx->target_active_layer) ++ { ++ pthread_mutex_lock(&oh_decoder_ctx->pool_lock); ++ for (int j = 0; j < oh_decoder_ctx->pool_size; j++) ++ { ++ if (oh_decoder_ctx->picture_pool[j].free) ++ { ++ found = 1; ++ oh_decoder_ctx->picture = oh_decoder_ctx->picture_pool[j].picture; ++ oh_decoder_ctx->picture_pool[j].free = 0; ++ break; ++ } ++ } ++ pthread_mutex_unlock(&oh_decoder_ctx->pool_lock); ++ } ++ } ++ return found; ++} + int oh_decode(OHHandle openHevcHandle, const unsigned char *buff, int au_len, + int64_t pts) + { +@@ -263,7 +374,6 @@ int oh_decode(OHHandle openHevcHandle, const unsigned char *buff, int au_len, + oh_decoder_ctx->avpkt.size = au_len; + oh_decoder_ctx->avpkt.data = (uint8_t *) buff; + oh_decoder_ctx->avpkt.pts = pts; +- + AV_NOWARN_DEPRECATED( + err = avcodec_decode_video2(oh_decoder_ctx->codec_ctx, oh_decoder_ctx->picture, + &got_picture, &oh_decoder_ctx->avpkt);) +@@ -291,6 +401,60 @@ AV_NOWARN_DEPRECATED( + return oh_ctx->got_picture_mask; + } + ++int oh_decode_async(OHHandle openHevcHandle, const unsigned char *buff, int au_len, ++ int64_t pts) ++{ ++ int i; ++ int ret = 0; ++ int err = 0; ++ ++ OHContext *oh_ctx = (OHContext *) openHevcHandle; ++ OHDecoderCtx *oh_decoder_ctx; ++ ++ int target_active_layer = oh_ctx->target_active_layer; ++ ++ oh_ctx->got_picture_mask = 0; ++ ++ pthread_mutex_lock(&oh_ctx->layer_switch); ++ ++ for(i =0; i < MAX_DECODERS - 1; i++) { ++ int got_picture = 0; ++ oh_decoder_ctx = oh_ctx->ctx_list[i]; ++ ++ oh_decoder_ctx->codec_ctx->quality_id = oh_ctx->target_active_layer; ++ ++ if (i <= oh_ctx->target_active_layer){ ++ 
oh_decoder_ctx->avpkt.size = au_len; ++ oh_decoder_ctx->avpkt.data = (uint8_t *) buff; ++ oh_decoder_ctx->avpkt.pts = pts; ++AV_NOWARN_DEPRECATED( ++ err = avcodec_decode_video2(oh_decoder_ctx->codec_ctx, oh_decoder_ctx->picture, ++ &got_picture, &oh_decoder_ctx->avpkt);) ++ ret |= (got_picture << i); ++ if (!got_picture) ++ oh_release_picture(openHevcHandle, oh_decoder_ctx->picture->data[0]); ++ } else { ++ oh_decoder_ctx->avpkt.size = 0; ++ oh_decoder_ctx->avpkt.data = NULL; ++ oh_decoder_ctx->avpkt.pts = 0; ++ avcodec_flush_buffers(oh_decoder_ctx->codec_ctx); ++ } ++ ++ if(i < oh_ctx->target_active_layer) ++ oh_ctx->ctx_list[i+1]->codec_ctx->BL_frame = ++ oh_ctx->ctx_list[i]->codec_ctx->BL_frame; ++ } ++ pthread_mutex_unlock(&oh_ctx->layer_switch); ++ ++ oh_ctx->got_picture_mask = ret; ++ ++ if (err < 0) { ++ av_log(NULL,AV_LOG_ERROR,"openHEVC decoder returned an error while decoding frame \n"); ++ return err; ++ } ++ ++ return oh_ctx->got_picture_mask; ++} + + #if OHCONFIG_AVCBASE + //FIXME: There should be a better way to synchronize decoders +@@ -923,8 +1087,34 @@ void oh_close(OHHandle openHevcHandle) + av_freep(&oh_ctx_list->ctx_list); + av_freep(&oh_ctx_list); + av_log(NULL,AV_LOG_DEBUG,"Close openHEVC decoder\n"); ++} + ++void oh_close_async(OHHandle openHevcHandle) ++{ ++ OHContext *oh_ctx_list = (OHContext *) openHevcHandle; ++ OHDecoderCtx *oh_ctx; ++ int i, j; + ++ av_log(NULL,AV_LOG_DEBUG,"Closing openHEVC\n"); ++ for (i = oh_ctx_list->nb_decoders - 1; i >= 0 ; i--){ ++ oh_ctx = oh_ctx_list->ctx_list[i]; ++ avcodec_flush_buffers(oh_ctx->codec_ctx); ++ ++ avcodec_close(oh_ctx->codec_ctx); ++ ++ av_parser_close(oh_ctx->parser_ctx); ++ ++ av_freep(&oh_ctx->codec_ctx); ++ for (j = 0; j < oh_ctx->pool_size; j++) ++ av_freep(&(oh_ctx->picture_pool[j].picture)); ++ av_freep(&oh_ctx->picture_pool); ++ pthread_mutex_destroy(&oh_ctx->pool_lock); ++ av_freep(&oh_ctx); ++ } ++ pthread_mutex_destroy(&oh_ctx_list->layer_switch); ++ av_freep(&oh_ctx_list->ctx_list); ++ av_freep(&oh_ctx_list); ++ av_log(NULL,AV_LOG_DEBUG,"Close openHEVC decoder\n"); + } + + void oh_flush(OHHandle openHevcHandle) +diff --git a/libopenhevc/openhevc.h b/libopenhevc/openhevc.h +index 0a63b5c..1565b61 100644 +--- a/libopenhevc/openhevc.h ++++ b/libopenhevc/openhevc.h +@@ -158,6 +158,8 @@ typedef struct OHFrame_cpy { + * @param type thread_type of the threading design in use for decoding + * @return A decoder list if a decoder of each layer were found, NULL otherwise. + */ ++OHHandle oh_init_async(int nb_threads, int thread_type, int pool_size); ++ + OHHandle oh_init(int nb_threads, int thread_type); + + /** +@@ -214,6 +216,11 @@ int oh_start(OHHandle oh_hdl); + int oh_decode(OHHandle oh_hdl, const unsigned char *pkt_data, + int pkt_size, int64_t pkt_pts); + ++int oh_decode_async(OHHandle oh_hdl, const unsigned char *pkt_data, ++ int pkt_size, int64_t pkt_pts); ++ ++void oh_release_picture(OHHandle openHevcHandle, const unsigned char *dataY); ++int oh_acquire_picture(OHHandle openHevcHandle); + #if OHCONFIG_AVCBASE + /** + * Decode the video frame of size pkt_size from pkt_data into picture, in case +@@ -267,6 +274,14 @@ void oh_flush_shvc(OHHandle oh_hdl, int layer_idx); + void oh_close(OHHandle oh_hdl); + + /** ++ * Close and free all decoders, parsers and pictures. 
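
Taken together, the new exports form a pull-style decode path: oh_init_async() gives each layer decoder a mutex-protected pool of pool_size frames, oh_acquire_picture() reserves a free slot for each active layer, oh_decode_async() decodes into the reserved slot and returns a per-layer got-picture bitmask, and oh_release_picture() returns a slot to the pool by matching the frame's luma pointer. A hedged sketch of how a caller might drive these entry points, based only on the signatures in this patch; get_access_unit() and luma_of_last_output() are hypothetical stand-ins for the caller's demuxer and for openHEVC's existing frame-output API:

```c
/* Caller-side sketch of the async API added by this patch.
 * get_access_unit() and luma_of_last_output() are hypothetical stand-ins;
 * the oh_*_async / oh_*_picture calls are the ones declared in openhevc.h. */
#include <stdint.h>
#include "openhevc.h"

extern int get_access_unit(unsigned char **au, int *len, int64_t *pts);  /* hypothetical demuxer */
extern const unsigned char *luma_of_last_output(OHHandle h);             /* hypothetical output helper */

static int decode_loop(int nb_threads, int thread_type, int pool_size)
{
    OHHandle h = oh_init_async(nb_threads, thread_type, pool_size);
    if (!h)
        return -1;
    oh_start(h);

    unsigned char *au; int len; int64_t pts;
    while (get_access_unit(&au, &len, &pts)) {
        while (!oh_acquire_picture(h))
            ;                            /* pool exhausted: wait for a slot to be released */

        int mask = oh_decode_async(h, au, len, pts);
        if (mask < 0)
            break;                       /* decoder error */
        if (mask) {                      /* bit i set: layer i produced a picture */
            const unsigned char *luma = luma_of_last_output(h);
            /* ...consume the picture, then hand its slot back to the pool... */
            oh_release_picture(h, luma);
        }
    }
    oh_close_async(h);
    return 0;
}
```

Note that oh_decode_async() already releases the acquired slot itself when a layer produces no picture, so the caller only needs to release slots for frames it actually received.
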
++ * ++ * @param oh_hdl The codec context list of current decoders ++ */ ++void oh_close_async(OHHandle oh_hdl); ++ ++ ++/** + * Update informations on the output frame + * + * @param oh_hdl The codec context list of current decoders +-- +1.8.3.1 diff --git a/src/external/build.sh b/src/external/build.sh new file mode 100755 index 00000000..9418fadc --- /dev/null +++ b/src/external/build.sh @@ -0,0 +1,224 @@ +#!/bin/bash -ex + +TARGET=$1 +PREBUILD_FLAG=$2 +EX_PATH=${PWD} +SRC_PATH=${PWD}/.. + +parameters_usage(){ + echo 'Usage: 1. : [ server, client, test ]' + echo ' 2. : [ y, n ]' +} + +build_server(){ + if [ "${PREBUILD_FLAG}" == "y" ] ; then + ./prebuild.sh server + fi + ./install_safestringlib.sh + mkdir -p ../build/server + cd ../build/server + export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH + if [ "$1" == "oss" ] ; then + cd ${EX_PATH}/../distributed_encoder/util + thrift -r --gen cpp shared.thrift + patch gen-cpp/shared_types.h Implement_operator_RegionInformation.patch + cd - + cmake -DCMAKE_BUILD_TYPE=Release -DTARGET=server -DDE_FLAG=true ../.. + else + sudo cp ../../ffmpeg/dependency/*.so /usr/local/lib/ + sudo cp ../../ffmpeg/dependency/*.pc /usr/local/lib/pkgconfig/ + sudo cp ../../ffmpeg/dependency/*.h /usr/local/include/ + sudo cp ../../ffmpeg/dependency/WorkerServer /root + cmake -DCMAKE_BUILD_TYPE=Release -DTARGET=server ../.. + fi + make -j $(nproc) + sudo make install +} + +build_client(){ + if [ "${PREBUILD_FLAG}" == "y" ] ; then + ./prebuild.sh client + fi + ./install_safestringlib.sh + mkdir -p ../build/client + cd ../build/client + export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH + cmake -DCMAKE_BUILD_TYPE=Release -DTARGET=client ../../ + make -j $(nproc) + sudo make install + cp ../../player/app/linux/config.xml ./player/app +} + +build_ci(){ + source /opt/rh/devtoolset-7/enable + PREBUILD_FLAG="n" + GIT_SHORT_HEAD=`git rev-parse --short HEAD` + + # Build server + if [ "$1" == "oss" ] ; then + ./install_FFmpeg.sh server "oss" + cd ${EX_PATH} && build_server "oss" + mkdir -p ${EX_PATH}/../ffmpeg/dependency/ + cp /usr/local/lib/libDistributedEncoder.so ${EX_PATH}/../ffmpeg/dependency/ + cp /usr/local/lib/libEncoder.so ${EX_PATH}/../ffmpeg/dependency/ + cp /root/WorkerServer ${EX_PATH}/../ffmpeg/dependency/ + else + ./install_FFmpeg.sh server + cd ${EX_PATH} && build_server + fi + + if [ "$1" == "oss" ] ; then + cd ${EX_PATH}/../build/external && mkdir -p ffmpeg_server_so + sudo cp /usr/local/lib/libavcodec.so ffmpeg_server_so/libavcodec.so.58 + sudo cp /usr/local/lib/libavutil.so ffmpeg_server_so/libavutil.so.56 + sudo cp /usr/local/lib/libavformat.so ffmpeg_server_so/libavformat.so.58 + sudo cp /usr/local/lib/libavfilter.so ffmpeg_server_so/libavfilter.so.7 + sudo cp /usr/local/lib/libswresample.so ffmpeg_server_so/libswresample.so.3 + sudo cp /usr/local/lib/libpostproc.so ffmpeg_server_so/libpostproc.so.55 + sudo cp /usr/local/lib/libswscale.so ffmpeg_server_so/libswscale.so.5 + cd ${EX_PATH} && ./fpm.sh server ${GIT_SHORT_HEAD} + fi + + # Build client + cd ${EX_PATH} && ./install_FFmpeg.sh client + cd ${EX_PATH} && build_client + + if [ "$1" == "oss" ] ; then + cd ${EX_PATH}/../build/external && mkdir -p ffmpeg_client_so + sudo cp ffmpeg_client/libavcodec/libavcodec.so.58 ffmpeg_client_so/libavcodec.so.58 + sudo cp ffmpeg_client/libavutil/libavutil.so.56 ffmpeg_client_so/libavutil.so.56 + sudo cp ffmpeg_client/libavformat/libavformat.so.58 ffmpeg_client_so/libavformat.so.58 + 
sudo cp ffmpeg_client/libavfilter/libavfilter.so.7 ffmpeg_client_so/libavfilter.so.7 + sudo cp ffmpeg_client/libavdevice/libavdevice.so.58 ffmpeg_client_so/libavdevice.so.58 + sudo cp ffmpeg_client/libswscale/libswscale.so.5 ffmpeg_client_so/libswscale.so.5 + sudo cp ffmpeg_client/libswresample/libswresample.so.3 ffmpeg_client_so/libswresample.so.3 + sudo cp /usr/local/lib/libpostproc.so.55 ffmpeg_client_so/libpostproc.so.55 + # cd ${EX_PATH} && ./fpm.sh client ${GIT_SHORT_HEAD} + fi +} + +build_test(){ + echo "Compiling unit test ..." + source /opt/rh/devtoolset-7/enable + + mkdir -p ../build/test && cd ../build/test + cp -r ../../google_test/gtest/ . + if [ ! -d "./googletest" ];then + git clone https://github.com/google/googletest.git + cd googletest && git checkout -b v1.8.x origin/v1.8.x + cd googletest && mkdir build && cd build + cmake -DBUILD_SHARED_LIBS=ON .. && make -j $(nproc) + g++ -I../include/ -I.. -c ../src/gtest-all.cc -D_GLIBCXX_USE_CXX11_ABI=0 + g++ -I../include/ -I.. -c ../src/gtest_main.cc -D_GLIBCXX_USE_CXX11_ABI=0 + ar -rv libgtest.a gtest-all.o gtest_main.o + fi + + cd ${EX_PATH}/.. + mkdir -p build/test/360SCVP + mkdir -p build/test/OmafDashAccess + mkdir -p build/test/VROmafPacking + mkdir -p build/test/distributed_encoder + + BASIC_CONFIG="-I${SRC_PATH}/google_test -std=c++11 -I../util/ "` + `"-I${SRC_PATH}/utils -D_GLIBCXX_USE_CXX11_ABI=0 -g -c" + SHARED_CONFIG="-L/usr/local/lib -I/usr/local/include/ "` + `"../googletest/googletest/build/libgtest.a "` + `"-lstdc++ -lpthread -lglog -lm -l360SCVP -lsafestring_shared " + + # Compile 360SCVP test + cd build/test/360SCVP && \ + g++ ${BASIC_CONFIG} ${EX_PATH}/../360SCVP/test/testI360SCVP.cpp && \ + g++ testI360SCVP.o ${SHARED_CONFIG} -o testI360SCVP + + # Compile OmafDashAccess test + DA_TEST_PATH="${SRC_PATH}/OmafDashAccess/test" + DA_SHARED_CONFIG="${SHARED_CONFIG} -lOmafDashAccess" + cd ../OmafDashAccess && \ + g++ ${BASIC_CONFIG} ${DA_TEST_PATH}/testMediaSource.cpp && \ + g++ ${BASIC_CONFIG} ${DA_TEST_PATH}/testMPDParser.cpp && \ + g++ ${BASIC_CONFIG} ${DA_TEST_PATH}/testOmafReader.cpp && \ + g++ ${BASIC_CONFIG} ${DA_TEST_PATH}/testOmafReaderManager.cpp && \ + g++ testMediaSource.o ${DA_SHARED_CONFIG} -o testMediaSource && \ + g++ testMPDParser.o ${DA_SHARED_CONFIG} -o testMPDParser && \ + g++ testOmafReader.o ${DA_SHARED_CONFIG} -o testOmafReader && \ + g++ testOmafReaderManager.o ${DA_SHARED_CONFIG} -o testOmafReaderManager + + # Compile VROmafPacking test + OP_TEST_PATH="${SRC_PATH}/VROmafPacking/test" + OP_VS_CONFIG="-I${SRC_PATH}/plugins/StreamProcess_Plugin/VideoStream_Plugin/common/ "` + `"-I${SRC_PATH}/plugins/StreamProcess_Plugin/VideoStream_Plugin/HevcVideoStream/" + OP_SHARED_CONFIG="${SHARED_CONFIG} -lVROmafPacking -lHevcVideoStreamProcess -ldl" + cd ../VROmafPacking && \ + g++ ${OP_VS_CONFIG} ${BASIC_CONFIG} ${OP_TEST_PATH}/testHevcNaluParser.cpp && \ + g++ ${OP_VS_CONFIG} ${BASIC_CONFIG} ${OP_TEST_PATH}/testVideoStream.cpp && \ + g++ ${OP_VS_CONFIG} ${BASIC_CONFIG} ${OP_TEST_PATH}/testExtractorTrack.cpp && \ + g++ ${OP_VS_CONFIG} ${BASIC_CONFIG} ${OP_TEST_PATH}/testDefaultSegmentation.cpp && \ + g++ testHevcNaluParser.o ${OP_SHARED_CONFIG} -o testHevcNaluParser && \ + g++ testVideoStream.o ${OP_SHARED_CONFIG} -o testVideoStream && \ + g++ testExtractorTrack.o ${OP_SHARED_CONFIG} -o testExtractorTrack && \ + g++ testDefaultSegmentation.o ${OP_SHARED_CONFIG} -o testDefaultSegmentation + + if [ "$1" == "oss" ] ; then + # Compile distributed_encoder test + 
DE_TEST_PATH="${SRC_PATH}/distributed_encoder/test" + DE_BASIC_CONFIG="${BASIC_CONFIG} -I/usr/local/include/svt-hevc "` + `"-I../../../distributed_encoder/util/" + DE_SHARED_CONFIG="${SHARED_CONFIG} -lDistributedEncoder -lEncoder -pthread "` + `"-I/usr/local/include/thrift -I/usr/local/include/svt-hevc "` + `"-lthrift -lthriftnb -lSvtHevcEnc -lopenhevc -levent -lz "` + `"-lavutil -lavdevice -lavfilter -lavformat -lavcodec "` + `"-lswscale -lswresample -lva-drm -lva-x11 -lva -lXv -lX11 "` + `"-lXext -lxcb -lxcb-shm -lxcb-shape -lxcb-xfixes -llzma " + cd ../distributed_encoder && \ + g++ ${DE_BASIC_CONFIG} ${DE_TEST_PATH}/testMainEncoder.cpp && \ + g++ ${DE_BASIC_CONFIG} ${DE_TEST_PATH}/testWorkSession.cpp && \ + g++ ${DE_BASIC_CONFIG} ${DE_TEST_PATH}/testDecoder.cpp && \ + g++ ${DE_BASIC_CONFIG} ${DE_TEST_PATH}/testEncoder.cpp && \ + g++ ${DE_BASIC_CONFIG} ${DE_TEST_PATH}/testSubEncoder.cpp && \ + g++ ${DE_BASIC_CONFIG} ${DE_TEST_PATH}/testSubEncoderManager.cpp && \ + g++ testMainEncoder.o ${DE_SHARED_CONFIG} -o testMainEncoder && \ + g++ testWorkSession.o ${DE_SHARED_CONFIG} -o testWorkSession && \ + g++ testDecoder.o ${DE_SHARED_CONFIG} -o testDecoder && \ + g++ testEncoder.o ${DE_SHARED_CONFIG} -o testEncoder && \ + g++ testSubEncoder.o ${DE_SHARED_CONFIG} -o testSubEncoder && \ + g++ testSubEncoderManager.o ${DE_SHARED_CONFIG} -o testSubEncoderManager + fi +} + +if [ $# == 2 ] ; then + + if [ "${TARGET}" == "server" ] ; then + if [ "${PREBUILD_FLAG}" != "y" ] && [ "${PREBUILD_FLAG}" != "n" ] ; then + parameters_usage + exit 1 + else + build_server + fi + elif [ "${TARGET}" == "client" ] ; then + if [ "${PREBUILD_FLAG}" != "y" ] && [ "${PREBUILD_FLAG}" != "n" ] ; then + parameters_usage + exit 1 + else + build_client + fi + else + parameters_usage + fi + +elif [ $# == 1 ] ; then + + if [ "${TARGET}" == "ci" ] ; then + build_ci + elif [ "${TARGET}" == "ci_oss" ] ; then + build_ci "oss" + elif [ "${TARGET}" == "test" ] ; then + build_test + elif [ "${TARGET}" == "test_oss" ] ; then + build_test "oss" + else + parameters_usage + fi + +else + parameters_usage +fi diff --git a/src/external/build_Nokia_omaf.sh b/src/external/build_Nokia_omaf.sh deleted file mode 100755 index 41ea7ef4..00000000 --- a/src/external/build_Nokia_omaf.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/sh -e -cd ../build/external - -if [ ! -d "./omaf" ] ; then - git clone https://github.com/nokiatech/omaf.git -fi - -if [ -L ${PWD}/../../OmafDashAccess/mp4lib ] ; then - rm -rf ${PWD}/../../OmafDashAccess/mp4lib -fi -ln -s ${PWD}/omaf/Mp4/srcs ${PWD}/../../OmafDashAccess/mp4lib -cd omaf -patch -p1 < ../../../external/nokia_omaf_patch_for_extrator_reader.diff -cd Mp4/srcs - -if [ ! -d "./build" ] ; then - mkdir build -fi -cd build - -cmake .. 
-make -j`nproc` - -cp -r ../api/streamsegmenter ../../../../../../VROmafPacking/ -sudo cp lib/libstreamsegmenter_static_fpic.a /usr/local/lib/ -sudo cp lib/libstreamsegmenter_static.a /usr/local/lib/ -sudo cp lib/libmp4vr_static_fpic.a /usr/local/lib/ -sudo cp lib/libmp4vr_static.a /usr/local/lib/ diff --git a/src/external/build_client.sh b/src/external/build_client.sh deleted file mode 100755 index ff1068c0..00000000 --- a/src/external/build_client.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -x -./prebuild.sh client -mkdir -p ../build/client -cd ../build/client -export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH -cmake -DCMAKE_BUILD_TYPE=Release -DTARGET=client ../../ -make -j `nproc` - diff --git a/src/external/build_glog.sh b/src/external/build_glog.sh deleted file mode 100755 index 198bd73a..00000000 --- a/src/external/build_glog.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -e - -cd ../build/external -if [ ! -d "./glog" ] ; then - git clone https://github.com/google/glog.git -fi - -cd glog -./autogen.sh -./configure -make -j8 -sudo make install diff --git a/src/external/build_server.sh b/src/external/build_server.sh deleted file mode 100755 index 799632bc..00000000 --- a/src/external/build_server.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -./prebuild.sh server -mkdir -p ../build/server -cd ../build/server -export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH -cmake -DCMAKE_BUILD_TYPE=Release -DTARGET=server .. -make -j `nproc` -make install - diff --git a/src/external/curl_arm64.sh b/src/external/curl_arm64.sh new file mode 100755 index 00000000..84c1ffad --- /dev/null +++ b/src/external/curl_arm64.sh @@ -0,0 +1,7 @@ +#curl +cd ../build/external/android/curl-7.66.0 +cp ../../../../external/env_curl_withssl.sh ./ +./env_curl_withssl.sh arm64-v8a +make clean +make -j +make install diff --git a/src/external/dockerfile b/src/external/dockerfile new file mode 100644 index 00000000..6aeeeeda --- /dev/null +++ b/src/external/dockerfile @@ -0,0 +1,189 @@ +FROM centos:7.6.1810 + +RUN if [ -n "${http_proxy}" ] ; then \ + echo "proxy=${http_proxy}" >> /etc/yum.conf && \ + echo "http_proxy=${http_proxy}" >> /etc/wgetrc && \ + echo "https_proxy=${https_proxy}" >> /etc/wgetrc ; \ + fi + +WORKDIR /home/immersive +ARG WORKDIR=/home/immersive + +# Install denpendency +RUN yum install -y centos-release-scl-rh && \ + yum install -y wget git bzip2 xz sudo devtoolset-7-gcc* && \ + wget https://download-ib01.fedoraproject.org/pub/epel/7/x86_64/Packages/e/epel-release-7-13.noarch.rpm && \ + rpm -Uvh epel-release*rpm && \ + yum install -y openssl centos-release-scl scl-utils gmp gmp-devel && \ + yum install -y mpfr mpfr-devel libmpc libmpc-devel patch autoconf && \ + yum install -y libtool automake libcurl-devel libxml2-devel && \ + yum install -y libevent-devel.x86_64 openssl-devel bc redhat-lsb && \ + yum install -y libXrandr libXrandr-devel libXinerama libXinerama-devel && \ + yum install -y libXcursor libXcursor-devel libXi libXi-devel glm-devel && \ + yum install -y mesa-libGL mesa-libGL-devel mesa-libGLU && \ + yum install -y mesa-libGLU-devel mesa-libGLES-devel mesa-libEGL-devel && \ + yum install -y SDL2 SDL2-devel libcurl4-openssl-dev libglfw3 && \ + yum install -y libXv-devel glfw glfw-devel xz-devel lzma -y && \ + yum install -y uuid.x86_64 uuid-devel.x86_64 popt-devel.x86_64 -y && \ + yum install -y numactl.x86_64 numactl-devel.x86_64 numactl-libs.x86_64 -y && \ + rm -rf /var/cache/yum/* && rm -rf * && \ + if [ 
-n "${http_proxy}" ]; then \ + git config --global http.proxy ${http_proxy} && \ + git config --global https.proxy ${https_proxy} ; \ + fi + +RUN yum -y install rh-ruby23 rh-ruby23-ruby-devel rh-ruby23-rubygem* && \ + source /opt/rh/rh-ruby23/enable && \ + yum -y install rpm-build && \ + if [ -n "${http_proxy}" ]; then \ + gem install -p ${http_proxy} fpm ; \ + else \ + gem install fpm ; \ + fi + +# Build zlib +ARG ZLIB_VER=1.2.11 +ARG ZILB_REPO=http://zlib.net/zlib-${ZLIB_VER}.tar.gz +RUN wget -O - ${ZILB_REPO} | tar xz && \ + cd zlib-${ZLIB_VER} && \ + source /opt/rh/devtoolset-7/enable && \ + ./configure && \ + make -j$(nproc) && \ + make install && \ + cd ${WORKDIR} && rm -rf ./* + +# Install cmake +ARG CMAKE_VER=3.12.4 +ARG CMAKE_REPO=https://cmake.org/files +RUN wget -O - ${CMAKE_REPO}/v${CMAKE_VER%.*}/cmake-${CMAKE_VER}.tar.gz | tar xz && \ + cd cmake-${CMAKE_VER} && \ + source /opt/rh/devtoolset-7/enable && \ + ./bootstrap --prefix="/usr" --system-curl && \ + make -j$(nproc) && \ + make install && \ + cd ${WORKDIR} && rm -rf ./* + +# Build YASM +ARG YASM_VER=1.3.0 +ARG YASM_REPO=https://www.tortall.net/projects/yasm/releases/yasm-${YASM_VER}.tar.gz +RUN wget -O - ${YASM_REPO} | tar xz && \ + cd yasm-${YASM_VER} && \ + sed -i "s/) ytasm.*/)/" Makefile.in && \ + source /opt/rh/devtoolset-7/enable && \ + ./configure --prefix="/usr" --libdir=/usr/lib/x86_64-linux-gnu && \ + make -j$(nproc) && \ + make install && \ + cd ${WORKDIR} && rm -rf ./* + +# Build CURL +ARG CURL_VER=7.66.0 +ARG CURL_REPO=https://curl.haxx.se/download/curl-${CURL_VER}.tar.xz +RUN wget ${CURL_REPO} && \ + xz -d curl-${CURL_VER}.tar.xz && \ + tar -xvf curl-${CURL_VER}.tar && \ + cd curl-${CURL_VER} && \ + source /opt/rh/devtoolset-7/enable && \ + ./configure --with-darwinssl && \ + make -j$(nproc) && \ + make install && \ + cd ${WORKDIR} && rm -rf ./* + +# Install BOOST +ARG BOOST_REPO=https://sourceforge.net/projects/boost/files/boost/1.63.0/boost_1_63_0.tar.gz +RUN wget -O - ${BOOST_REPO} | tar xz && \ + cd boost_1_63_0 && \ + source /opt/rh/devtoolset-7/enable && \ + ./bootstrap.sh --without-libraries=python && \ + ./b2 -a cxxflags="-D_GLIBCXX_USE_CXX11_ABI=0" -j`nproc` && \ + ./b2 cxxflags="-D_GLIBCXX_USE_CXX11_ABI=0" install && \ + cd ${WORKDIR} && rm -rf ./boost_1_63_0* + +# Install SVT +RUN git clone https://github.com/OpenVisualCloud/SVT-HEVC.git && \ + cd SVT-HEVC && \ + source /opt/rh/devtoolset-7/enable && \ + git checkout ec0d95c7e0d5be20586e1b87150bdfb9ae97cf4d && \ + cd Build/linux/ && \ + ./build.sh && \ + cd Release && \ + make install && \ + cd ${WORKDIR} && rm -rf ./SVT-HEVC + +# Install glog +RUN git clone https://github.com/google/glog.git && \ + cd glog && \ + source /opt/rh/devtoolset-7/enable && \ + sed -i '23s/OFF/ON/' CMakeLists.txt && \ + cmake -H. 
-Bbuild -G "Unix Makefiles" && \ + cmake --build build && \ + cmake --build build --target install && \ + cd ${WORKDIR} && rm -rf ./glog + +# Install lttng +RUN source /opt/rh/devtoolset-7/enable && \ + wget -c https://lttng.org/files/urcu/userspace-rcu-latest-0.11.tar.bz2 && \ + tar -xjf userspace-rcu-latest-0.11.tar.bz2 && \ + cd userspace-rcu-0.11.* && \ + ./configure && \ + make -j $(nproc) && \ + make install && ldconfig && \ + cd ../ && rm -rf userspace-rcu-0.11.* && \ + wget -c http://lttng.org/files/lttng-ust/lttng-ust-latest-2.11.tar.bz2 && \ + tar -xjf lttng-ust-latest-2.11.tar.bz2 && \ + cd lttng-ust-2.11.* && \ + ./configure --disable-man-pages && \ + make -j $(nproc) && \ + make install && ldconfig && \ + cd ../ && rm -rf lttng-ust-2.11.* + +# Copy source +RUN mkdir -p save +COPY ./Update-buffer-operation-and-fix-stream-loop-coredump.patch ${WORKDIR}/save +COPY ./Disable_cxx11_abi_for_thrift.patch ${WORKDIR}/save + +# Install Thrift +ARG THRIFT_VER=0.12.0 +ARG THRIFT_REPO=http://apache.osuosl.org/thrift/${THRIFT_VER}/thrift-${THRIFT_VER}.tar.gz +RUN wget -O - ${THRIFT_REPO} | tar xz && \ + cd thrift-${THRIFT_VER} && \ + source /opt/rh/devtoolset-7/enable && \ + patch configure ../save/Disable_cxx11_abi_for_thrift.patch && \ + sed -i '21 a # include ' ./lib/cpp/src/thrift/transport/PlatformSocket.h && \ + ./configure --with-boost=/usr/local --with-boost-libdir=/usr/local/lib --with-libevent=/usr --with-java=0 && \ + make -j`nproc` && \ + make install && \ + cd ${WORKDIR} && rm -rf ./thrift-${THRIFT_VER}* + +# Install openHEVC +RUN git clone https://github.com/OpenHEVC/openHEVC.git && \ + cd openHEVC && \ + source /opt/rh/devtoolset-7/enable && \ + git config --global user.email "you@example.com" && \ + git config --global user.name "Your Name" && \ + git checkout ffmpeg_update && \ + git am --whitespace=fix ../save/Update-buffer-operation-and-fix-stream-loop-coredump.patch && \ + ./configure --libdir=/usr/lib64 --disable-sdl2 && \ + make -j `nproc` && \ + make install && \ + cd ${WORKDIR} && rm -rf ./openHEVC + +# Build Nginx +ARG NGINX_VER=1.13.1 +ARG NGINX_REPO=http://nginx.org/download/nginx-${NGINX_VER}.tar.gz +RUN wget -O - ${NGINX_REPO} | tar xz && \ + cd nginx-${NGINX_VER} && \ + source /opt/rh/devtoolset-7/enable && \ + ./configure --with-http_ssl_module && \ + make -j `nproc` && \ + make install && \ + cd ${WORKDIR} && rm -rf ./nginx* +EXPOSE 443 +EXPOSE 8080 + +# Unset proxy if necessary +RUN if [ -n "${http_proxy}" ]; then \ + sed -i '$d' /etc/yum.conf && \ + sed -i '1,$d' /etc/wgetrc && \ + git config --global --unset http.proxy && \ + git config --global --unset https.proxy ; \ + fi diff --git a/src/external/env_curl_withssl.sh b/src/external/env_curl_withssl.sh new file mode 100755 index 00000000..653f8d16 --- /dev/null +++ b/src/external/env_curl_withssl.sh @@ -0,0 +1,219 @@ +#!/usr/bin/env bash +# ==================================================================== +# Sets the cross compile environment for Android +# Based upon OpenSSL's setenv-android.sh (by TH, JW, and SM). +# +# Crypto++ Library is copyrighted as a compilation and (as of version 5.6.2) +# licensed under the Boost Software License 1.0, while the individual files +# in the compilation are all public domain. 
+# +# See http://www.cryptopp.com/wiki/Android_(Command_Line) for more details +# ==================================================================== + +unset IS_CROSS_COMPILE + +unset IS_IOS +unset IS_ANDROID +unset IS_ARM_EMBEDDED + +unset AOSP_FLAGS +unset AOSP_SYSROOT +unset AOSP_STL_INC +unset AOSP_STL_LIB +unset AOSP_BITS_INC + +# Set AOSP_TOOLCHAIN_SUFFIX to your preference of tools and STL library. +# Note: 4.9 is required for the latest architectures, like ARM64/AARCH64. +# AOSP_TOOLCHAIN_SUFFIX=4.8 +# AOSP_TOOLCHAIN_SUFFIX=4.9 +if [ -z "$AOSP_TOOLCHAIN_SUFFIX" ]; then + AOSP_TOOLCHAIN_SUFFIX=4.9 +fi + +# Set AOSP_API to the API you want to use. 'armeabi' and 'armeabi-v7a' need +# API 3 (or above), 'mips' and 'x86' need API 9 (or above), etc. +# AOSP_API="android-3" # Android 1.5 and above +# AOSP_API="android-4" # Android 1.6 and above +# AOSP_API="android-5" # Android 2.0 and above +# AOSP_API="android-8" # Android 2.2 and above +# AOSP_API="android-9" # Android 2.3 and above +# AOSP_API="android-14" # Android 4.0 and above +# AOSP_API="android-18" # Android 4.3 and above +# AOSP_API="android-19" # Android 4.4 and above +# AOSP_API="android-21" # Android 5.0 and above +# AOSP_API="android-23" # Android 6.0 and above +if [ -z "$AOSP_API" ]; then + AOSP_API="android-21" +fi + +##################################################################### + +# ANDROID_NDK_ROOT should always be set by the user (even when not running this script) +# http://groups.google.com/group/android-ndk/browse_thread/thread/a998e139aca71d77. +# If the user did not specify the NDK location, try and pick it up. We expect something +# like ANDROID_NDK_ROOT=/opt/android-ndk-r10e or ANDROID_NDK_ROOT=/usr/local/android-ndk-r10e. + +export ANDROID_NDK_ROOT="${PWD}/../android-ndk-r14b" + +if [ -z "$ANDROID_NDK_ROOT" ]; then + ANDROID_NDK_ROOT=$(find /opt -maxdepth 1 -type d -name android-ndk-r10* 2>/dev/null | tail -1) + + if [ -z "$ANDROID_NDK_ROOT" ]; then + ANDROID_NDK_ROOT=$(find /usr/local -maxdepth 1 -type d -name android-ndk-r10* 2>/dev/null | tail -1) + fi + if [ -z "$ANDROID_NDK_ROOT" ]; then + ANDROID_NDK_ROOT=$(find $HOME -maxdepth 1 -type d -name android-ndk-r10* 2>/dev/null | tail -1) + fi +fi + +# Error checking +if [ ! -d "$ANDROID_NDK_ROOT/toolchains" ]; then + echo "ERROR: ANDROID_NDK_ROOT is not a valid path. Please set it." 
+ [ "$0" = "$BASH_SOURCE" ] && exit 1 || return 1 +fi + +##################################################################### + +if [ "$#" -lt 1 ]; then + THE_ARCH=armv7 +else + THE_ARCH=$(tr [A-Z] [a-z] <<< "$1") +fi + +# https://developer.android.com/ndk/guides/abis.html +case "$THE_ARCH" in + arm|armv5|armv6|armv7|armeabi) + TOOLCHAIN_BASE="arm-linux-androideabi" + TOOLNAME_BASE="arm-linux-androideabi" + AOSP_ABI="armeabi" + AOSP_ARCH="arch-arm" + AOSP_FLAGS="-march=armv5te -mtune=xscale -mthumb -msoft-float -funwind-tables -fexceptions -frtti" + ;; + armv7a|armeabi-v7a) + TOOLCHAIN_BASE="arm-linux-androideabi" + TOOLNAME_BASE="arm-linux-androideabi" + AOSP_ABI="armeabi-v7a" + AOSP_ARCH="arch-arm" + AOSP_FLAGS="-march=armv7-a -mthumb -mfpu=vfpv3-d16 -mfloat-abi=softfp -Wl,--fix-cortex-a8 -funwind-tables -fexceptions -frtti" + ;; + hard|armv7a-hard|armeabi-v7a-hard) + TOOLCHAIN_BASE="arm-linux-androideabi" + TOOLNAME_BASE="arm-linux-androideabi" + AOSP_ABI="armeabi-v7a" + AOSP_ARCH="arch-arm" + AOSP_FLAGS="-mhard-float -D_NDK_MATH_NO_SOFTFP=1 -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp -Wl,--fix-cortex-a8 -funwind-tables -fexceptions -frtti -Wl,--no-warn-mismatch -Wl,-lm_hard" + ;; + neon|armv7a-neon) + TOOLCHAIN_BASE="arm-linux-androideabi" + TOOLNAME_BASE="arm-linux-androideabi" + AOSP_ABI="armeabi-v7a" + AOSP_ARCH="arch-arm" + AOSP_FLAGS="-march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp -Wl,--fix-cortex-a8 -funwind-tables -fexceptions -frtti" + ;; + armv8|armv8a|aarch64|arm64|arm64-v8a) + TOOLCHAIN_BASE="aarch64-linux-android" + TOOLNAME_BASE="aarch64-linux-android" + AOSP_ABI="arm64-v8a" + AOSP_ARCH="arch-arm64" + AOSP_FLAGS="-funwind-tables -fexceptions -frtti" + ;; + mips|mipsel) + TOOLCHAIN_BASE="mipsel-linux-android" + TOOLNAME_BASE="mipsel-linux-android" + AOSP_ABI="mips" + AOSP_ARCH="arch-mips" + AOSP_FLAGS="-funwind-tables -fexceptions -frtti" + ;; + mips64|mipsel64|mips64el) + TOOLCHAIN_BASE="mips64el-linux-android" + TOOLNAME_BASE="mips64el-linux-android" + AOSP_ABI="mips64" + AOSP_ARCH="arch-mips64" + AOSP_FLAGS="-funwind-tables -fexceptions -frtti" + ;; + x86) + TOOLCHAIN_BASE="x86" + TOOLNAME_BASE="i686-linux-android" + AOSP_ABI="x86" + AOSP_ARCH="arch-x86" + AOSP_FLAGS="-march=i686 -mtune=intel -mssse3 -mfpmath=sse -funwind-tables -fexceptions -frtti" + ;; + x86_64|x64) + TOOLCHAIN_BASE="x86_64" + TOOLNAME_BASE="x86_64-linux-android" + AOSP_ABI="x86_64" + AOSP_ARCH="arch-x86_64" + AOSP_FLAGS="-march=x86-64 -msse4.2 -mpopcnt -mtune=intel -funwind-tables -fexceptions -frtti" + ;; + *) + echo "ERROR: Unknown architecture $1" + [ "$0" = "$BASH_SOURCE" ] && exit 1 || return 1 + ;; +esac + + +# Based on ANDROID_NDK_ROOT, try and pick up the path for the tools. We expect something +# like /opt/android-ndk-r10e/toolchains/arm-linux-androideabi-4.7/prebuilt/linux-x86_64/bin +# Once we locate the tools, we add it to the PATH. +AOSP_TOOLCHAIN_PATH="" +for host in "linux-x86_64" "darwin-x86_64" "linux-x86" "darwin-x86" +do + if [ -d "$ANDROID_NDK_ROOT/toolchains/$TOOLCHAIN_BASE-$AOSP_TOOLCHAIN_SUFFIX/prebuilt/$host/bin" ]; then + AOSP_TOOLCHAIN_PATH="$ANDROID_NDK_ROOT/toolchains/$TOOLCHAIN_BASE-$AOSP_TOOLCHAIN_SUFFIX/prebuilt/$host/bin" + break + fi +done + + +##################################################################### + +# Error checking +if [ ! -d "$ANDROID_NDK_ROOT/platforms/$AOSP_API" ]; then + echo "ERROR: AOSP_API is not valid. Does the NDK support the API? Please edit this script." + [ "$0" = "$BASH_SOURCE" ] && exit 1 || return 1 +elif [ ! 
-d "$ANDROID_NDK_ROOT/platforms/$AOSP_API/$AOSP_ARCH" ]; then + echo "ERROR: AOSP_ARCH is not valid. Does the NDK support the architecture? Please edit this script." + [ "$0" = "$BASH_SOURCE" ] && exit 1 || return 1 +fi + +# Android SYSROOT. It will be used on the command line with --sysroot +# http://android.googlesource.com/platform/ndk/+/ics-mr0/docs/STANDALONE-TOOLCHAIN.html +export AOSP_SYSROOT="$ANDROID_NDK_ROOT/platforms/$AOSP_API/$AOSP_ARCH" + +# TODO: export for the previous GNUmakefile-cross. These can go away eventually. +export ANDROID_SYSROOT=$AOSP_SYSROOT + + +##################################################################### + + +export CPP="$AOSP_TOOLCHAIN_PATH/$TOOLNAME_BASE-cpp --sysroot=$AOSP_SYSROOT" +export CC="$AOSP_TOOLCHAIN_PATH/$TOOLNAME_BASE-gcc --sysroot=$AOSP_SYSROOT" +export CXX="$AOSP_TOOLCHAIN_PATH/$TOOLNAME_BASE-g++ --sysroot=$AOSP_SYSROOT" +export CFLAGS="-pie -fPIE" +export LDFLAGS="-pie -fPIE" +##################################################################### + +export PREFIX=${PWD}/../curl-output/$AOSP_ABI +export CC="$AOSP_TOOLCHAIN_PATH/$TOOLNAME_BASE-gcc --sysroot=$AOSP_SYSROOT" + + +VERBOSE=1 +if [ ! -z "$VERBOSE" ] && [ "$VERBOSE" != "0" ]; then + echo "ANDROID_NDK_ROOT: $ANDROID_NDK_ROOT" + echo "AOSP_TOOLCHAIN_PATH: $AOSP_TOOLCHAIN_PATH" + echo "AOSP_ABI: $AOSP_ABI" + echo "AOSP_API: $AOSP_API" + echo "CC: $CC" + echo "AOSP_SYSROOT: $AOSP_SYSROOT" +fi + +./configure \ + --prefix=$PREFIX \ + --with-darwinssl \ + --with-ssl=${PWD}/../openssl-output/ \ + --enable-static \ + --enable-shared \ + --host=$TOOLNAME_BASE + +[ "$0" = "$BASH_SOURCE" ] && exit 0 || return 0 diff --git a/src/external/env_openssl_arm64.sh b/src/external/env_openssl_arm64.sh new file mode 100644 index 00000000..577374d1 --- /dev/null +++ b/src/external/env_openssl_arm64.sh @@ -0,0 +1,233 @@ +#!/bin/bash +# Cross-compile environment for Android on ARMv7 and x86 +# +# Contents licensed under the terms of the OpenSSL license +# http://www.openssl.org/source/license.html +# +# See http://wiki.openssl.org/index.php/FIPS_Library_and_Android +# and http://wiki.openssl.org/index.php/Android + +##################################################################### + +# Set ANDROID_NDK_ROOT to you NDK location. For example, +# /opt/android-ndk-r8e or /opt/android-ndk-r9. This can be done in a +# login script. If ANDROID_NDK_ROOT is not specified, the script will +# try to pick it up with the value of _ANDROID_NDK_ROOT below. If +# ANDROID_NDK_ROOT is set, then the value is ignored. +# _ANDROID_NDK="android-ndk-r8e" +_ANDROID_NDK="android-ndk-r14b" +# _ANDROID_NDK="android-ndk-r10" + +# Set _ANDROID_EABI to the EABI you want to use. You can find the +# list in $ANDROID_NDK_ROOT/toolchains. This value is always used. +# _ANDROID_EABI="x86-4.6" +# _ANDROID_EABI="arm-linux-androideabi-4.6" +_ANDROID_EABI="aarch64-linux-android-4.9" + +# Set _ANDROID_ARCH to the architecture you are building for. +# This value is always used. +# _ANDROID_ARCH=arch-x86 +_ANDROID_ARCH=arch-arm64 + +# Set _ANDROID_API to the API you want to use. You should set it +# to one of: android-14, android-9, android-8, android-14, android-5 +# android-4, or android-3. You can't set it to the latest (for +# example, API-17) because the NDK does not supply the platform. At +# Android 5.0, there will likely be another platform added (android-22?). +# This value is always used. 
+# _ANDROID_API="android-14" +_ANDROID_API="android-21" +# _ANDROID_API="android-19" + +##################################################################### + +# If the user did not specify the NDK location, try and pick it up. +# We expect something like ANDROID_NDK_ROOT=/opt/android-ndk-r8e +# or ANDROID_NDK_ROOT=/usr/local/android-ndk-r8e. + +if [ -z "$ANDROID_NDK_ROOT" ]; then + + _ANDROID_NDK_ROOT="${PWD}/../build/external/android/android-ndk-r14b" + if [ -z "$_ANDROID_NDK_ROOT" ] && [ -d "/usr/local/$_ANDROID_NDK" ]; then + _ANDROID_NDK_ROOT="/usr/local/$_ANDROID_NDK" + fi + + if [ -z "$_ANDROID_NDK_ROOT" ] && [ -d "/opt/$_ANDROID_NDK" ]; then + _ANDROID_NDK_ROOT="/opt/$_ANDROID_NDK" + fi + + if [ -z "$_ANDROID_NDK_ROOT" ] && [ -d "$HOME/$_ANDROID_NDK" ]; then + _ANDROID_NDK_ROOT="$HOME/$_ANDROID_NDK" + fi + + if [ -z "$_ANDROID_NDK_ROOT" ] && [ -d "$PWD/$_ANDROID_NDK" ]; then + _ANDROID_NDK_ROOT="$PWD/$_ANDROID_NDK" + fi + + # If a path was set, then export it + if [ ! -z "$_ANDROID_NDK_ROOT" ] && [ -d "$_ANDROID_NDK_ROOT" ]; then + export ANDROID_NDK_ROOT="$_ANDROID_NDK_ROOT" + fi +fi + +# Error checking +# ANDROID_NDK_ROOT should always be set by the user (even when not running this script) +# http://groups.google.com/group/android-ndk/browse_thread/thread/a998e139aca71d77 +if [ -z "$ANDROID_NDK_ROOT" ] || [ ! -d "$ANDROID_NDK_ROOT" ]; then + echo "Error: ANDROID_NDK_ROOT is not a valid path. Please edit this script." + # echo "$ANDROID_NDK_ROOT" + # exit 1 +fi + +# Error checking +if [ ! -d "$ANDROID_NDK_ROOT/toolchains" ]; then + echo "Error: ANDROID_NDK_ROOT/toolchains is not a valid path. Please edit this script." + # echo "$ANDROID_NDK_ROOT/toolchains" + # exit 1 +fi + +# Error checking +if [ ! -d "$ANDROID_NDK_ROOT/toolchains/$_ANDROID_EABI" ]; then + echo "Error: ANDROID_EABI is not a valid path. Please edit this script." + # echo "$ANDROID_NDK_ROOT/toolchains/$_ANDROID_EABI" + # exit 1 +fi + +##################################################################### + +# Based on ANDROID_NDK_ROOT, try and pick up the required toolchain. We expect something like: +# /opt/android-ndk-r83/toolchains/arm-linux-androideabi-4.7/prebuilt/linux-x86_64/bin +# Once we locate the toolchain, we add it to the PATH. Note: this is the 'hard way' of +# doing things according to the NDK documentation for Ice Cream Sandwich. +# https://android.googlesource.com/platform/ndk/+/ics-mr0/docs/STANDALONE-TOOLCHAIN.html + +ANDROID_TOOLCHAIN="" +for host in "linux-x86_64" "linux-x86" "darwin-x86_64" "darwin-x86" +do + if [ -d "$ANDROID_NDK_ROOT/toolchains/$_ANDROID_EABI/prebuilt/$host/bin" ]; then + ANDROID_TOOLCHAIN="$ANDROID_NDK_ROOT/toolchains/$_ANDROID_EABI/prebuilt/$host/bin" + break + fi +done + +# Error checking +if [ -z "$ANDROID_TOOLCHAIN" ] || [ ! -d "$ANDROID_TOOLCHAIN" ]; then + echo "Error: ANDROID_TOOLCHAIN is not valid. Please edit this script." + # echo "$ANDROID_TOOLCHAIN" + # exit 1 +fi + +case $_ANDROID_ARCH in + arch-arm64) + ANDROID_TOOLS="aarch64-linux-android-gcc aarch64-linux-android-ranlib aarch64-linux-android-ld" + ;; + arch-x86) + ANDROID_TOOLS="i686-linux-android-gcc i686-linux-android-ranlib i686-linux-android-ld" + ;; + *) + echo "ERROR ERROR ERROR" + ;; +esac + +for tool in $ANDROID_TOOLS +do + # Error checking + if [ ! -e "$ANDROID_TOOLCHAIN/$tool" ]; then + echo "Error: Failed to find $tool. Please edit this script." + # echo "$ANDROID_TOOLCHAIN/$tool" + # exit 1 + fi +done + +# Only modify/export PATH if ANDROID_TOOLCHAIN good +if [ ! 
-z "$ANDROID_TOOLCHAIN" ]; then + export ANDROID_TOOLCHAIN="$ANDROID_TOOLCHAIN" + export PATH="$ANDROID_TOOLCHAIN":"$PATH" +fi + +##################################################################### + +# For the Android SYSROOT. Can be used on the command line with --sysroot +# https://android.googlesource.com/platform/ndk/+/ics-mr0/docs/STANDALONE-TOOLCHAIN.html +export ANDROID_SYSROOT="$ANDROID_NDK_ROOT/platforms/$_ANDROID_API/$_ANDROID_ARCH" +export CROSS_SYSROOT="$ANDROID_SYSROOT" +export NDK_SYSROOT="$ANDROID_SYSROOT" + +# Error checking +if [ -z "$ANDROID_SYSROOT" ] || [ ! -d "$ANDROID_SYSROOT" ]; then + echo "Error: ANDROID_SYSROOT is not valid. Please edit this script." + # echo "$ANDROID_SYSROOT" + # exit 1 +fi + +##################################################################### + +# If the user did not specify the FIPS_SIG location, try and pick it up +# If the user specified a bad location, then try and pick it up too. +if [ -z "$FIPS_SIG" ] || [ ! -e "$FIPS_SIG" ]; then + + # Try and locate it + _FIPS_SIG="" + if [ -d "/usr/local/ssl/$_ANDROID_API" ]; then + _FIPS_SIG=`find "/usr/local/ssl/$_ANDROID_API" -name incore` + fi + + if [ ! -e "$_FIPS_SIG" ]; then + _FIPS_SIG=`find $PWD -name incore` + fi + + # If a path was set, then export it + if [ ! -z "$_FIPS_SIG" ] && [ -e "$_FIPS_SIG" ]; then + export FIPS_SIG="$_FIPS_SIG" + fi +fi + +# Error checking. Its OK to ignore this if you are *not* building for FIPS +if [ -z "$FIPS_SIG" ] || [ ! -e "$FIPS_SIG" ]; then + echo "Error: FIPS_SIG does not specify incore module. Please edit this script." + # echo "$FIPS_SIG" + # exit 1 +fi + +##################################################################### + +# Most of these should be OK (MACHINE, SYSTEM, ARCH). RELEASE is ignored. +export MACHINE=arm64-v8a +export RELEASE=2.6.37 +export SYSTEM=android +export ARCH=arm64 +export CROSS_COMPILE="aarch64-linux-android-" + +if [ "$_ANDROID_ARCH" == "arch-x86" ]; then + export MACHINE=i686 + export RELEASE=2.6.37 + export SYSTEM=android + export ARCH=x86 + export CROSS_COMPILE="i686-linux-android-" +fi + +# For the Android toolchain +# https://android.googlesource.com/platform/ndk/+/ics-mr0/docs/STANDALONE-TOOLCHAIN.html +export ANDROID_SYSROOT="$ANDROID_NDK_ROOT/platforms/$_ANDROID_API/$_ANDROID_ARCH" +export SYSROOT="$ANDROID_SYSROOT" +export NDK_SYSROOT="$ANDROID_SYSROOT" +export ANDROID_NDK_SYSROOT="$ANDROID_SYSROOT" +export ANDROID_API="$_ANDROID_API" + +# CROSS_COMPILE and ANDROID_DEV are DFW (Don't Fiddle With). Its used by OpenSSL build system. +# export CROSS_COMPILE="arm-linux-androideabi-" +export ANDROID_DEV="$ANDROID_NDK_ROOT/platforms/$_ANDROID_API/$_ANDROID_ARCH/usr" +export HOSTCC=gcc + +VERBOSE=1 +if [ ! 
-z "$VERBOSE" ] && [ "$VERBOSE" != "0" ]; then + echo "ANDROID_NDK_ROOT: $ANDROID_NDK_ROOT" + echo "ANDROID_ARCH: $_ANDROID_ARCH" + echo "ANDROID_EABI: $_ANDROID_EABI" + echo "ANDROID_API: $ANDROID_API" + echo "ANDROID_SYSROOT: $ANDROID_SYSROOT" + echo "ANDROID_TOOLCHAIN: $ANDROID_TOOLCHAIN" + echo "FIPS_SIG: $FIPS_SIG" + echo "CROSS_COMPILE: $CROSS_COMPILE" + echo "ANDROID_DEV: $ANDROID_DEV" +fi diff --git a/src/external/ffmpeg_update_add_circle_list_for_to_free_frame.patch b/src/external/ffmpeg_update_add_circle_list_for_to_free_frame.patch deleted file mode 100644 index 3f5cb1a7..00000000 --- a/src/external/ffmpeg_update_add_circle_list_for_to_free_frame.patch +++ /dev/null @@ -1,110 +0,0 @@ -diff --git a/libavcodec/decode.c b/libavcodec/decode.c -index 052f93d..02d6725 100644 ---- a/libavcodec/decode.c -+++ b/libavcodec/decode.c -@@ -139,6 +139,8 @@ static int unrefcount_frame(AVCodecInternal *avci, AVFrame *frame) - int ret; - - /* move the original frame to our backup */ -+ avci->to_free = avci->to_free_list[avci->to_free_idx]; -+ avci->to_free_idx = (avci->to_free_idx + 1) % LIST_SIZE; - av_frame_unref(avci->to_free); - av_frame_move_ref(avci->to_free, frame); - -@@ -1825,7 +1827,14 @@ void avcodec_flush_buffers(AVCodecContext *avctx) - ff_decode_bsfs_uninit(avctx); - - if (!avctx->refcounted_frames) -- av_frame_unref(avctx->internal->to_free); -+ { -+ //av_frame_unref(avctx->internal->to_free); -+ for(int i = 0; i < LIST_SIZE; i++) -+ { -+ av_frame_free(&avctx->internal->to_free_list[i]); -+ } -+ av_freep(&avctx->internal->to_free_list); -+ } - } - - void ff_decode_bsfs_uninit(AVCodecContext *avctx) -diff --git a/libavcodec/internal.h b/libavcodec/internal.h -index d344277..ce227cb 100644 ---- a/libavcodec/internal.h -+++ b/libavcodec/internal.h -@@ -33,6 +33,8 @@ - #include "avcodec.h" - #include "config.h" - -+#define LIST_SIZE 50 -+ - /** - * The codec does not modify any global variables in the init function, - * allowing to call the init function without locking any global mutexes. 
-@@ -148,6 +150,8 @@ typedef struct AVCodecInternal { - int last_audio_frame; - - AVFrame *to_free; -+ AVFrame *to_free_list[LIST_SIZE]; -+ int to_free_idx; - - FramePool *pool; - -diff --git a/libavcodec/utils.c b/libavcodec/utils.c -index 1336e92..41098b5 100644 ---- a/libavcodec/utils.c -+++ b/libavcodec/utils.c -@@ -666,12 +666,23 @@ int attribute_align_arg avcodec_open2(AVCodecContext *avctx, const AVCodec *code - ret = AVERROR(ENOMEM); - goto free_and_end; - } -- -+#if 0 - avctx->internal->to_free = av_frame_alloc(); - if (!avctx->internal->to_free) { - ret = AVERROR(ENOMEM); - goto free_and_end; - } -+#endif -+ avctx->internal->to_free_idx = 0; -+ for(int i = 0; i < LIST_SIZE; i++) -+ { -+ avctx->internal->to_free_list[i] = av_frame_alloc(); -+ if (!avctx->internal->to_free_list[i]) -+ { -+ ret = AVERROR(ENOMEM); -+ goto free_and_end; -+ } -+ } - - avctx->internal->compat_decode_frame = av_frame_alloc(); - if (!avctx->internal->compat_decode_frame) { -@@ -1126,7 +1137,13 @@ FF_ENABLE_DEPRECATION_WARNINGS - av_dict_free(&tmp); - av_freep(&avctx->priv_data); - if (avctx->internal) { -- av_frame_free(&avctx->internal->to_free); -+ //av_frame_free(&avctx->internal->to_free); -+ for(int i = 0; i < LIST_SIZE; i++) -+ { -+ av_frame_free(&avctx->internal->to_free_list[i]); -+ } -+ av_freep(&avctx->internal->to_free_list); -+ - av_frame_free(&avctx->internal->compat_decode_frame); - av_frame_free(&avctx->internal->buffer_frame); - av_packet_free(&avctx->internal->buffer_pkt); -@@ -1179,7 +1196,13 @@ av_cold int avcodec_close(AVCodecContext *avctx) - avctx->codec->close(avctx); - avctx->internal->byte_buffer_size = 0; - av_freep(&avctx->internal->byte_buffer); -- av_frame_free(&avctx->internal->to_free); -+ //av_frame_free(&avctx->internal->to_free); -+ for(int i = 0; i < LIST_SIZE; i++) -+ { -+ av_frame_free(&avctx->internal->to_free_list[i]); -+ } -+ av_freep(&avctx->internal->to_free_list); -+ - av_frame_free(&avctx->internal->compat_decode_frame); - av_frame_free(&avctx->internal->buffer_frame); - av_packet_free(&avctx->internal->buffer_pkt); diff --git a/src/external/fpm.sh b/src/external/fpm.sh index 19a5740f..fbc27050 100755 --- a/src/external/fpm.sh +++ b/src/external/fpm.sh @@ -1,68 +1,112 @@ -#!/bin/bash -x +#!/bin/bash -ex ORIPATH=$(pwd) -VERSION=$1 +ITEM=$1 +VERSION=$2 PACKAGE=package -LIBDIR=${ORIPATH}/../../OMAF-Sample/client/${PACKAGE}/files/usr/lib64/immersive-client/ -BINDIR=${ORIPATH}/../../OMAF-Sample/client/${PACKAGE}/files/usr/bin/immersive-client/ +LIBDIR=${ORIPATH}/../build/${PACKAGE}/${ITEM}/usr/lib64/immersive-${ITEM}/ +BINDIR=${ORIPATH}/../build/${PACKAGE}/${ITEM}/usr/bin/immersive-${ITEM}/ +NAME=$(echo "immersive-${item}") parameters_usage(){ - echo 'Usage: 1. : Version of current package.' -} - -program_exists() { - local RET='0' - command -v $1 >/dev/null 2>&1 || { local RET='1'; } - # fail on non-zero return value - if [ ${RET} -ne 0 ]; then - return 1 - fi - - return 0 + echo 'Usage: 1. : [ server, client ]' + echo ' 2. : Version of current package.' 
} package(){ - echo 'sudo ldconfig' > post + if [ ${ITEM} = "server" ] ; then + echo 'sudo cp /usr/lib64/immersive-server/libHighResPlusFullLowResPacking.so /usr/local/lib' > post + echo 'sudo cp /usr/lib64/immersive-server/libHevcVideoStreamProcess.so /usr/local/lib' > post + #echo 'sudo cp /usr/lib64/immersive-server/libSingleVideoPacking.so /usr/local/lib' >> post + echo 'sudo ldconfig && sudo cp /usr/bin/immersive-server/WorkerServer /root' >> post + elif [ ${ITEM} = "client" ] ; then + echo 'sudo ldconfig' > post + fi fpm \ -f \ -s dir \ -t $1 \ - -n immersive-client \ - -v ${VERSION} \ + -n immersive-$2$3 \ + -v 1${VERSION} \ --iteration 1.el7 \ - -C ${PACKAGE}/files \ + -C ${PACKAGE}/$2 \ -p ${PACKAGE} \ --after-install post rm -rf ./post } -if [ "${VERSION}" = "-h" ] || [ $# != 1 ] ; then +if [ "${ITEM}" = "-h" ] || [ $# != 2 ] ; then parameters_usage exit 0 fi - -program_exists fpm -if [ $? != 0 ];then - sudo apt-get -y install ruby rubygems ruby-dev - sudo gem install fpm +if [ "${ITEM}" != "server" ] && [ "${ITEM}" != "client" ] ; then + parameters_usage + exit 0 fi mkdir -p ${LIBDIR} mkdir -p ${BINDIR} -git log | head -n 3 > git_info -cd ../build -cp external/MediaServerStudioEssentialsKBL2019R1HF1_10010/intel-linux-media-kbl-10010/opt/intel/mediasdk/lib64/libva-drm.so.2 ${LIBDIR} -cp external/MediaServerStudioEssentialsKBL2019R1HF1_10010/intel-linux-media-kbl-10010/opt/intel/mediasdk/lib64/libva-x11.so.2 ${LIBDIR} -cp external/MediaServerStudioEssentialsKBL2019R1HF1_10010/intel-linux-media-kbl-10010/opt/intel/mediasdk/lib64/libva.so.2 ${LIBDIR} -cp external/glog/.libs/libglog.so.0 ${LIBDIR} -cp client/360SCVP/lib360SCVP.so ${LIBDIR} -cp client/OmafDashAccess/libOmafDashAccess.so ${LIBDIR} -cp client/player/render ${BINDIR} -cp ../player/config.xml ${BINDIR} -mv ../external/git_info ${BINDIR} -cd ../../OMAF-Sample/client -strip ${LIBDIR}/* -strip ${BINDIR}/render -package rpm -package deb +if [ ${ITEM} = "server" ] ; then + echo `fgrep -rn "checkout" install_SVT.sh` | awk '{ print $3 }' > SVT_version + git log | head -n 3 > git_info + cd ../build + cp external/ffmpeg_server_so/libavcodec.so.58 ${LIBDIR} + cp external/ffmpeg_server_so/libavutil.so.56 ${LIBDIR} + cp external/ffmpeg_server_so/libavformat.so.58 ${LIBDIR} + cp external/ffmpeg_server_so/libavfilter.so.7 ${LIBDIR} + cp external/ffmpeg_server_so/libswscale.so.5 ${LIBDIR} + cp external/ffmpeg_server_so/libswresample.so.3 ${LIBDIR} + cp external/ffmpeg_server_so/libpostproc.so.55 ${LIBDIR} + cp /usr/local/lib/libHighResPlusFullLowResPacking.so ${LIBDIR} + cp /usr/local/lib/libHevcVideoStreamProcess.so ${LIBDIR} + #cp /usr/local/lib/libSingleVideoPacking.so ${LIBDIR} + cp /usr/local/lib/libglog.so.0 ${LIBDIR} + cp /usr/local/lib/libsafestring_shared.so ${LIBDIR} + cp /usr/local/lib/libthrift-0.12.0.so ${LIBDIR} + cp /usr/local/lib/libthriftnb-0.12.0.so ${LIBDIR} + cp /usr/local/lib64/libSvtHevcEnc.so.1 ${LIBDIR} + cp /usr/lib64/libopenhevc.so.1 ${LIBDIR} + cp /usr/lib64/libSDL2-2.0.so.0 ${LIBDIR} + cp server/360SCVP/lib360SCVP.so ${LIBDIR} + cp server/VROmafPacking/libVROmafPacking.so ${LIBDIR} + cp ../ffmpeg/dependency/libEncoder.so ${LIBDIR} + cp ../ffmpeg/dependency/libDistributedEncoder.so ${LIBDIR} + cp ../ffmpeg/dependency/WorkerServer ${BINDIR} + cp server/ffmpeg/ffmpeg ${BINDIR} + mv ../external/SVT_version ${BINDIR} + mv ../external/git_info ${BINDIR} + strip ${LIBDIR}/* + strip ${BINDIR}/WorkerServer ${BINDIR}/ffmpeg + package rpm ${ITEM} + package deb ${ITEM} +fi + +if [ ${ITEM} = "client" ] ; then + git log | 
head -n 3 > git_info + cd ../build + cp external/ffmpeg_client_so/libavcodec.so.58 ${LIBDIR} + cp external/ffmpeg_client_so/libavutil.so.56 ${LIBDIR} + cp external/ffmpeg_client_so/libavformat.so.58 ${LIBDIR} + cp external/ffmpeg_client_so/libavfilter.so.7 ${LIBDIR} + cp external/ffmpeg_client_so/libavdevice.so.58 ${LIBDIR} + cp external/ffmpeg_client_so/libswscale.so.5 ${LIBDIR} + cp external/ffmpeg_client_so/libswresample.so.3 ${LIBDIR} + cp external/ffmpeg_server_so/libpostproc.so.55 ${LIBDIR} + cp /usr/local/lib/libva-drm.so.2 ${LIBDIR} + cp /usr/local/lib/libva-x11.so.2 ${LIBDIR} + cp /usr/local/lib/libva.so.2 ${LIBDIR} + cp /usr/local/lib/libglog.so.0 ${LIBDIR} + cp /usr/local/lib/libsafestring_shared.so ${LIBDIR} + cp /usr/lib64/libSDL2-2.0.so.0 ${LIBDIR} + cp client/360SCVP/lib360SCVP.so ${LIBDIR} + cp client/OmafDashAccess/libOmafDashAccess.so ${LIBDIR} + cp client/player/render ${BINDIR} + cp ../player/config.xml ${BINDIR} + mv ../external/git_info ${BINDIR} + strip ${LIBDIR}/* + strip ${BINDIR}/render + package rpm ${ITEM} + package deb ${ITEM} +fi + diff --git a/src/external/generate_DoxygenFiles.sh b/src/external/generate_DoxygenFiles.sh index edccd562..926232c5 100755 --- a/src/external/generate_DoxygenFiles.sh +++ b/src/external/generate_DoxygenFiles.sh @@ -10,10 +10,6 @@ cd ../OmafDashAccess doxygen Doxyfile cd - -cd ../distributed_encoder -doxygen Doxyfile -cd - - cd ../VROmafPacking doxygen Doxyfile cd - diff --git a/src/external/install_FFmpeg.sh b/src/external/install_FFmpeg.sh index 0cb10300..7712aa22 100755 --- a/src/external/install_FFmpeg.sh +++ b/src/external/install_FFmpeg.sh @@ -1,32 +1,39 @@ #!/bin/bash -x TARGET=$1 -ORIGIN_PATH=${PWD} +REPO=$2 + cd .. -if [ ! -d "./FFmpeg" ];then - git clone https://github.com/FFmpeg/FFmpeg.git +if [ "${REPO}" != "oss" ] ; then + if [ ! -d "./FFmpeg" ] ; then + if [ ! -f "./ffmpeg-4.3.1.tar.xz" ] ; then + wget http://ffmpeg.org/releases/ffmpeg-4.3.1.tar.xz + fi + tar xf ffmpeg-4.3.1.tar.xz && mv ffmpeg-4.3.1 FFmpeg + fi + if [ ! -f "FFmpeg/libavcodec/distributed_encoder.c" ] ; then + cd FFmpeg + patch -p1 < ../ffmpeg/patches/FFmpeg_OMAF.patch + cd .. + fi fi -cd FFmpeg - -if [ ${TARGET} == "server" ] ; then - git checkout release/4.1 - git checkout c2ac3b8e6a040e33d53fa13548848c8ba981a8e4 - cd - - patch -p1 < external/FFmpeg_OMAF.patch +if [ "${TARGET}" == "server" ] ; then - cd build/external/ffmpeg - # export CXXFLAGS="$CXXFLAGS -fPIC" - ../../../FFmpeg/configure --prefix=/usr --libdir=/usr/local/lib --enable-static --enable-shared --enable-gpl --enable-nonfree --disable-optimizations --disable-vaapi - make -j `nproc` + mkdir -p build/external/ffmpeg_server + cd build/external/ffmpeg_server + ../../../FFmpeg/configure --prefix=/usr --libdir=/usr/local/lib \ + --enable-static --enable-shared --enable-gpl --enable-nonfree \ + --disable-optimizations --disable-vaapi + make -j $(nproc) sudo make install -elif [ ${TARGET} == "client" ] ; then +elif [ "${TARGET}" == "client" ] ; then - patch -p1 < ../external/0001-Add-avcodec_receive_frame2-for-vaapi-hardware-decodi.patch - ./configure - make -j `nproc` + mkdir -p build/external/ffmpeg_client + cd build/external/ffmpeg_client + ../../../FFmpeg/configure --enable-shared + make -j $(nproc) sudo make install - -fi +fi diff --git a/src/external/install_SVT.sh b/src/external/install_SVT.sh index ec2bbf51..58553506 100755 --- a/src/external/install_SVT.sh +++ b/src/external/install_SVT.sh @@ -1,4 +1,6 @@ #!/bin/sh -e + +mkdir -p ../build/external cd ../build/external if [ ! 
-d "./SVT-HEVC" ] ; then @@ -12,5 +14,3 @@ cd Build/linux/ ./build.sh cd Release sudo make install -cd ../../../../../../external - diff --git a/src/external/install_glog.sh b/src/external/install_glog.sh new file mode 100755 index 00000000..7d47a405 --- /dev/null +++ b/src/external/install_glog.sh @@ -0,0 +1,14 @@ +#!/bin/bash -e + +mkdir -p ../build/external +cd ../build/external +if [ ! -d "./glog" ] ; then + git clone https://github.com/google/glog.git +fi + +cd glog +sed -i '23s/OFF/ON/' CMakeLists.txt +cmake -H. -Bbuild -G "Unix Makefiles" +cmake --build build +cmake --build build --target test +sudo cmake --build build --target install diff --git a/src/external/install_lttng.sh b/src/external/install_lttng.sh new file mode 100755 index 00000000..d7ae864e --- /dev/null +++ b/src/external/install_lttng.sh @@ -0,0 +1,82 @@ +#!/bin/bash -ex + +export LD_LIBRARY_PATH=/usr/local/lib/:/usr/local/lib64/:/usr/lib64:$LD_LIBRARY_PATH + +mkdir -p ../build/external/lttng +cd ../build/external/lttng +OS=$(awk -F= '/^NAME/{print $2}' /etc/os-release) + +# Install liburcu library +if [ ! -f "./userspace-rcu-latest-0.11.tar.bz2" ];then + wget -c https://lttng.org/files/urcu/userspace-rcu-latest-0.11.tar.bz2 + tar -xjf userspace-rcu-latest-0.11.tar.bz2 +fi +cd userspace-rcu-0.11.* +./configure +make -j $(nproc) +sudo make install +sudo ldconfig +cd ../ + +# Install uuid, popt and other dependencies +if [ "${OS}" == \""Ubuntu"\" ];then + echo "Ubuntu OS" + sudo apt-get install uuid-dev -y + sudo apt-get install libpopt-dev -y + sudo apt-get install libxml2-dev -y + sudo apt-get install libdw-dev -y +elif [ "${OS}" == \""CentOS Linux"\" ];then + echo "CentOS OS" + sudo yum install uuid.x86_64 -y + sudo yum install libuuid -y + sudo yum install libuuid-devel -y + sudo yum install uuid-devel.x86_64 -y + sudo yum install popt-devel.x86_64 -y + sudo yum install glib2-devel -y + sudo yum install elfutils-devel -y +fi + +# Install numactl +if [ "${OS}" == \""Ubuntu"\" ];then + sudo apt-get install numactl -y + sudo apt-get install libnuma-dev -y +elif [ "${OS}" == \""CentOS Linux"\" ];then + sudo yum install numactl.x86_64 -y + sudo yum install numactl-devel.x86_64 -y + sudo yum install numactl-libs.x86_64 -y +fi + +# Install lttng-ust +if [ ! -f "./lttng-ust-latest-2.11.tar.bz2" ];then + wget -c http://lttng.org/files/lttng-ust/lttng-ust-latest-2.11.tar.bz2 + tar -xjf lttng-ust-latest-2.11.tar.bz2 +fi +cd lttng-ust-2.11.* +./configure --disable-man-pages +make -j $(nproc) +sudo make install +sudo ldconfig +cd ../ + +# Install lttng-tools +if [ ! -f "./lttng-tools-latest-2.11.tar.bz2" ];then + wget -c http://lttng.org/files/lttng-tools/lttng-tools-latest-2.11.tar.bz2 + tar -xjf lttng-tools-latest-2.11.tar.bz2 +fi +cd lttng-tools-2.11.* +./configure +make -j $(nproc) +sudo make install +sudo ldconfig +cd ../ + +# Install babeltrace2 +if [ ! -f "./babeltrace-2.0.0.tar.bz2" ];then + wget -c https://www.efficios.com/files/babeltrace/babeltrace-2.0.0.tar.bz2 + tar -xjf babeltrace-2.0.0.tar.bz2 +fi +cd babeltrace-2.0.0 +./configure +make -j $(nproc) +sudo make install +cd ../ diff --git a/src/external/install_openHEVC.sh b/src/external/install_openHEVC.sh index fe95ea69..2552aee4 100755 --- a/src/external/install_openHEVC.sh +++ b/src/external/install_openHEVC.sh @@ -1,4 +1,6 @@ #!/bin/sh -e + +mkdir -p ../build/external cd ../build/external if [ ! 
-d "./openHEVC" ] ; then @@ -7,8 +9,7 @@ fi cd openHEVC git checkout ffmpeg_update -patch -p1 < ../../../external/ffmpeg_update_add_circle_list_for_to_free_frame.patch +git am --whitespace=fix ../../../external/Update-buffer-operation-and-fix-stream-loop-coredump.patch ./configure --libdir=/usr/lib64 --disable-sdl2 make -j `nproc` sudo make install -cd ../../../ diff --git a/src/external/install_safestringlib.sh b/src/external/install_safestringlib.sh new file mode 100755 index 00000000..7ff98649 --- /dev/null +++ b/src/external/install_safestringlib.sh @@ -0,0 +1,14 @@ +#!/bin/bash -e + +mkdir -p ../build/external +cd ../build/external +if [ ! -d "./safestringlib" ] ; then + git clone https://github.com/intel/safestringlib.git +fi + +cd safestringlib +cmake . +make -j $(nproc) -f Makefile +sudo cp libsafestring_shared.so /usr/local/lib/ +sudo mkdir -p /usr/local/include/safestringlib/ +sudo cp ./include/* /usr/local/include/safestringlib/ diff --git a/src/external/install_thrift.sh b/src/external/install_thrift.sh index 38462f67..f69f4e43 100755 --- a/src/external/install_thrift.sh +++ b/src/external/install_thrift.sh @@ -1,8 +1,13 @@ #!/bin/sh -e +OS=$(awk -F= '/^NAME/{print $2}' /etc/os-release) +mkdir -p ../build/external cd ../build/external -sudo yum install libevent-devel.x86_64 -y -sudo yum install openssl-devel -y + +if [ "${OS}" == \""CentOS Linux"\" ];then + sudo yum install libevent-devel.x86_64 -y + sudo yum install openssl-devel -y +fi # boost if [ ! "${BOOST_VERSION}" == \""1_63"\" ];then @@ -12,7 +17,7 @@ if [ ! "${BOOST_VERSION}" == \""1_63"\" ];then tar zxf boost_1_63_0.tar.gz cd boost_1_63_0 ./bootstrap.sh --without-libraries=python - ./b2 -a cxxflags="-D_GLIBCXX_USE_CXX11_ABI=0" -j8 + ./b2 -a cxxflags="-D_GLIBCXX_USE_CXX11_ABI=0" -j $(nproc) sudo ./b2 cxxflags="-D_GLIBCXX_USE_CXX11_ABI=0" install cd .. fi @@ -27,13 +32,7 @@ if [ ! "${THRIFT_VERSION}" == "0.12.0" ];then patch configure ../../../external/Disable_cxx11_abi_for_thrift.patch sed -i '21 a # include ' ./lib/cpp/src/thrift/transport/PlatformSocket.h ./configure --with-boost=/usr/local --with-boost-libdir=/usr/local/lib --with-libevent=/usr --with-java=0 - make -j8 + make -j $(nproc) sudo make install cd .. fi - -# generate gen-cpp files -cd ../../distributed_encoder/util -thrift -r --gen cpp shared.thrift -patch gen-cpp/shared_types.h Implement_operator_RegionInformation.patch -cd - diff --git a/src/external/make_android.sh b/src/external/make_android.sh new file mode 100755 index 00000000..dc2c1dc5 --- /dev/null +++ b/src/external/make_android.sh @@ -0,0 +1,43 @@ +#!/bin/bash -x +NDK_r18b_PATH="../../external/android/android-ndk-r18b" +echo "Start build android ndk for client libraries ..." 
+mkdir -p ../build/android/360SCVP +mkdir -p ../build/android/isolib +mkdir -p ../build/android/OmafDashAccess +mkdir -p ../build/android/player/player_lib + +# Install 360SCVP +cd ../build/android/360SCVP && \ +cmake ../../../360SCVP -DUSE_ANDROID_NDK=ON -DDEBUG=NO -DCMAKE_TOOLCHAIN_FILE=${NDK_r18b_PATH}/build/cmake/android.toolchain.cmake -DANDROID_ABI=arm64-v8a -DANDROID_TOOLCHAIN=aarch64-linux-android-4.9 -DANDROID_PLATFORM=android-21 -DANDROID_STD=c++_shared && \ +make -j && \ +sudo make install + +# Install isolib +cd ../isolib && \ +cmake ../../../isolib -DUSE_ANDROID_NDK=ON -DDEBUG=NO -DCMAKE_TOOLCHAIN_FILE=${NDK_r18b_PATH}/build/cmake/android.toolchain.cmake -DANDROID_ABI=arm64-v8a -DANDROID_TOOLCHAIN=aarch64-linux-android-4.9 -DANDROID_PLATFORM=android-21 -DANDROID_STD=c++_static && \ +make -j && \ +sudo cp dash_parser/libdashparser.a /usr/local/lib/ + +# Install OmafDashAccess +cd ../OmafDashAccess && \ +cmake ../../../OmafDashAccess -DUSE_ANDROID_NDK=ON -DDEBUG=NO -DCMAKE_TOOLCHAIN_FILE=${NDK_r18b_PATH}/build/cmake/android.toolchain.cmake -DANDROID_ABI=arm64-v8a -DANDROID_TOOLCHAIN=aarch64-linux-android-4.9 -DANDROID_PLATFORM=android-21 -DANDROID_STD=c++_shared && \ +make -j && \ +sudo make install + +cd ../player/player_lib && \ +cmake ../../../../player/player_lib -DUSE_OMAF=ON -DANDROID_OS=ON -DDEBUG=NO -DCMAKE_TOOLCHAIN_FILE=../${NDK_r18b_PATH}/build/cmake/android.toolchain.cmake -DANDROID_ABI=arm64-v8a -DANDROID_TOOLCHAIN=aarch64-linux-android-4.9 -DANDROID_PLATFORM=android-21 -DANDROID_STD=c++_shared && \ +make -j && \ +sudo make install + +cd ../../../../ + +mkdir -p ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp /usr/local/lib/libcurl.so ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp /usr/local/lib/libsafestring_shared.so ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp ./build/external/android/openssl-output/lib/libssl.so ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp /usr/local/lib/libglog.so ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp ./build/external/android/openssl-output/lib/libcrypto.so ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp /usr/local/lib/lib360SCVP.so ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp /usr/local/lib/libOmafDashAccess.so ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp /usr/local/lib/libdashparser.a ./player/app/android/app/src/main/jniLibs/arm64-v8a/ +sudo cp /usr/local/lib/libMediaPlayer.so ./player/app/android/app/src/main/jniLibs/arm64-v8a/ \ No newline at end of file diff --git a/src/external/nokia_omaf_patch_for_extrator_reader.diff b/src/external/nokia_omaf_patch_for_extrator_reader.diff deleted file mode 100644 index c63c0ce8..00000000 --- a/src/external/nokia_omaf_patch_for_extrator_reader.diff +++ /dev/null @@ -1,508 +0,0 @@ -diff --git a/Mp4/srcs/CMakeLists.txt b/Mp4/srcs/CMakeLists.txt -index d277175..050fcc0 100644 ---- a/Mp4/srcs/CMakeLists.txt -+++ b/Mp4/srcs/CMakeLists.txt -@@ -60,6 +60,7 @@ elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Cl - # -Wstrict-overflow=5 - # -Wswitch-default - SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden") -+ SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0") - - if(NOT NO_DEBUG_STL) - SET(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_GLIBCXX_DEBUG") -diff --git a/Mp4/srcs/api/reader/mp4vrfilereaderinterface.h b/Mp4/srcs/api/reader/mp4vrfilereaderinterface.h -index eadee2d..5484fb8 100644 ---- 
a/Mp4/srcs/api/reader/mp4vrfilereaderinterface.h -+++ b/Mp4/srcs/api/reader/mp4vrfilereaderinterface.h -@@ -221,7 +221,13 @@ namespace MP4VR - * @param [out] trackSampleBoxType Media track sample description entry type (box type) is returned. - * @return ErrorCode: NO_ERROR, INVALID_CONTEXT_ID or UNINITIALIZED */ - virtual int32_t getTrackSampleType(uint32_t trackId, uint32_t sampleId, FourCC& trackSampleBoxType) const = 0; -+ - -+ virtual int32_t getExtractorTrackSampleData(uint32_t trackId, -+ uint32_t itemIdApi, -+ char* memoryBuffer, -+ uint32_t& memoryBufferSize, -+ bool videoByteStreamHeaders = true) = 0; - /** Get track sample data for given {trackId, sampleId} pair. - * Sample Data does not contain initialization or configuration data (i.e. decoder configuration records), - * it is pure sample data - except for samples where bytestream headers are inserted and 'hvc1'/'avc1' type s. -diff --git a/Mp4/srcs/reader/mp4vrfiledatatypesinternal.hpp b/Mp4/srcs/reader/mp4vrfiledatatypesinternal.hpp -index ff04584..42b8941 100644 ---- a/Mp4/srcs/reader/mp4vrfiledatatypesinternal.hpp -+++ b/Mp4/srcs/reader/mp4vrfiledatatypesinternal.hpp -@@ -538,6 +538,8 @@ namespace MP4VR - SequenceToSegmentMap sequenceToSegment; - - SegmentIndex segmentIndex; -+ -+ ContextId correspondTrackId; ///< index of the track corrresponding to the init segment - }; - } // namespace MP4VR - -diff --git a/Mp4/srcs/reader/mp4vrfilereaderaccessors.cpp b/Mp4/srcs/reader/mp4vrfilereaderaccessors.cpp -index 3719e9a..cedb30c 100644 ---- a/Mp4/srcs/reader/mp4vrfilereaderaccessors.cpp -+++ b/Mp4/srcs/reader/mp4vrfilereaderaccessors.cpp -@@ -860,6 +860,46 @@ namespace MP4VR - return segmentIdOf(id.first, id.second, segmentId); - } - -+ int32_t MP4VRFileReaderImpl::getSampleDataInfo(uint32_t trackId, -+ uint32_t itemIdApi, -+ const InitSegmentId& initSegmentId, -+ uint64_t& refSampleLength, -+ uint64_t& refDataOffset) -+ { -+ //auto trackContextId = ofTrackId(trackId).second; -+ auto trackContextId = ContextId(trackId); -+ if (!(mInitSegmentPropertiesMap[initSegmentId].trackProperties[trackContextId].referenceTrackIds["scal"].empty())) -+ { -+ return ErrorCode::INVALID_PROPERTY_INDEX; -+ } -+ //auto refTrackContextId = -+ //mInitSegmentPropertiesMap[initSegmentId].trackProperties[trackContextId].referenceTrackIds["scal"].at( -+ //trackReference); -+ -+ // Create a new pair: init segment from the extractor but track context id from the referred media track where -+ // data is extracted from. 
-+ //InitSegmentTrackId refInitSegTrackId = std::make_pair(initSegmentId, refTrackContextId); -+ InitSegmentTrackId neededInitSegTrackId = std::make_pair(initSegmentId, trackContextId); -+ -+ //SegmentId refSegmentId; -+ SegmentId segmentId; -+ //int32_t result = segmentIdOf(refInitSegTrackId, itemIdApi, refSegmentId); -+ int32_t result = segmentIdOf(neededInitSegTrackId, itemIdApi, segmentId); -+ if (result != ErrorCode::OK) -+ { -+ return result; -+ } -+ //SegmentTrackId refSegTrackId = std::make_pair(refSegmentId, refInitSegTrackId.second); -+ SegmentTrackId segTrackId = std::make_pair(segmentId, neededInitSegTrackId.second); -+ //ItemId refItemId = ItemId(itemIdApi) - getTrackInfo(initSegmentId, refSegTrackId).itemIdBase; -+ ItemId itemId = ItemId(itemIdApi) - getTrackInfo(initSegmentId, segTrackId).itemIdBase; -+ -+ refDataOffset = getTrackInfo(initSegmentId, segTrackId).samples.at(itemId.get()).dataOffset; -+ refSampleLength = getTrackInfo(initSegmentId, segTrackId).samples.at(itemId.get()).dataLength; -+ -+ return ErrorCode::OK; -+ } -+ - int32_t MP4VRFileReaderImpl::getRefSampleDataInfo(uint32_t trackId, - uint32_t itemIdApi, - const InitSegmentId& initSegmentId, -@@ -895,6 +935,336 @@ namespace MP4VR - return ErrorCode::OK; - } - -+ int32_t MP4VRFileReaderImpl::getExtractorTrackSampleData(uint32_t trackId, -+ uint32_t itemIdApi, -+ char* memoryBuffer, -+ uint32_t& memoryBufferSize, -+ bool videoByteStreamHeaders) -+ { -+ uint32_t spaceAvailable = memoryBufferSize; -+ if (isInitializedError()) -+ { -+ return ErrorCode::UNINITIALIZED; -+ } -+ -+ InitSegmentTrackId initSegTrackId = ofTrackId(trackId); -+ InitSegmentId initSegmentId = initSegTrackId.first; -+ SegmentId segmentId; -+ int32_t result = segmentIdOf(initSegTrackId, itemIdApi, segmentId); -+ if (result != ErrorCode::OK) -+ { -+ return result; -+ } -+ SegmentTrackId segTrackId = std::make_pair(segmentId, initSegTrackId.second); -+ ItemId itemId = ItemId(itemIdApi) - getTrackInfo(initSegmentId, segTrackId).itemIdBase; -+ -+ SegmentIO& io = mInitSegmentPropertiesMap.at(initSegmentId).segmentPropertiesMap.at(segmentId).io; -+ // read NAL data to bitstream object -+ ContextType contextType; -+ int error = getContextTypeError(initSegTrackId, contextType); -+ if (error) -+ { -+ return error; -+ } -+ switch (contextType) -+ { -+ case ContextType::TRACK: -+ { -+ // The requested frame should be one that is available -+ if (itemId.get() >= getTrackInfo(initSegmentId, segTrackId).samples.size()) -+ { -+ return ErrorCode::INVALID_ITEM_ID; -+ } -+ -+ const uint32_t sampleLength = getTrackInfo(initSegmentId, segTrackId).samples.at(itemId.get()).dataLength; -+ if (memoryBufferSize < sampleLength) -+ { -+ memoryBufferSize = sampleLength; -+ return ErrorCode::MEMORY_TOO_SMALL_BUFFER; -+ } -+ -+ std::int64_t neededDataOffset = (std::int64_t) getTrackInfo(initSegmentId, segTrackId).samples.at(itemId.get()).dataOffset; -+ //seekInput(io, (std::int64_t) getTrackInfo(initSegmentId, segTrackId).samples.at(itemId.get()).dataOffset); -+ seekInput(io, neededDataOffset); -+ io.stream->read(memoryBuffer, sampleLength); -+ memoryBufferSize = sampleLength; -+ -+ if (!io.stream->good()) -+ { -+ return ErrorCode::FILE_READ_ERROR; -+ } -+ break; -+ } -+ default: -+ return ErrorCode::INVALID_CONTEXT_ID; -+ } -+ -+ // Process bitstream by codec -+ FourCC codeType; -+ error = getDecoderCodeType(toTrackId(initSegTrackId), itemIdApi, codeType); -+ if (error) -+ { -+ return error; -+ } -+ -+ if (codeType == "avc1" || codeType == "avc3") -+ { -+ // Get item data 
from AVC bitstream -+ if (videoByteStreamHeaders) -+ { -+ return processAvcItemData(memoryBuffer, memoryBufferSize); -+ } -+ else -+ { -+ return ErrorCode::OK; -+ } -+ } -+ else if (codeType == "hvc1" || codeType == "hev1") -+ { -+ // Get item data from HEVC bitstream -+ if (videoByteStreamHeaders) -+ { -+ return processHevcItemData(memoryBuffer, memoryBufferSize); -+ } -+ else -+ { -+ return ErrorCode::OK; -+ } -+ } -+ // If the codeType is found to be of hvc2 type -+ else if (codeType == "hvc2") -+ { -+ // Load the extrator NAL into memory buffer, which is copied into -+ // extractorSampleBuffer for further processing -+ Vector extractorSampleBuffer(memoryBuffer, memoryBuffer + memoryBufferSize); -+ -+ std::uint8_t nalLengthSizeMinus1 = 3; -+ ItemId sampleBase; -+ auto& sampleInfo = getSampleInfo(initSegmentId, segTrackId, sampleBase); -+ SampleDescriptionIndex index = sampleInfo.at((ItemId(itemIdApi) - sampleBase).get()).sampleDescriptionIndex; -+ if (getInitTrackInfo(initSegTrackId).nalLengthSizeMinus1.count(index.get()) != 0) -+ { -+ nalLengthSizeMinus1 = getInitTrackInfo(initSegTrackId).nalLengthSizeMinus1.at(index); -+ assert(nalLengthSizeMinus1 == 3); // NAL length can be 1, 2 or 4 bytes, but the whole parsing process assumes it is 4 bytes -+ } -+ -+ Hvc2Extractor::ExtractorSample extractorSample; -+ uint64_t extractionSize = 0; -+ uint64_t tolerance = 0; -+ -+ // If the current NAL is affirmed to be an extractor NAL, parse it to extNalDat -+ -+ if (Hvc2Extractor::parseExtractorNal(extractorSampleBuffer, extractorSample, nalLengthSizeMinus1, extractionSize)) -+ { -+ if (extractionSize == 0) -+ { -+ // the size from extractors is not reliable. Make an estimate based on sample lengths of the -+ // referred tracks -+ extractionSize = 0; -+ for (auto& extractor : extractorSample.extractors) -+ { -+ for (std::vector::iterator sampleConstruct = -+ extractor.sampleConstruct.begin(); -+ sampleConstruct != extractor.sampleConstruct.end(); ++sampleConstruct) -+ { -+ uint64_t refSampleLength = 0; -+ uint64_t refDataOffset = 0; -+ result = -+ getRefSampleDataInfo(trackId, itemIdApi, initSegmentId, (*sampleConstruct).track_ref_index, -+ refSampleLength, refDataOffset); -+ if (result != ErrorCode::OK) -+ { -+ return result; -+ } -+ extractionSize += refSampleLength; -+ } -+ } -+ // + add 10% tolerance (inline constructors can result in more data in the extracted samples than -+ // the original samples, but should be less than 10%) -+ tolerance = (uint64_t)(extractionSize / 10); -+ extractionSize += tolerance; -+ } -+ if (extractionSize > (uint64_t) spaceAvailable) -+ { -+ // add an additional tolerance to requested size; this could save some useless realloc's in client -+ // side in the next runs (assuming the client reuses the buffers) -+ memoryBufferSize = (uint32_t)(extractionSize + tolerance); -+ return ErrorCode::MEMORY_TOO_SMALL_BUFFER; -+ } -+ -+ // Extract bytes from the inline and sample constructs -+ uint32_t extractedBytes = 0; -+ char* buffer = memoryBuffer; -+ char* inlineNalLengthPlaceHolder = nullptr; -+ size_t inlineLength = 0; -+ std::vector::iterator sampleConstruct; -+ std::vector::iterator inlineConstruct; -+ uint64_t refSampleLength = 0; -+ uint64_t refSampleOffset = 0; -+ uint8_t trackRefIndex = UINT8_MAX; -+ -+ for (auto& extractor : extractorSample.extractors) -+ { -+ // We loop through both constructors, until both of them are empty. They are often interleaved, but not -+ // always through the whole sequence. 
-+ for (sampleConstruct = extractor.sampleConstruct.begin(), -+ inlineConstruct = extractor.inlineConstruct.begin(); -+ sampleConstruct != extractor.sampleConstruct.end() || -+ inlineConstruct != extractor.inlineConstruct.end();) -+ { -+ if (inlineConstruct != extractor.inlineConstruct.end() && -+ (sampleConstruct == extractor.sampleConstruct.end() || -+ (*inlineConstruct).order_idx < (*sampleConstruct).order_idx)) -+ { -+ inlineNalLengthPlaceHolder = buffer; -+ // the inline constructor is expected to contain a placeholder for the NAL unit length field too -+ -+ // copy the inline part - note: std::copy with iterators give warning in Visual Studio, so the -+ // good old memcpy is used instead -+ memcpy(buffer, (*inlineConstruct).inline_data.data(), (*inlineConstruct).inline_data.size()); -+ inlineLength = (*inlineConstruct).inline_data.size() - (nalLengthSizeMinus1 + 1); // exclude the length -+ buffer += (*inlineConstruct).data_length; -+ extractedBytes += (*inlineConstruct).data_length; -+ ++inlineConstruct; -+ } -+ else if (sampleConstruct != extractor.sampleConstruct.end()) -+ { -+ /////Andrew modify io since it might from different segement -+ //InitSegmentTrackId ref_initSegTrackId = ofTrackId((*sampleConstruct).track_ref_index + 1); -+ //InitSegmentId ref_initSegmentId = ref_initSegTrackId.first; -+ auto referredTrack = ContextId((*sampleConstruct).track_ref_index + 1); -+ InitSegmentId ref_initSegmentId; -+ for (const auto& loopInitSegment : mInitSegmentPropertiesMap) -+ { -+ if (loopInitSegment.second.correspondTrackId == referredTrack) -+ { -+ ref_initSegmentId = loopInitSegment.first; -+ break; -+ } -+ } -+ InitSegmentTrackId ref_initSegTrackId = std::make_pair(ref_initSegmentId, referredTrack); -+ SegmentId ref_segmentId; -+ int32_t result = segmentIdOf(ref_initSegTrackId, itemIdApi, ref_segmentId); -+ SegmentIO& ref_io = mInitSegmentPropertiesMap.at(ref_initSegmentId).segmentPropertiesMap.at(ref_segmentId).io; -+ -+ // read the sample from the referenced track -+ if ((*sampleConstruct).track_ref_index != trackRefIndex || trackRefIndex == UINT8_MAX) -+ { -+ //result = -+ //getRefSampleDataInfo(trackId, itemIdApi, initSegmentId, (*sampleConstruct).track_ref_index, -+ //refSampleLength, refSampleOffset); -+ result = -+ getSampleDataInfo(((*sampleConstruct).track_ref_index + 1), itemIdApi, ref_initSegmentId, -+ refSampleLength, refSampleOffset); -+ if (result != ErrorCode::OK) -+ { -+ return result; -+ } -+ trackRefIndex = (*sampleConstruct).track_ref_index; -+ seekInput(ref_io, refSampleOffset); -+ } -+ // let's read the length to the buffer (use it as a temp storage, don't update the ptr) -+ ref_io.stream->read(buffer, (nalLengthSizeMinus1 + 1)); -+ // todo nalLengthSizeMinus1-based reading -+ uint64_t refNalLength = readNalLength(buffer); -+ -+ // sc.data_offset is from the beginning of sample -+ uint64_t inputReadOffset = refSampleOffset + (*sampleConstruct).data_offset; -+ -+ // Extract the referenced sample into memoryBuffer from io stream -+ uint64_t bytesToCopy = refNalLength; -+ if ((*sampleConstruct).data_length == 0) -+ { -+ // bytes to copy is taken from the bitstream (length field referenced by data_offset) -+ // there should be no inline constructor / replacement header (see 14496-15 A.7.4.1.2) -+ bytesToCopy = refNalLength; -+ refSampleLength = 0; -+ } -+ else -+ { -+ if ((uint64_t)((*sampleConstruct).data_offset) + (uint64_t)((*sampleConstruct).data_length) > refSampleLength) -+ { -+ // the sampleConstruct gives too large data_length, clip the length of copied 
data block to the length of the actual sample -+ if ((*sampleConstruct).data_offset > refSampleLength) -+ { -+ // something is wrong, the offset and sample lengths do not match at all -+ return ErrorCode::INVALID_SEGMENT; -+ } -+ bytesToCopy = refSampleLength - (*sampleConstruct).data_offset; -+ } -+ else -+ { -+ // follow the values given in the sampleConstruct -+ bytesToCopy = (*sampleConstruct).data_length; -+ } -+ -+ if (inlineNalLengthPlaceHolder != nullptr) -+ { -+ // need to rewrite the NAL length field as the value from inline constructor is no -+ // longer valid -+ uint64_t actualNalLength = bytesToCopy + inlineLength; -+ writeNalLength(actualNalLength, inlineNalLengthPlaceHolder); -+ inlineNalLengthPlaceHolder = nullptr; -+ } -+ else -+ { -+ // there was no inline constructor. (*sampleConstruct).data_offset should now point to -+ // the length field of the NAL to be copied, and we already have the length in the buffer. -+ // Just update the ptr & counter -+ inputReadOffset += (nalLengthSizeMinus1 + 1); -+ if (bytesToCopy == refSampleLength - (*sampleConstruct).data_offset) -+ { -+ bytesToCopy -= (nalLengthSizeMinus1 + 1); -+ } -+ buffer += (nalLengthSizeMinus1 + 1); -+ extractedBytes += (nalLengthSizeMinus1 + 1); -+ } -+ } -+ -+ if (extractedBytes + (uint32_t)bytesToCopy > spaceAvailable) -+ { -+ memoryBufferSize = extractedBytes + (uint32_t)bytesToCopy; -+ return ErrorCode::MEMORY_TOO_SMALL_BUFFER; -+ } -+ // Add NAL payload -+ if (inputReadOffset > 0) -+ { -+ seekInput(ref_io, (std::int64_t) inputReadOffset); -+ } -+ ref_io.stream->read(buffer, bytesToCopy); -+ buffer += bytesToCopy; -+ extractedBytes += (uint32_t)bytesToCopy; -+ ++sampleConstruct; -+ inlineNalLengthPlaceHolder = nullptr; -+ inlineLength = 0; -+ -+ refSampleLength -= (refNalLength + (nalLengthSizeMinus1 + 1)); -+ } -+ } -+ } -+ memoryBufferSize = extractedBytes; -+ if (videoByteStreamHeaders) -+ { -+ // Process the extracted NAL sample (replace NAL lengths with start codes) -+ return processHevcItemData(memoryBuffer, memoryBufferSize); -+ } -+ -+ return ErrorCode::OK; -+ } -+ return ErrorCode::UNSUPPORTED_CODE_TYPE; // hvc2 but unknown extractor? -+ } -+ else if ((codeType == "mp4a") || (codeType == "invo") || (codeType == "urim") || (codeType == "mp4v")) -+ { -+ // already valid data - do nothing. 
-+ return ErrorCode::OK; -+ } -+ else -+ { -+ // Code type not supported -+ return ErrorCode::UNSUPPORTED_CODE_TYPE; -+ } -+ } -+ - int32_t MP4VRFileReaderImpl::getTrackSampleData(uint32_t trackId, - uint32_t itemIdApi, - char* memoryBuffer, -diff --git a/Mp4/srcs/reader/mp4vrfilereaderimpl.cpp b/Mp4/srcs/reader/mp4vrfilereaderimpl.cpp -index 3778720..c4d479e 100644 ---- a/Mp4/srcs/reader/mp4vrfilereaderimpl.cpp -+++ b/Mp4/srcs/reader/mp4vrfilereaderimpl.cpp -@@ -840,6 +840,9 @@ namespace MP4VR - { - sequenceToSegment.erase(sequence); - } -+ SegmentProperties& segmentProperties = mInitSegmentPropertiesMap.at(initSegmentId).segmentPropertiesMap[segmentId]; -+ SegmentIO& io = segmentProperties.io; -+ io.stream.reset(nullptr); - mInitSegmentPropertiesMap.at(initSegmentId).segmentPropertiesMap.erase(segmentId); - } - else -@@ -1665,6 +1668,17 @@ namespace MP4VR - trackPropertiesMap[associatedTrack].trackFeature.setFeature( - TrackFeatureEnum::Feature::HasAssociatedDepthTrack); - } -+ -+ } -+ -+ if (trackProperties.second.referenceTrackIds["vdep"].empty()) -+ { -+ mInitSegmentPropertiesMap.at(initSegmentId).correspondTrackId = trackProperties.first; -+ } -+ -+ if (!(trackProperties.second.referenceTrackIds["scal"].empty())) -+ { -+ mInitSegmentPropertiesMap.at(initSegmentId).correspondTrackId = trackProperties.first; - } - } - } -diff --git a/Mp4/srcs/reader/mp4vrfilereaderimpl.hpp b/Mp4/srcs/reader/mp4vrfilereaderimpl.hpp -index 17aa650..c7fc916 100644 ---- a/Mp4/srcs/reader/mp4vrfilereaderimpl.hpp -+++ b/Mp4/srcs/reader/mp4vrfilereaderimpl.hpp -@@ -173,6 +173,13 @@ namespace MP4VR - /// @see MP4VRFileReaderInterface::getItemType() - int32_t getTrackSampleType(uint32_t trackId, uint32_t itemId, FourCC& trackItemType) const; - -+ -+ int32_t getExtractorTrackSampleData(uint32_t trackId, -+ uint32_t itemIdApi, -+ char* memoryBuffer, -+ uint32_t& memoryBufferSize, -+ bool videoByteStreamHeaders = true); -+ - /// @see MP4VRFileReaderInterface::getItemData() - int32_t getTrackSampleData(uint32_t trackId, - uint32_t itemId, -@@ -723,6 +730,13 @@ namespace MP4VR - int32_t segmentIdOf(InitSegmentTrackId initSegTrackId, ItemId itemId, SegmentId& segmentId) const; - int32_t segmentIdOf(Id id, SegmentId& segmentId) const; - -+ -+ int32_t getSampleDataInfo(uint32_t trackId, -+ uint32_t itemIdApi, -+ const InitSegmentId& initSegmentId, -+ uint64_t& refSampleLength, -+ uint64_t& refDataOffset); -+ - int32_t getRefSampleDataInfo(uint32_t trackId, - uint32_t itemIdApi, - const InitSegmentId& initSegmentId, -diff --git a/Mp4/srcs/reader/mp4vrfilestreaminternal.cpp b/Mp4/srcs/reader/mp4vrfilestreaminternal.cpp -index afd1156..cf1d548 100644 ---- a/Mp4/srcs/reader/mp4vrfilestreaminternal.cpp -+++ b/Mp4/srcs/reader/mp4vrfilestreaminternal.cpp -@@ -35,6 +35,8 @@ namespace MP4VR - - InternalStream::~InternalStream() - { -+ if(m_stream) -+ delete m_stream; - // nothing - } - diff --git a/src/external/openssl_arm64.sh b/src/external/openssl_arm64.sh new file mode 100755 index 00000000..e6f029d5 --- /dev/null +++ b/src/external/openssl_arm64.sh @@ -0,0 +1,18 @@ +#openssl +export ANDROID_NDK_ROOT="${PWD}/../build/external/android/android-ndk-r14b" +cd ../build/external/android/openssl-1.1.0f/ +if [ ! 
-d "./build" ];then + mkdir build +fi +cd build +#../../setenv-android_armv8.sh arm64-v8a +source ../../../../../external/env_openssl_arm64.sh +#../config shared --openssldir=/home/media/Codes/OMAF_android/build/external/openssl-output --prefix=/home/media/Codes/OMAF_android/build/external/openssl-output +#../config shared no-ssl2 no-ssl3 no-comp no-hw --openssldir=${PWD}/../../openssl-output --prefix=${PWD}/../../openssl-output +../Configure shared no-ssl2 no-ssl3 no-comp no-hw android64-aarch64 --openssldir=${PWD}/../../openssl-output --prefix=${PWD}/../../openssl-output +patch -p1 < ../../../../../external/0001-SSL-Modify-Makefile.patch +make clean +make depend +make CALC_VERSIONS="SHLIB_COMPAT=; SHLIB_SOVER=" MAKE="make -e" all +#make CALC_VERSIONS="SHLIB_COMPAT=; SHLIB_SOVER=" build_libs +make install diff --git a/src/external/prebuild.sh b/src/external/prebuild.sh index 8ebeb3fc..0bc25267 100755 --- a/src/external/prebuild.sh +++ b/src/external/prebuild.sh @@ -1,16 +1,20 @@ #!/bin/bash -x -os=$(awk -F= '/^NAME/{print $2}' /etc/os-release) -object=$1 -path=${PWD} +OS=$(awk -F= '/^NAME/{print $2}' /etc/os-release) +TARGET=$1 +LTTNGFLAG=$2 +EX_PATH=${PWD} if [ $# != 1 ] ; then - echo "Please choose server, client or test you want to build on." + echo "Please choose server, client or android you want to build." + echo "Add \"--enable-lttng\" as the second parameter to enable lttng." + echo "e.g." + echo " ./prebuild.sh server --enabel-lttng" exit fi -if [ "${object}" != "server" ] && [ "${object}" != "client" ] && [ "${object}" != "test" ] ; then - echo "Please choose server, client or test you want to build on." - exit +if [ "${TARGET}" != "server" ] && [ "${TARGET}" != "client" ] && [ "${TARGET}" != "android" ] ; then + echo "Please choose server, client or test you want to build on." + exit fi program_exists() { @@ -24,107 +28,92 @@ program_exists() { return 0 } -download_tools() -{ +install_tools() { program_exists gcc if [ $? != 0 ];then - if [ $(gcc -dumpversion | awk -F'.' 
'{print $1}') -ge 6 ];then - if [ "$os" == \""Ubuntu"\" ];then - sudo apt-get install -y software-properties-common - sudo add-apt-repository ppa:ubuntu-toolchain-r/test - sudo apt update - sudo apt install g++-7 -y - sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 60 --slave /usr/bin/g++ g++ /usr/bin/g++-7 - sudo update-alternatives --config gcc - elif [ "$os" == \""CentOS Linux"\" ];then - sudo yum install centos-release-scl - sudo yum install scl-utils - sudo yum install devtoolset-7-gcc* - source /opt/rh/devtoolset-7/enable - scl enable devtoolset-7 bash - fi + if [ "${OS}" == \""Ubuntu"\" ];then + sudo apt-get install -y software-properties-common + sudo apt update + sudo apt install make -y + sudo apt install g++-7 -y + sudo update-alternatives \ + --install /usr/bin/gcc gcc /usr/bin/gcc-7 60 \ + --slave /usr/bin/g++ g++ /usr/bin/g++-7 + sudo update-alternatives --config gcc + elif [ "${OS}" == \""CentOS Linux"\" ];then + sudo yum install centos-release-scl + sudo yum install scl-utils + sudo yum install devtoolset-7-gcc* + source /opt/rh/devtoolset-7/enable + scl enable devtoolset-7 bash fi fi - if [ "$os" == \""Ubuntu"\" ];then - sudo apt-get install libgmp-dev libmpfr-dev mpc libmpc-dev patch autoconf libtool automake libssl-dev -y - elif [ "$os" == \""CentOS Linux"\" ];then - sudo yum install gmp gmp-devel mpfr mpfr-devel libmpc libmpc-devel patch autoconf libtool automake libssl-devel -y + if [ "${OS}" == \""Ubuntu"\" ];then + sudo apt-get install libgmp-dev libmpfr-dev mpc libmpc-dev patch autoconf \ + libtool automake libssl-dev libevent-dev libcurl4-openssl-dev bc -y + elif [ "${OS}" == \""CentOS Linux"\" ];then + sudo yum install gmp gmp-devel mpfr mpfr-devel libmpc libmpc-devel \ + patch autoconf libtool automake libssl-devel bc \ + libevent-devel.x86_64 openssl-devel libxml2-devel -y + fi + + program_exists cmake + if [ $? != 0 ];then + if [ ! -f "./cmake-3.12.4.tar.gz" ];then + wget https://cmake.org/files/v3.12/cmake-3.12.4.tar.gz + fi + tar xf cmake-3.12.4.tar.gz + cd cmake-3.12.4 + ./bootstrap --prefix=/usr && make -j $(nproc) && sudo make install && cd .. fi mkdir -p ../build/external && cd ../build/external if [ ! -f "./zlib-1.2.11.tar.gz" ];then - wget -q http://zlib.net/zlib-1.2.11.tar.gz + wget http://zlib.net/zlib-1.2.11.tar.gz fi tar xf zlib-1.2.11.tar.gz - cd zlib-1.2.11 && ./configure && make -j && sudo make install && cd .. - - if [ ! -f "./libxml2-2.9.6.tar.gz" ];then - wget -q http://www.xmlsoft.org/sources/libxml2-2.9.6.tar.gz - fi - tar xf libxml2-2.9.6.tar.gz - cd libxml2-2.9.6 && ./configure --with-python=no && make -j && sudo make install && cd .. - cd /usr/local/include && sudo mv libxml2/libxml . && sudo rm -rf libxml2 - cd ${path}/../build/external + cd zlib-1.2.11 && ./configure && make -j $(nproc) && sudo make install && cd .. program_exists yasm if [ $? != 0 ];then if [ ! -f "./yasm-1.3.0.tar.gz" ];then - wget -q http://www.tortall.net/projects/yasm/releases/yasm-1.3.0.tar.gz + wget http://www.tortall.net/projects/yasm/releases/yasm-1.3.0.tar.gz fi tar zxf yasm-1.3.0.tar.gz cd yasm-1.3.0 - ./configure && make -j `nproc` && sudo make install && cd .. - fi - - program_exists cmake - if [ $? != 0 ];then - if [ ! -f "./cmake-3.12.4.tar.gz" ];then - wget -q https://cmake.org/files/v3.12/cmake-3.12.4.tar.gz - fi - tar xf cmake-3.12.4.tar.gz - cd cmake-3.12.4 - ./bootstrap --prefix=/usr && make -j `nproc` && sudo make install && cd .. + ./configure && make -j $(nproc) && sudo make install && cd .. fi if [ ! 
-f "./curl-7.66.0.tar.xz" ];then - wget -q https://curl.haxx.se/download/curl-7.66.0.tar.xz + wget https://curl.haxx.se/download/curl-7.66.0.tar.xz fi tar xf curl-7.66.0.tar.xz - cd curl-7.66.0 && ./configure --with-darwinssl && make -j && sudo make install - cd /usr/local/include/libxml2 && sudo mv libxml ../ && cd ../ && sudo rm -rf libxml2 - cd ${path}/../build/external + cd curl-7.66.0 && ./configure --with-darwinssl && make -j $(nproc) && sudo make install } -download_dependencies() -{ - cd $path - if [ $1 == "server" ];then - mkdir -p ../build/external/FFmpeg - ./build_glog.sh - ./build_Nokia_omaf.sh - ./install_openHEVC.sh - ./install_SVT.sh - ./install_thrift.sh - ./install_FFmpeg.sh server - - elif [ $1 == "client" ];then - ./build_glog.sh - ./build_Nokia_omaf.sh - ./prebuild_player.sh +install_dependencies() { + cd ${EX_PATH} + if [ ${TARGET} == "server" ] ; then + ./install_glog.sh + ./install_safestringlib.sh + ./install_openHEVC.sh + ./install_SVT.sh + ./install_thrift.sh + ./install_FFmpeg.sh server + elif [ ${TARGET} == "client" ] ; then + ./install_glog.sh + ./install_safestringlib.sh + ./prebuild_player.sh + ./install_FFmpeg.sh client + elif [ ${TARGET} == "android" ] ; then + ./prebuild_android.sh + fi - elif [ $1 == "test" ];then - mkdir -p ../build/test && cd ../build/test - cp -r ../../google_test/gtest/ . - git clone https://github.com/google/googletest.git - cd googletest && git checkout -b v1.8.x origin/v1.8.x - cd googletest && mkdir build && cd build - cmake -DBUILD_SHARED_LIBS=ON .. && make - g++ -I../include/ -I.. -c ../src/gtest-all.cc -D_GLIBCXX_USE_CXX11_ABI=0 - g++ -I../include/ -I.. -c ../src/gtest_main.cc -D_GLIBCXX_USE_CXX11_ABI=0 - ar -rv libgtest.a gtest-all.o gtest_main.o - fi + if [ ${LTTNGFLAG} == "--enable-lttng" ] ; then + ./install_lttng.sh + fi } -download_tools -download_dependencies ${object} +install_tools +install_dependencies ${TARGET} diff --git a/src/external/prebuild_android.sh b/src/external/prebuild_android.sh new file mode 100755 index 00000000..d27780bf --- /dev/null +++ b/src/external/prebuild_android.sh @@ -0,0 +1,61 @@ +#!/bin/bash +mkdir -p ../build/external/android && cd ../build/external/android +#NDK download +if [ ! -d "android-ndk-r18b" ];then + wget https://dl.google.com/android/repository/android-ndk-r18b-linux-x86_64.zip + unzip android-ndk-r18b-linux-x86_64.zip +fi +cd android-ndk-r18b +#NDK path +NDK_r18b_PATH=${PWD} +cd ../ +#safestring +if [ ! -d "./safestringlib" ] ; then + git clone https://github.com/intel/safestringlib.git +fi + +cd safestringlib +if [ ! -d "./build" ];then + mkdir build +fi +cd build +cmake .. -DBUILD_SHARED_LIBS=ON -DDEBUG=NO -DCMAKE_TOOLCHAIN_FILE=$NDK_r18b_PATH/build/cmake/android.toolchain.cmake -DANDROID_ABI=arm64-v8a -DANDROID_TOOLCHAIN=aarch64-linux-android-4.9 -DANDROID_PLATFORM=android-21 -DANDROID_STD=c++_shared +make -j $(nproc) -f Makefile +sudo cp libsafestring_shared.so /usr/local/lib/ +sudo mkdir -p /usr/local/include/safestringlib/ +sudo cp ../include/* /usr/local/include/safestringlib/ +cd ../.. +#glog +if [ ! -d "./glog" ];then + git clone https://github.com/google/glog.git +fi +cd glog +patch -p1 < ../../../../external/0001-Update-glob-header-cpp-file-for-android-ndk-build.patch +if [ ! -d "./build" ];then + mkdir build +fi +cd build +cmake .. 
-DBUILD_SHARED_LIBS=ON -DDEBUG=NO -DCMAKE_TOOLCHAIN_FILE=$NDK_r18b_PATH/build/cmake/android.toolchain.cmake -DANDROID_ABI=arm64-v8a -DANDROID_TOOLCHAIN=aarch64-linux-android-4.9 -DANDROID_PLATFORM=android-21 -DANDROID_STD=c++_shared +make -j +sudo make install +cd ../.. +#curl +#NDK r14b download +if [ ! -d "android-ndk-r14b" ];then + wget https://dl.google.com/android/repository/android-ndk-r14b-linux-x86_64.zip + unzip android-ndk-r14b-linux-x86_64.zip +fi + +if [ ! -d "./curl-7.66.0" ];then + wget https://curl.haxx.se/download/curl-7.66.0.tar.xz + tar xf curl-7.66.0.tar.xz +fi +if [ ! -d "./openssl-1.1.0f" ];then + wget https://www.openssl.org/source/old/1.1.0/openssl-1.1.0f.tar.gz + tar -xvf openssl-1.1.0f.tar.gz +fi +cd ../../../external +./openssl_arm64.sh +./curl_arm64.sh +cd ../build/external/android && sudo cp ./curl-output/arm64-v8a/lib/libcurl.so /usr/local/lib/ +cp ./android-ndk-r14b/platforms/android-19/arch-arm/usr/include/sys/timeb.h ./android-ndk-r18b/sysroot/usr/include/sys/ diff --git a/src/external/prebuild_player.sh b/src/external/prebuild_player.sh index ca9b9fb6..fd4d82b4 100755 --- a/src/external/prebuild_player.sh +++ b/src/external/prebuild_player.sh @@ -1,49 +1,20 @@ #!/bin/bash -ex -which awk - -ORIPATH=${PWD} OS=$(awk -F= '/^NAME/{print $2}' /etc/os-release) # INSTALL DEPENDENCIES if [ "${OS}" == \""Ubuntu"\" ];then - sudo apt-get install lsb-core libxrandr-dev libxinerama-dev libxcursor-dev libxi-dev libgl1-mesa-dev libglu1-mesa-dev libgles2-mesa-dev libglm-dev libegl1-mesa-dev pkg-config libglfw3-dev liblzma-dev -y + sudo apt-get install -y \ + lsb-core libxrandr-dev libxinerama-dev libxcursor-dev libxi-dev \ + libgl1-mesa-dev libglu1-mesa-dev libgles2-mesa-dev libglm-dev \ + libegl1-mesa-dev pkg-config libglfw3-dev liblzma-dev elif [ "${OS}" == \""CentOS Linux"\" ];then - sudo yum install -y redhat-lsb libXrandr libXrandr-devel libXinerama libXinerama-devel libXcursor libXcursor-devel libXi libXi-devel mesa-libGL mesa-libGL-devel mesa-libGLU mesa-libGLU-devel mesa-libGLES-devel glm-devel mesa-libEGL-devel SDL2 SDL2-devel libcurl4-openssl-dev glfw glfw-devel xz-devel pkg-config + wget https://download-ib01.fedoraproject.org/pub/epel/7/x86_64/Packages/e/epel-release-7-12.noarch.rpm + rpm -Uvh epel-release*rpm || true + sudo yum install -y redhat-lsb libXrandr libXrandr-devel libXinerama \ + libXinerama-devel libXcursor libXcursor-devel libXi libXi-devel \ + mesa-libGL mesa-libGL-devel mesa-libGLU mesa-libGLU-devel \ + mesa-libGLES-devel glm-devel mesa-libEGL-devel mesa-dri-drivers \ + SDL2 SDL2-devel libcurl4-openssl-dev glfw glfw-devel xz-devel \ + pkg-config lzma fi - -check_mkdir() -{ - if [ ! 
-d "$1" ];then - mkdir -p $1 - fi - cd $1 -} - -###INSTALL LIBVA -intel_curl() { - curl -O -k -x '' -H 'X-JFrog-Art-Api: AKCp5dL3Kxmp2PhDfYhT2oFk4SDxJji5H8S38oAqmMSkiD46Ho8uCA282aJJhM9ZqCKLb64bw' "$@" -} -check_mkdir ${ORIPATH}/../build/external/ -intel_curl https://ubit-artifactory-sh.intel.com/artifactory/DCG_Media_Driver-local/PV5-Build-After-Branch-Out/build_prod_mss_kblg/10013/MediaServerStudioEssentialsKBL2019R1HF1_10010.tar.gz -tar xzf MediaServerStudioEssentialsKBL2019R1HF1_10010.tar.gz -cd MediaServerStudioEssentialsKBL2019R1HF1_10010 -tar xzf intel-linux-media-kbl-10010.tar.gz -cd intel-linux-media-kbl-10010 -if [ "${OS}" == \""CentOS Linux"\" ];then - sed -i '108s/lib\/x86_64-linux-gnu/usr\/lib64/' ./install_media.sh -fi -( echo 'n' ) | sudo ./install_media.sh -cd opt/intel/mediasdk/opensource/libva/2.3.0-10010 -tar xjf libva-2.3.0.tar.bz2 -cd libva-2.3.0 -./autogen.sh -make -j -sudo make install - -# INSTALL FFMPEG -cd ${ORIPATH} && ./install_FFmpeg.sh client - -if [ "${OS}" == \""CentOS Linux"\" ];then - sed -i '15s/v2/v2\ lzma/' ${ORIPATH}/../player/CMakeLists.txt -fi diff --git a/src/external/run.sh b/src/external/run.sh deleted file mode 100755 index 40cefedd..00000000 --- a/src/external/run.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/bash -x - -RES=$1 -TYPE=$2 -IP=$3 - -parameters_usage(){ - echo 'Usage: 1. : [ 4k , 8k ]' - echo ' 2. : [ live , static ]' - echo ' 3. : IP of this machine.' -} - -if [ "${RES}" = "-h" ] || [ $# != 3 ] ; then - parameters_usage - exit 0 -fi -if [ "${RES}" != "4k" ] && [ "${RES}" != "8k" ] ; then - parameters_usage - exit 0 -fi -if [ "${TYPE}" != "live" ] && [ "${TYPE}" != "static" ] ; then - parameters_usage - exit 0 -fi - -if [ ! -d "../build/server/ffmpeg" ] - echo 'Build server first' - exit 0 -else - cd ../build/server/ffmpeg -fi - -ffmpeg_4k_live(){ - ./ffmpeg -re -stream_loop -1 -i $1 -input_type 1 -rc 1 -c:v:0 distributed_encoder -s:0 3840x1920 -tile_row:0 6 -tile_column:0 10 -config_file:0 config_high.txt -la_depth:0 0 -r:0 30 -g:0 15 -b:0 30M -map 0:v -c:v:1 distributed_encoder -s:1 1024x640 -tile_row:1 2 -tile_column:1 4 -config_file:1 config_low.txt -la_depth:1 0 -r:1 30 -g:1 15 -b:1 5M -map 0:v -vframes 10000 -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -window_size 20 -extra_window_size 30 -base_url https://$2:443/live4k/ -out_name Test /usr/local/nginx/html/live4k/ -} - -ffmpeg_4k_static(){ - numactl -c 1 ./ffmpeg -stream_loop -1 -i $1 -input_type 1 -rc 1 -r 30 -c:v:0 distributed_encoder -s:0 3840x1920 -g:0 15 -tile_row:0 6 -tile_column:0 10 -la_depth:0 0 -config_file:0 config_high.txt -b:0 30M -map 0:v -c:v:1 distributed_encoder -s:1 1024x640 -sws_flags neighbor -g:1 15 -tile_row:1 1 -tile_column:1 4 -la_depth:1 0 -config_file:1 config_low.txt -b:1 2M -map 0:v -vframes 500 -f omaf_packing -is_live 0 -split_tile 1 -seg_duration 1 -base_url https://$2:443/static4k/ -out_name Test /usr/local/nginx/html/static4k/ -} - -ffmpeg_8k_live(){ - numactl -c 1 ./ffmpeg -re -stream_loop -1 -i $1 -input_type 1 -rc 1 -c:v:0 distributed_encoder -s:0 7680x3840 -g:0 25 -tile_row:0 6 -tile_column:0 12 -config_file:0 config_high.txt -b:0 50M -map 0:v -c:v:1 distributed_encoder -s:1 1280x1280 -sws_flags neighbor -g:1 25 -tile_row:1 2 -tile_column:1 2 -config_file:1 config_low.txt -b:1 2M -map 0:v -vframes 1000000 -f omaf_packing -is_live 1 -split_tile 1 -seg_duration 1 -extractors_per_thread 5 -base_url https://$2:443/live8k/ -out_name Test /usr/local/nginx/html/live8k/ -} - -ffmpeg_8k_static(){ - ./ffmpeg -re -stream_loop -1 -f rawvideo 
-video_size 3840x1920 -i $1 -input_type 1 -rc 1 -vframes 1000 -c:v:0 distributed_encoder -s:0 3840x1920 -tile_row:0 6 -tile_column:0 10 -config_file:0 config_high.txt -la_depth:0 0 -r:0 30 -g:0 15 -b:0 30M -map 0:v -c:v:1 distributed_encoder -s:1 1024x640 -tile_row:1 2 -tile_column:1 4 -config_file:1 config_low.txt -la_depth:1 0 -r:1 30 -g:1 15 -b:1 0.5M -map 0:v -f omaf_packing -is_live 0 -split_tile 1 -seg_duration 1 -base_url https://$2:443/static8k/ -out_name Test /usr/local/nginx/html/static8k/ -} - -export LD_LIBRARY_PATH=/usr/local/lib/:/usr/local/lib64:$LD_LIBRARY_PATH - -if [ "${RES}" = "4k" ] ; then - - VIDEO="../../../Sample-Videos/test1_h265_3840x2048_30fps_30M_200frames.mp4" - echo "ip 127.0.0.1 port 9089" > config_low.txt - echo "ip 127.0.0.1 port 9090" > config_high.txt - - if [ "${TYPE}" = "live" ] ; then - ffmpeg_4k_live ${VIDEO} ${IP} - else - ffmpeg_4k_static ${VIDEO} ${IP} - fi -else - - VIDEO="../../../Sample-Videos/test1_h265_8k_25fps_60M_100frames.mp4" - echo "ip 127.0.0.1 port 9089 numa 1" > config_low.txt - echo "ip 127.0.0.1 port 9090 numa 2" > config_high.txt - - if [ "${TYPE}" = "live" ] ; then - ffmpeg_8k_live ${VIDEO} ${IP} - else - ffmpeg_8k_static ${VIDEO} ${IP} - fi -fi diff --git a/src/external/run_server.sh b/src/external/run_server.sh deleted file mode 100755 index a29424b6..00000000 --- a/src/external/run_server.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -cd ../build/server/ffmpeg -echo "ip 127.0.0.1 port 9089" > config_low.txt -echo "ip 127.0.0.1 port 9090" > config_high.txt -export LD_LIBRARY_PATH=/usr/local/lib/:/usr/local/lib64:$LD_LIBRARY_PATH diff --git a/src/external/run_test.sh b/src/external/run_test.sh index 162ebe7b..a2a34aa4 100755 --- a/src/external/run_test.sh +++ b/src/external/run_test.sh @@ -1,5 +1,7 @@ #!/bin/bash -ex +REPO=$1 +export LD_LIBRARY_PATH=/usr/local/lib/:/usr/local/lib64:$LD_LIBRARY_PATH cd ../build/test # 360SCVP test @@ -11,25 +13,6 @@ cp ../../../360SCVP/test/*265 . cd - -# OmafDashAccess test -################################ -cd OmafDashAccess -curl -H 'X-JFrog-Art-Api: AKCp5dL3Kxmp2PhDfYhT2oFk4SDxJji5H8S38oAqmMSkiD46Ho8uCA282aJJhM9ZqCKLb64bw' -O "https://ubit-artifactory-sh.intel.com/artifactory/immersive_media-sh-local/testfile/segs_for_readertest_0909.tar.gz" && tar zxf segs_for_readertest_0909.tar.gz - -./testMediaSource --gtest_filter=*_static -./testMediaSource --gtest_filter=*_live -./testMediaSource --gtest_filter=*_static_withPredictor -./testMediaSource --gtest_filter=*_live_withPredictor -./testMediaSource --gtest_filter=*_static_changeViewport -./testMediaSource --gtest_filter=*_live_changeViewport -./testMPDParser -./testOmafReader -./testOmafReaderManager - -rm -rf ./segs_for_readertest* - -cd - - # VROmafPacking test ################################ cd VROmafPacking @@ -43,51 +26,65 @@ cp ../../../VROmafPacking/test/*bin . cd - -# distributed_encoder test -################################ - destroy_worker() { - STATUS=$(lsof -i:9090 | tail -n 1 | awk {'print $NF'}) - - while [ "${STATUS}" != "EMPTY" ] - do - if [ "${STATUS}" == "(LISTEN)" ] ; then - echo "LISTEN" - PID=$(lsof -i:9090 | tail -n 1 | awk {'print $2'}) - echo ${PID} - if [ ! -z "${PID}" ] ; then - kill -9 ${PID} - STATUS="EMPTY" - fi - sleep 2s - else - echo "EMPTY" - STATUS="EMPTY" - fi - done + PID=$(pidof WorkerServer_9090) || true + echo ${PID} + if [ ! -z "${PID}" ] ; then + kill -9 ${PID} + fi + sleep 2s } -cd distributed_encoder -cp ../../../distributed_encoder/test/*265 . -cp ../../../distributed_encoder/test/*264 . 
-cp ../../../distributed_encoder/test/*yuv . -cp ../../../distributed_encoder/test/*bin . -cp ../../../distributed_encoder/test/*txt . - -export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH -export LD_LIBRARY_PATH=/usr/local/lib64:$LD_LIBRARY_PATH - -destroy_worker -./testMainEncoder -destroy_worker -./testWorkSession -destroy_worker -./testDecoder -destroy_worker -./testSubEncoder -destroy_worker -./testEncoder -# ./testSubEncoderManager +if [ "${REPO}" = "oss" ] ; then + + # OmafDashAccess test + ################################ + cd OmafDashAccess + curl -H 'X-JFrog-Art-Api: AKCp5dL3Kxmp2PhDfYhT2oFk4SDxJji5H8S38oAqmMSkiD46Ho8uCA282aJJhM9ZqCKLb64bw' -O "https://ubit-artifactory-sh.intel.com/artifactory/immersive_media-sh-local/testfile/segs_for_readertest_0909.tar.gz" && tar zxf segs_for_readertest_0909.tar.gz + + ./testMediaSource --gtest_filter=*_static + ./testMediaSource --gtest_filter=*_live + ./testMediaSource --gtest_filter=*_static_withPredictor + ./testMediaSource --gtest_filter=*_live_withPredictor + ./testMediaSource --gtest_filter=*_static_changeViewport + ./testMediaSource --gtest_filter=*_live_changeViewport + # local dash file will be deleted after release. + curl -H 'X-JFrog-Art-Api: AKCp5dL3Kxmp2PhDfYhT2oFk4SDxJji5H8S38oAqmMSkiD46Ho8uCA282aJJhM9ZqCKLb64bw' -O "https://ubit-artifactory-sh.intel.com/artifactory/immersive_media-sh-local/testfile/segs_for_readertest_0909.tar.gz" && tar zxf segs_for_readertest_0909.tar.gz + ./testMPDParser + curl -H 'X-JFrog-Art-Api: AKCp5dL3Kxmp2PhDfYhT2oFk4SDxJji5H8S38oAqmMSkiD46Ho8uCA282aJJhM9ZqCKLb64bw' -O "https://ubit-artifactory-sh.intel.com/artifactory/immersive_media-sh-local/testfile/segs_for_readertest_0909.tar.gz" && tar zxf segs_for_readertest_0909.tar.gz + ./testOmafReader + curl -H 'X-JFrog-Art-Api: AKCp5dL3Kxmp2PhDfYhT2oFk4SDxJji5H8S38oAqmMSkiD46Ho8uCA282aJJhM9ZqCKLb64bw' -O "https://ubit-artifactory-sh.intel.com/artifactory/immersive_media-sh-local/testfile/segs_for_readertest_0909.tar.gz" && tar zxf segs_for_readertest_0909.tar.gz + ./testOmafReaderManager + + rm -rf ./segs_for_readertest* + + cd - + + # distributed_encoder test + ################################ + + cd distributed_encoder + cp ../../../distributed_encoder/test/*265 . + cp ../../../distributed_encoder/test/*264 . + cp ../../../distributed_encoder/test/*yuv . + cp ../../../distributed_encoder/test/*bin . + cp ../../../distributed_encoder/test/*txt . 
+ + export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:/usr/local/lib64/pkgconfig:$PKG_CONFIG_PATH + export LD_LIBRARY_PATH=/usr/local/lib64:$LD_LIBRARY_PATH + + destroy_worker + ./testMainEncoder + destroy_worker + ./testWorkSession + destroy_worker + ./testDecoder + destroy_worker + ./testSubEncoder + destroy_worker + ./testEncoder + # ./testSubEncoderManager +fi cd - diff --git a/src/ffmpeg/dependency/DistributedEncoder.pc b/src/ffmpeg/dependency/DistributedEncoder.pc index e771ced3..e4530b67 100644 --- a/src/ffmpeg/dependency/DistributedEncoder.pc +++ b/src/ffmpeg/dependency/DistributedEncoder.pc @@ -7,4 +7,4 @@ Name:DistributedEncoder Description: Distributed Encoder with remote or local workers Version:0.0.1-DEV Cflags: -I${prefix}/include -Libs: -L${libdir} -lDistributedEncoder -lstdc++ -l360SCVP -lthrift -lthriftnb -levent -lglog -lpthread -L/usr/local/lib64 +Libs: -L${libdir} -lDistributedEncoder -lstdc++ -l360SCVP -lthrift -lthriftnb -levent -lglog -lpthread -lavcodec -lavformat -lavutil -L/usr/local/lib64 diff --git a/src/ffmpeg/dependency/DistributedEncoderAPI.h b/src/ffmpeg/dependency/DistributedEncoderAPI.h index b85c7c50..cfedbab9 100644 --- a/src/ffmpeg/dependency/DistributedEncoderAPI.h +++ b/src/ffmpeg/dependency/DistributedEncoderAPI.h @@ -117,6 +117,7 @@ typedef enum { EncoderType_None = 0, EncoderType_SVTHEVC, + EncoderType_Multiple_SVTHEVC, }EncoderType; //! @@ -161,13 +162,16 @@ typedef struct ENCODERPARAM{ uint32_t hierarchical_level; //!< the hierarchical level for to construct GOP uint32_t intra_period; //!< the distance between two adjacent intra frame uint32_t la_depth; //!< the number of frames that used for look ahead - uint32_t enc_mode; //!< the preset for quality and performance balance,[0-12], 0 is best quality, 12 is best performance + uint32_t enc_mode; //!< the preset for quality and performance balance, + //!< [0-12], 0 is best quality, 12 is best performance uint32_t rc_mode; //!< rate control mode, 0 is CQP mode and 1 is VBR mode uint32_t qp; //!< quantization value under CQP mode uint32_t bit_rate; //!< bitrate value under VBR mode uint32_t scd; //!< scene change detection flag - uint32_t tune; //!< specific encoder tuning, 0 is visually optimized mode, 1 is PSNR/SSIM optimized mode, 2 is VMAF optimized mode - uint32_t profile; //!< the profile to create bitstream, 1 is Main with 8 bit depth, 2 is Main 10 with 8-10 bit depth + uint32_t tune; //!< specific encoder tuning, 0 is visually optimized mode, + //!< 1 is PSNR/SSIM optimized mode, 2 is VMAF optimized mode + uint32_t profile; //!< the profile to create bitstream, 1 is Main with 8 bit depth, + //!< 2 is Main 10 with 8-10 bit depth uint32_t base_layer_switch_mode; //!< decide use P or B frame in base layer, 0 is B frame, 1 is P frame uint32_t intra_refresh_type; //!< the type of intra frame refresh, 1 is CRA, 2 is IDR intra refresh type uint32_t tier; //!< limitation for max bitrate and max buffer size @@ -182,6 +186,9 @@ typedef struct ENCODERPARAM{ uint8_t MCTS_enable; //!< motion vector constrains flag uint8_t tile_columnCnt; //!< tile column count when tile is enabled uint8_t tile_rowCnt; //!< tile row count when tile is enabled + int8_t target_socket; //!< Target socket to run on + bool in_parallel; //!< multiple tiles encoding in parallel + bool local_mode; //!< flag of local mode for encoder }EncoderParam; typedef struct INPUTFRAME{ @@ -250,6 +257,8 @@ typedef struct ENCODEROPTION{ typedef struct CODECAPPOPTION{ DecoderOption decOption; EncoderOption encOption; + void *logFunction; //!< 
External log callback function pointer, NULL if external log is not used + uint32_t minLogLevel; //!< Minimal log level of output }CodecAppOption; //! @@ -258,10 +267,11 @@ typedef struct CODECAPPOPTION{ //! typedef struct DISTRIBUTEDENCODERPARAM{ StreamInfo streamInfo; //!< Information of input stream - EncoderParam encoderParams; //!< parameters for encoding - DispatchType type; //!< task dispatch type + EncoderParam encoderParams; //!< Parameters for encoding + DispatchType type; //!< Task dispatch type SupplementalEnhancementInfo suppleEnhanceInfo; //!< Supplemental Enhancement Information - CodecAppOption codecOption; //!< decoder/encoder choice and the settings + CodecAppOption codecOption; //!< Choice and the settings of decoder/encoder + bool glogInitialized; //!< Whether glog has been initialized }DistributedEncoderParam; #ifdef __cplusplus @@ -324,7 +334,8 @@ bool DistributedEncoder_NeedMemCpyForInput(DEHandle handle); //! \return DEStatus //! DE_STATUS_SUCCESS if success, else fail reason //! -DEStatus DistributedEncoder_GetPacket(DEHandle handle, char** pktData, uint64_t* pktSize, int64_t* pktPTS, int64_t* pktDTS, bool* eos); +DEStatus DistributedEncoder_GetPacket(DEHandle handle, char** pktData, uint64_t* pktSize, + int64_t* pktPTS, int64_t* pktDTS, bool* eos); //! //! \brief Set parameter to distributed encoder @@ -356,6 +367,19 @@ DEStatus DistributedEncoder_SetParam(DEHandle handle, uint32_t type, uint64_t pa //! DEStatus DistributedEncoder_GetParam(DEHandle handle, ParamType type, void** param); +//! +//! \brief Set the logcallback funciton +//! +//! \param [in] deHandle +//! Distributed encoder library handle +//! \param [in] externalLog +//! The customized logging callback function pointer +//! +//! \return DEStatus +//! DE_STATUS_SUCCESS if success, else fail reason +//! +DEStatus DistributedEncoder_SetLogCallBack(void* deHandle, void* externalLog); + //! //! \brief Close all the connections and clean all resources //! 
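
The DistributedEncoderAPI.h hunk above introduces a log-redirection hook: the `logFunction` and `minLogLevel` fields in `CodecAppOption`, plus the new `DistributedEncoder_SetLogCallBack()` entry point. A minimal caller-side sketch follows; the callback signature mirrors `de_log_callback()` added later in this patch (distributed_encoder.c), while the registration flow and the `my_log`/`attach_logger` names are illustrative assumptions, not part of the patch, and `LogLevel`/`LOG_*` are taken from the library's `common_data.h` as included there.

```c
/* Sketch only: hook an application logger into the distributed encoder.
 * Assumes a DEHandle already obtained through the library's init path. */
#include <stdarg.h>
#include <stdint.h>
#include <stdio.h>
#include "DistributedEncoderAPI.h"
#include "common_data.h"   /* LogLevel, LOG_* — per the include added in this patch */

/* Same shape as de_log_callback() in distributed_encoder.c further down. */
static void my_log(LogLevel level, const char *file, uint64_t line, const char *fmt, ...)
{
    va_list vl;
    va_start(vl, fmt);
    if (level == LOG_ERROR || level == LOG_FATAL) {   /* caller-side filtering */
        fprintf(stderr, "[DE %s:%llu] ", file, (unsigned long long)line);
        vfprintf(stderr, fmt, vl);
    }
    va_end(vl);
}

static void attach_logger(DEHandle handle)
{
    /* The API takes the callback as an opaque pointer. */
    if (DistributedEncoder_SetLogCallBack((void *)handle, (void *)my_log) != DE_STATUS_SUCCESS)
        fprintf(stderr, "DistributedEncoder_SetLogCallBack failed\n");
}
```

This matches how the FFmpeg plugin in this patch wires its own `de_log_callback()` through `codecOption.logFunction` when `external_log_flag` is set; an application linking the library directly would use the explicit setter as above.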
diff --git a/src/ffmpeg/dependency/WorkerServer b/src/ffmpeg/dependency/WorkerServer index 86375d03..97a5764c 100755 Binary files a/src/ffmpeg/dependency/WorkerServer and b/src/ffmpeg/dependency/WorkerServer differ diff --git a/src/ffmpeg/dependency/libDistributedEncoder.so b/src/ffmpeg/dependency/libDistributedEncoder.so index c90a6c6e..56609fe4 100755 Binary files a/src/ffmpeg/dependency/libDistributedEncoder.so and b/src/ffmpeg/dependency/libDistributedEncoder.so differ diff --git a/src/ffmpeg/dependency/libEncoder.so b/src/ffmpeg/dependency/libEncoder.so index ef23a91c..f73c074c 100755 Binary files a/src/ffmpeg/dependency/libEncoder.so and b/src/ffmpeg/dependency/libEncoder.so differ diff --git a/src/ffmpeg/dependency/libstdc++.so.6.0.22 b/src/ffmpeg/dependency/libstdc++.so.6.0.22 new file mode 100755 index 00000000..c4062acd Binary files /dev/null and b/src/ffmpeg/dependency/libstdc++.so.6.0.22 differ diff --git a/src/ffmpeg/patches/FFmpeg_OMAF.patch b/src/ffmpeg/patches/FFmpeg_OMAF.patch index 17c24ee9..fd4ae5e9 100644 --- a/src/ffmpeg/patches/FFmpeg_OMAF.patch +++ b/src/ffmpeg/patches/FFmpeg_OMAF.patch @@ -1,70 +1,70 @@ -diff --git a/FFmpeg/Changelog b/FFmpeg/Changelog -index 0343ce9..f1c70df 100644 ---- a/FFmpeg/Changelog -+++ b/FFmpeg/Changelog -@@ -33,6 +33,8 @@ version : - - ilbc decoder - - audio denoiser as afftdn filter - - AV1 parser -+- add SVT(scalable Video Technology) HEVC encoder -+- add tiled-base distribute encoder based on SVT & x265 - - SER demuxer - - sinc audio filter source - - chromahold filter -diff --git a/FFmpeg/configure b/FFmpeg/configure -index 85d5dd5..c168651 100755 ---- a/FFmpeg/configure -+++ b/FFmpeg/configure -@@ -262,6 +262,15 @@ External library support: - --enable-libspeex enable Speex de/encoding via libspeex [no] - --enable-libsrt enable Haivision SRT protocol via libsrt [no] - --enable-libssh enable SFTP protocol via libssh [no] -+ --enable-libsvthevc enable HEVC encodig via SVT [no] +diff -urN FFmpeg/configure FFmpeg-patched/configure +--- FFmpeg/configure 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/configure 2020-09-27 13:35:13.977526503 +0800 +@@ -252,6 +252,7 @@ + --enable-libopencv enable video filtering via libopencv [no] + --enable-libopenh264 enable H.264 encoding via OpenH264 [no] + --enable-libopenjpeg enable JPEG 2000 de/encoding via OpenJPEG [no] ++ --enable-libopenhevc enable HEVC decoding via OpenHEVC [no] + --enable-libopenmpt enable decoding tracked files via libopenmpt [no] + --enable-libopus enable Opus de/encoding via libopus [no] + --enable-libpulse enable Pulseaudio input via libpulse [no] +@@ -293,6 +294,16 @@ + --enable-libxcb-shape enable X11 grabbing shape rendering [autodetect] + --enable-libxvid enable Xvid encoding via xvidcore, + native MPEG-4/Xvid encoder exists [no] ++ --enable-libsvthevc enable HEVC encodig via SVT [no] + --enable-libDistributedEncoder enable Distributed Encoder [no] -+ --enable-hevctile enable HEVC tile encodig via SVT or x265 [no] -+ --enable-libstitch enable HEVC tile stitch library [no] -+ --enable-libgpac enable DASH mux with libgpac [no] -+ --enable-libVROmafPacking enable OMAF Compliance packing muxer [no] -+ --enable-libVRDashStreaming enable VR DASH streaming and demuxer library [no] -+ --enable-libOmafDashAccess enable OMAF Compliance DASH streaming and demuxer library [no] -+ --enable-libtransform360 enable Transform360 to transform projection from ERP to cubemap [no] - --enable-libtensorflow enable TensorFlow as a DNN module backend - for DNN based filters like sr [no] - 
--enable-libtesseract enable Tesseract, needed for ocr filter [no] -@@ -1664,6 +1673,7 @@ EXTERNAL_LIBRARY_GPL_LIST=" - libcdio ++ --enable-hevctile enable HEVC tile encodig via SVT or x265 [no] ++ --enable-libstitch enable HEVC tile stitch library [no] ++ --enable-libgpac enable DASH mux with libgpac [no] ++ --enable-libVROmafPacking enable OMAF Compliance packing muxer [no] ++ --enable-libVRDashStreaming enable VR DASH streaming and demuxer library [no] ++ --enable-libOmafDashAccess enable OMAF Compliance DASH streaming and demuxer library [no] ++ --enable-libtransform360 enable Transform360 to transform projection from ERP to cubemap [no] ++ --enable-libxcam enable image processing via xcam [no] + --enable-libxml2 enable XML parsing using the C library libxml2, needed + for dash demuxing support [no] + --enable-libzimg enable z.lib, needed for zscale filter [no] +@@ -1721,6 +1732,7 @@ libdavs2 librubberband -+ libsvthevc libvidstab ++ libsvthevc libx264 libx265 -@@ -1725,6 +1735,7 @@ EXTERNAL_LIBRARY_LIST=" + libxavs +@@ -1783,6 +1795,13 @@ libjack libklvanc libkvazaar + libgpac - libmodplug - libmp3lame - libmysofa -@@ -1746,10 +1757,16 @@ EXTERNAL_LIBRARY_LIST=" - libtensorflow - libtesseract - libtheora + libstitch + libDistributedEncoder + libtransform360 - libtwolame - libv4l2 - libvorbis - libvpx + libVRDashStreaming + libOmafDashAccess + libVROmafPacking + libmodplug + libmp3lame + libmysofa +@@ -1791,6 +1810,7 @@ + libopenjpeg + libopenmpt + libopus ++ libopenhevc + libpulse + librabbitmq + librav1e +@@ -1812,6 +1832,7 @@ + libvpx libwavpack libwebp ++ libxcam libxml2 -@@ -3080,6 +3097,9 @@ pcm_mulaw_at_encoder_select="audio_frame_queue" + libzimg + libzmq +@@ -3194,8 +3215,13 @@ chromaprint_muxer_deps="chromaprint" h264_videotoolbox_encoder_deps="pthreads" h264_videotoolbox_encoder_select="videotoolbox_encoder" @@ -73,35 +73,44 @@ index 85d5dd5..c168651 100755 +hevc_tile_encoder_deps="libsvthevc libstitch" hevc_videotoolbox_encoder_deps="pthreads" hevc_videotoolbox_encoder_select="videotoolbox_encoder" ++libopenhevc_decoder_deps="libopenhevc" ++libsvt_hevc_encoder_deps="libsvthevc" libaom_av1_decoder_deps="libaom" -@@ -3217,6 +3237,9 @@ spdif_muxer_select="adts_header" + libaom_av1_encoder_deps="libaom" + libaom_av1_encoder_select="extract_extradata_bsf" +@@ -3343,9 +3369,14 @@ spx_muxer_select="ogg_muxer" swf_demuxer_suggest="zlib" tak_demuxer_select="tak_parser" +tile_dash_muxer_select="libgpac" +tile_dash_demuxer_select="libOmafDashAccess" +omaf_packing_muxer_select="libVROmafPacking" + truehd_demuxer_select="mlp_parser" tg2_muxer_select="mov_muxer" tgp_muxer_select="mov_muxer" ++tile_dash_demuxer_select="libOmafDashAccess" ++omaf_packing_muxer_select="libVROmafPacking" vobsub_demuxer_select="mpegps_demuxer" -@@ -3466,6 +3489,8 @@ pixfmts_super2xsai_test_deps="super2xsai_filter" + w64_demuxer_select="wav_demuxer" + w64_muxer_select="wav_muxer" +@@ -3559,6 +3590,7 @@ + overlay_qsv_filter_deps="libmfx" + overlay_qsv_filter_select="qsvvpp" + overlay_vulkan_filter_deps="vulkan libglslang" ++xcam_filter_deps="libxcam" + owdenoise_filter_deps="gpl" + pad_opencl_filter_deps="opencl" + pan_filter_deps="swresample" +@@ -3613,6 +3645,8 @@ tinterlace_filter_deps="gpl" tinterlace_merge_test_deps="tinterlace_filter" tinterlace_pad_test_deps="tinterlace_filter" +transform360_filter_deps="libtransform360" +transform360_filter_select="libtransform360" tonemap_filter_deps="const_nan" + tonemap_vaapi_filter_deps="vaapi VAProcFilterParameterBufferHDRToneMapping" 
tonemap_opencl_filter_deps="opencl const_nan" - unsharp_opencl_filter_deps="opencl" -@@ -3481,6 +3506,7 @@ scale_vaapi_filter_deps="vaapi" - vpp_qsv_filter_deps="libmfx" - vpp_qsv_filter_select="qsvvpp" - -+ - # examples - avio_dir_cmd_deps="avformat avutil" - avio_reading_deps="avformat avcodec avutil" -@@ -6080,6 +6106,10 @@ enabled libgme && { check_pkg_config libgme libgme gme/gme.h gme_new_ +@@ -6314,6 +6348,13 @@ enabled libgsm && { for gsm_hdr in "gsm.h" "gsm/gsm.h"; do check_lib libgsm "${gsm_hdr}" gsm_create -lgsm && break; done || die "ERROR: libgsm not found"; } @@ -109,10 +118,21 @@ index 85d5dd5..c168651 100755 +enabled libVROmafPacking && require_pkg_config libVROmafPacking VROmafPacking VROmafPackingAPI.h VROmafPackingInit +enabled libVRDashStreaming && require_pkg_config libVRDashStreaming VRDashStreaming VRDashStreamingAPI.h DashStreaming_Init +enabled libOmafDashAccess && require_pkg_config libOmafDashAccess OmafDashAccess OmafDashAccessApi.h OmafAccess_Init ++enabled libxcam && { check_pkg_config libxcam "libxcam >= 1.4.0" "capi/xcam_handle.h" xcam_create_handle || ++ die "ERROR: libxcam must be installed and version must be >= 1.4.0"; } ++ enabled libilbc && require libilbc ilbc.h WebRtcIlbcfix_InitDecode -lilbc $pthreads_extralibs enabled libklvanc && require libklvanc libklvanc/vanc.h klvanc_context_create -lklvanc enabled libkvazaar && require_pkg_config libkvazaar "kvazaar >= 0.8.1" kvazaar.h kvz_api_get -@@ -6129,6 +6159,10 @@ enabled libsoxr && require libsoxr soxr.h soxr_create -lsoxr +@@ -6344,6 +6385,7 @@ + require libopencv opencv2/core/core_c.h cvCreateImageHeader -lopencv_core -lopencv_imgproc; } || + require_pkg_config libopencv opencv opencv/cxcore.h cvCreateImageHeader; } + enabled libopenh264 && require_pkg_config libopenh264 openh264 wels/codec_api.h WelsGetCodecVersion ++enabled libopenhevc && require libopenhevc libopenhevc/openhevc.h oh_decode -lopenhevc -lm + enabled libopenjpeg && { check_pkg_config libopenjpeg "libopenjp2 >= 2.1.0" openjpeg.h opj_version || + { require_pkg_config libopenjpeg "libopenjp2 >= 2.1.0" openjpeg.h opj_version -DOPJ_STATIC && add_cppflags -DOPJ_STATIC; } } + enabled libopenmpt && require_pkg_config libopenmpt "libopenmpt >= 0.2.6557" libopenmpt/libopenmpt.h openmpt_module_create -lstdc++ && append libopenmpt_extralibs "-lstdc++" +@@ -6369,6 +6411,10 @@ enabled libssh && require_pkg_config libssh libssh libssh/sftp.h sftp_init enabled libspeex && require_pkg_config libspeex speex speex/speex.h speex_decoder_init enabled libsrt && require_pkg_config libsrt "srt >= 1.3.0" srt/srt.h srt_socket @@ -123,20 +143,101 @@ index 85d5dd5..c168651 100755 enabled libtensorflow && require libtensorflow tensorflow/c/c_api.h TF_Version -ltensorflow enabled libtesseract && require_pkg_config libtesseract tesseract tesseract/capi.h TessBaseAPICreate enabled libtheora && require libtheora theora/theoraenc.h th_info_init -ltheoraenc -ltheoradec -logg -diff --git a/FFmpeg/fftools/ffmpeg.c b/FFmpeg/fftools/ffmpeg.c -index da4259a..ec42201 100644 ---- a/FFmpeg/fftools/ffmpeg.c -+++ b/FFmpeg/fftools/ffmpeg.c -@@ -1327,6 +1327,8 @@ static void do_video_out(OutputFile *of, +diff -urN FFmpeg/doc/filters.texi FFmpeg-patched/doc/filters.texi +--- FFmpeg/doc/filters.texi 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/doc/filters.texi 2020-09-27 13:35:13.073526575 +0800 +@@ -25732,3 +25732,79 @@ + @end table + + @c man end MULTIMEDIA SOURCES ++ ++@section xcam ++Image processing supported through libXCam. 
++ ++To enable compilation of @var{xcam} filter you need to configure FFmpeg with ++@code{--enable-libxcam}. ++ ++libXCam supports automotive surround view stitching, 360 video stitching, ++digital video stabilization, noise reduction and so on. For more information ++about libxcam see @url{https://github.com/intel/libxcam}. ++ ++@subsection Options ++ ++@table @option ++ ++@item inputs ++The number of inputs. Default is @code{1}. 3dnr, waveletnr, fisheye, defog ++and dvs handlers support one input, stitch and stitchcl handlers support ++dynamic inputs. ++ ++@item w ++Output video width. Default is @code{0}. ++If the value is 0, the corresponding input width is used for the output. ++ ++@item h ++Output video height. Default is @code{0}. ++If the value is 0, the corresponding input height is used for the output. ++ ++@item fmt ++Pixel format. Default is @code{auto}. ++ ++@table @samp ++@item auto ++Negotiate pixel format automatically, selects the input pixel format as the ++processing format. ++@item nv12 ++NV12 format. All handlers support NV12 format. ++@item yuv420 ++YUV420 format. Currently, only @b{soft} stitching supports YUV420 format. ++@end table ++ ++@item name ++Handler name. Default is @code{stitch}. ++ ++@table @samp ++@item 3dnr ++3D denoising ++@item waveletnr ++Wavelet denoising ++@item fisheye ++Fisheye calibration ++@item defog ++Fog removal ++@item dvs ++Digital video stabilizer ++@item stitch ++Soft/GLES/Vulkan stitching, supports automotive surround view stitching and ++360 video stitching. ++@item stitchcl ++OpenCL stitching, supports automotive surround view stitching and 360 video ++stitching. ++@end table ++ ++@item allocoutbuf ++Whether or not to allocate output buffer. Default is @code{1}. ++ ++@item params ++Private parameters for each handler. Currently, only @b{stitch} and ++@b{stitchcl} handlers have private parameters. ++ ++@end table ++ ++@subsection Examples ++ ++For more detailed examples see @url{https://github.com/intel/libxcam/wiki/Tests#1-ffmpeg-xcam}. ++ +diff -urN FFmpeg/fftools/ffmpeg.c FFmpeg-patched/fftools/ffmpeg.c +--- FFmpeg/fftools/ffmpeg.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/fftools/ffmpeg.c 2020-09-27 13:35:13.078526574 +0800 +@@ -1314,6 +1314,7 @@ if (ost->logfile && enc->stats_out) { fprintf(ost->logfile, "%s", enc->stats_out); } -+ + break; } - } - ost->sync_opts++; -@@ -1524,6 +1526,7 @@ static int reap_filters(int flush) + ost->sync_opts++; + /* +@@ -1508,6 +1509,7 @@ } av_frame_unref(filtered_frame); @@ -144,43 +245,10 @@ index da4259a..ec42201 100644 } } -@@ -4091,10 +4094,10 @@ static int init_input_thread(int i) - { - int ret; - InputFile *f = input_files[i]; -- -+//#if 0 - if (nb_input_files == 1) - return 0; -- -+//#endif - if (f->ctx->pb ? 
!f->ctx->pb->seekable : - strcmp(f->ctx->iformat->name, "lavfi")) - f->non_blocking = 1; -@@ -4146,7 +4149,9 @@ static int get_input_packet(InputFile *f, AVPacket *pkt) - } - - #if HAVE_THREADS -+//#if 0 - if (nb_input_files > 1) -+//#endif - return get_input_packet_mt(f, pkt); - #endif - return av_read_frame(f->ctx, pkt); -diff --git a/FFmpeg/fftools/ffplay.c b/FFmpeg/fftools/ffplay.c -index ab1f9fa..c59fab5 100644 ---- a/FFmpeg/fftools/ffplay.c -+++ b/FFmpeg/fftools/ffplay.c -@@ -2922,6 +2922,8 @@ static int read_thread(void *arg) - if (infinite_buffer < 0 && is->realtime) - infinite_buffer = 1; - -+ //TiledDASHDecContext *c = ic->priv_data; -+ //c->mClearBuf = true; - for (;;) { - if (is->abort_request) - break; -@@ -3346,15 +3348,22 @@ static void event_loop(VideoState *cur_stream) +diff -urN FFmpeg/fftools/ffplay.c FFmpeg-patched/fftools/ffplay.c +--- FFmpeg/fftools/ffplay.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/fftools/ffplay.c 2020-09-27 13:35:13.078526574 +0800 +@@ -3355,15 +3355,22 @@ seek_chapter(cur_stream, -1); break; case SDLK_LEFT: @@ -209,7 +277,7 @@ index ab1f9fa..c59fab5 100644 incr = -60.0; do_seek: if (seek_by_bytes) { -@@ -3381,6 +3390,7 @@ static void event_loop(VideoState *cur_stream) +@@ -3390,6 +3397,7 @@ stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0); } break; @@ -217,71 +285,33 @@ index ab1f9fa..c59fab5 100644 default: break; } -diff --git a/FFmpeg/libavcodec/Makefile b/FFmpeg/libavcodec/Makefile -index a97055e..430419d 100644 ---- a/FFmpeg/libavcodec/Makefile -+++ b/FFmpeg/libavcodec/Makefile -@@ -374,6 +374,10 @@ OBJS-$(CONFIG_HEVC_QSV_DECODER) += qsvdec_h2645.o - OBJS-$(CONFIG_HEVC_QSV_ENCODER) += qsvenc_hevc.o hevc_ps_enc.o \ - hevc_data.o - OBJS-$(CONFIG_HEVC_RKMPP_DECODER) += rkmppdec.o -+OBJS-$(CONFIG_HEVC_SVT_ENCODER) += libsvt_hevc.o -+OBJS-$(CONFIG_DISTRIBUTED_ENCODER) += distributed_encoder.o -+OBJS-$(CONFIG_HEVC_TILE_ENCODER) += tile_encoder.o \ -+ tile_encode_svt_impl.o - OBJS-$(CONFIG_HEVC_VAAPI_ENCODER) += vaapi_encode_h265.o h265_profile_level.o - OBJS-$(CONFIG_HEVC_V4L2M2M_DECODER) += v4l2_m2m_dec.o - OBJS-$(CONFIG_HEVC_V4L2M2M_ENCODER) += v4l2_m2m_enc.o -@@ -540,6 +544,8 @@ OBJS-$(CONFIG_RA_144_ENCODER) += ra144enc.o ra144.o celp_filters.o - OBJS-$(CONFIG_RA_288_DECODER) += ra288.o celp_filters.o - OBJS-$(CONFIG_RALF_DECODER) += ralf.o - OBJS-$(CONFIG_RASC_DECODER) += rasc.o -+OBJS-$(CONFIG_HEVC_BYPASSVIDEO_DECODER) += bypass_hevc_decoder.o -+OBJS-$(CONFIG_H264_BYPASSVIDEO_DECODER) += bypass_h264_decoder.o - OBJS-$(CONFIG_RAWVIDEO_DECODER) += rawdec.o - OBJS-$(CONFIG_RAWVIDEO_ENCODER) += rawenc.o - OBJS-$(CONFIG_REALTEXT_DECODER) += realtextdec.o ass.o -diff --git a/FFmpeg/libavcodec/allcodecs.c b/FFmpeg/libavcodec/allcodecs.c -index c0b4d56..e587e11 100644 ---- a/FFmpeg/libavcodec/allcodecs.c -+++ b/FFmpeg/libavcodec/allcodecs.c -@@ -143,6 +143,8 @@ extern AVCodec ff_h264_qsv_decoder; +diff -urN FFmpeg/libavcodec/allcodecs.c FFmpeg-patched/libavcodec/allcodecs.c +--- FFmpeg/libavcodec/allcodecs.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavcodec/allcodecs.c 2020-09-27 13:35:13.111526572 +0800 +@@ -146,7 +146,10 @@ extern AVCodec ff_h264_rkmpp_decoder; extern AVCodec ff_hap_encoder; extern AVCodec ff_hap_decoder; +extern AVCodec ff_hevc_bypassvideo_decoder; +extern AVCodec ff_h264_bypassvideo_decoder; extern AVCodec ff_hevc_decoder; ++extern AVCodec ff_libopenhevc_decoder; extern AVCodec ff_hevc_qsv_decoder; extern AVCodec ff_hevc_rkmpp_decoder; -@@ -745,6 +747,9 @@ 
extern AVCodec ff_hevc_cuvid_decoder; - extern AVCodec ff_hevc_mediacodec_decoder; + extern AVCodec ff_hevc_v4l2m2m_decoder; +@@ -781,6 +784,9 @@ + extern AVCodec ff_hevc_mf_encoder; extern AVCodec ff_hevc_nvenc_encoder; extern AVCodec ff_hevc_qsv_encoder; -+extern AVCodec ff_hevc_svt_encoder; ++extern AVCodec ff_libsvt_hevc_encoder; +extern AVCodec ff_distributed_encoder; +extern AVCodec ff_hevc_tile_encoder; extern AVCodec ff_hevc_v4l2m2m_encoder; extern AVCodec ff_hevc_vaapi_encoder; extern AVCodec ff_hevc_videotoolbox_encoder; -diff --git a/FFmpeg/libavcodec/avcodec.h b/FFmpeg/libavcodec/avcodec.h -index 705a3ce..cb94a67 100644 ---- a/FFmpeg/libavcodec/avcodec.h -+++ b/FFmpeg/libavcodec/avcodec.h -@@ -229,6 +229,8 @@ enum AVCodecID { - AV_CODEC_ID_JPEGLS, - AV_CODEC_ID_MPEG4, - AV_CODEC_ID_RAWVIDEO, -+ AV_CODEC_ID_HEVCBYPASSVIDEO, -+ AV_CODEC_ID_H264BYPASSVIDEO, - AV_CODEC_ID_MSMPEG4V1, - AV_CODEC_ID_MSMPEG4V2, - AV_CODEC_ID_MSMPEG4V3, -diff --git a/FFmpeg/libavcodec/bypass_decoder.h b/FFmpeg/libavcodec/bypass_decoder.h -new file mode 100644 -index 0000000..49b668a ---- /dev/null -+++ b/FFmpeg/libavcodec/bypass_decoder.h +diff -urN FFmpeg/libavcodec/bypass_decoder.h FFmpeg-patched/libavcodec/bypass_decoder.h +--- FFmpeg/libavcodec/bypass_decoder.h 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/bypass_decoder.h 2020-09-27 13:35:13.176526567 +0800 @@ -0,0 +1,63 @@ +/* + * Raw Video Decoder @@ -346,11 +376,9 @@ index 0000000..49b668a +}; + + -diff --git a/FFmpeg/libavcodec/bypass_h264_decoder.c b/FFmpeg/libavcodec/bypass_h264_decoder.c -new file mode 100644 -index 0000000..2092ace ---- /dev/null -+++ b/FFmpeg/libavcodec/bypass_h264_decoder.c +diff -urN FFmpeg/libavcodec/bypass_h264_decoder.c FFmpeg-patched/libavcodec/bypass_h264_decoder.c +--- FFmpeg/libavcodec/bypass_h264_decoder.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/bypass_h264_decoder.c 2020-09-27 13:35:13.176526567 +0800 @@ -0,0 +1,150 @@ +/* + * Raw Video Decoder @@ -502,11 +530,9 @@ index 0000000..2092ace + .priv_class = &bypassdec_class, + .capabilities = AV_CODEC_CAP_PARAM_CHANGE, +}; -diff --git a/FFmpeg/libavcodec/bypass_hevc_decoder.c b/FFmpeg/libavcodec/bypass_hevc_decoder.c -new file mode 100644 -index 0000000..8561d8d ---- /dev/null -+++ b/FFmpeg/libavcodec/bypass_hevc_decoder.c +diff -urN FFmpeg/libavcodec/bypass_hevc_decoder.c FFmpeg-patched/libavcodec/bypass_hevc_decoder.c +--- FFmpeg/libavcodec/bypass_hevc_decoder.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/bypass_hevc_decoder.c 2020-09-27 13:35:13.176526567 +0800 @@ -0,0 +1,150 @@ +/* + * Raw Video Decoder @@ -658,11 +684,10 @@ index 0000000..8561d8d + .priv_class = &bypassdec_class, + .capabilities = AV_CODEC_CAP_PARAM_CHANGE, +}; -diff --git a/FFmpeg/libavcodec/codec_desc.c b/FFmpeg/libavcodec/codec_desc.c -index 67a3054..a810d39 100644 ---- a/FFmpeg/libavcodec/codec_desc.c -+++ b/FFmpeg/libavcodec/codec_desc.c -@@ -127,6 +127,20 @@ static const AVCodecDescriptor codec_descriptors[] = { +diff -urN FFmpeg/libavcodec/codec_desc.c FFmpeg-patched/libavcodec/codec_desc.c +--- FFmpeg/libavcodec/codec_desc.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavcodec/codec_desc.c 2020-09-27 13:35:13.183526566 +0800 +@@ -130,6 +130,20 @@ .props = AV_CODEC_PROP_INTRA_ONLY | AV_CODEC_PROP_LOSSLESS, }, { @@ -683,12 +708,22 @@ index 67a3054..a810d39 100644 .id = AV_CODEC_ID_MSMPEG4V1, .type = AVMEDIA_TYPE_VIDEO, .name = "msmpeg4v1", -diff --git a/FFmpeg/libavcodec/distributed_encoder.c 
b/FFmpeg/libavcodec/distributed_encoder.c -new file mode 100644 -index 0000000..332bf7b ---- /dev/null -+++ b/FFmpeg/libavcodec/distributed_encoder.c -@@ -0,0 +1,539 @@ +diff -urN FFmpeg/libavcodec/codec_id.h FFmpeg-patched/libavcodec/codec_id.h +--- FFmpeg/libavcodec/codec_id.h 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavcodec/codec_id.h 2020-09-27 13:35:13.390526550 +0800 +@@ -60,6 +60,8 @@ + AV_CODEC_ID_JPEGLS, + AV_CODEC_ID_MPEG4, + AV_CODEC_ID_RAWVIDEO, ++ AV_CODEC_ID_HEVCBYPASSVIDEO, ++ AV_CODEC_ID_H264BYPASSVIDEO, + AV_CODEC_ID_MSMPEG4V1, + AV_CODEC_ID_MSMPEG4V2, + AV_CODEC_ID_MSMPEG4V3, +diff -urN FFmpeg/libavcodec/distributed_encoder.c FFmpeg-patched/libavcodec/distributed_encoder.c +--- FFmpeg/libavcodec/distributed_encoder.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/distributed_encoder.c 2020-09-27 13:35:13.193526565 +0800 +@@ -0,0 +1,699 @@ +/* +* Scalable Video Technology for distributed encoder library plugin +* @@ -718,6 +753,7 @@ index 0000000..332bf7b + +#include "DistributedEncoderAPI.h" +#include "error_code.h" ++#include "common_data.h" + +#include "libavutil/common.h" +#include "libavutil/frame.h" @@ -726,57 +762,106 @@ index 0000000..332bf7b +#include "internal.h" +#include "avcodec.h" + -+typedef struct DEContext { -+ const AVClass *class; -+ -+ DistributedEncoderParam encode_params; -+ DEHandle handle; -+ const char* configFile; -+ InputStreamType input_type; -+ int inputCodec; -+ bool send_end; -+ bool eos_flag; -+ -+ // User options. -+ int vui_info; -+ int hierarchical_level; -+ int la_depth; -+ int enc_mode; -+ int rc_mode; -+ int scd; -+ int tune; -+ int qp; -+ int hdr; -+ int asm_type; ++static bool glog_initialized = false; ++static int min_loglevel = 2; + -+ int forced_idr; ++static void de_log_callback(LogLevel log_level, const char* file_name, uint64_t line_num, const char* fmt, ...) ++{ ++ va_list vl; ++ va_start(vl, fmt); + -+ int aud; ++ switch (log_level) ++ { ++ case LOG_INFO: ++ { ++ if(min_loglevel == 0) ++ { ++ av_vlog(NULL, AV_LOG_INFO, fmt, vl); ++ } ++ break; ++ } ++ case LOG_WARNING: ++ { ++ if(min_loglevel <= 1) ++ { ++ av_vlog(NULL, AV_LOG_WARNING, fmt, vl); ++ } ++ break; ++ } ++ case LOG_ERROR: ++ { ++ if(min_loglevel <= 2) ++ { ++ av_vlog(NULL, AV_LOG_ERROR, fmt, vl); ++ } ++ break; ++ } ++ case LOG_FATAL: ++ { ++ if(min_loglevel <= 3) ++ { ++ av_vlog(NULL, AV_LOG_FATAL, fmt, vl); ++ } ++ break; ++ } ++ default: ++ { ++ av_log(NULL, AV_LOG_ERROR, "Invalid log level !"); ++ break; ++ } ++ } ++ va_end(vl); ++} + -+ int profile; -+ int tier; -+ int level; ++typedef struct DEContext { ++ const AVClass *class; + -+ int base_layer_switch_mode; ++ DistributedEncoderParam encode_params; ++ DEHandle handle; ++ const char* configFile; ++ InputStreamType input_type; ++ int inputCodec; ++ bool send_end; ++ bool eos_flag; + -+ int tile_row; -+ int tile_column; -+ uint64_t frame_number; ++ // User options. 
++ int vui_info; ++ int hierarchical_level; ++ int la_depth; ++ int enc_mode; ++ int rc_mode; ++ int scd; ++ int tune; ++ int qp; ++ int hdr; ++ int asm_type; ++ int forced_idr; ++ int aud; ++ int profile; ++ int tier; ++ int level; ++ int base_layer_switch_mode; ++ int tile_row; ++ int tile_column; ++ uint64_t frame_number; ++ bool in_parallel; ++ bool external_log_flag; ++ int min_log_level; ++ const char* proj_type; +} DEContext; + +static int set_enc_params(AVCodecContext *avctx, DistributedEncoderParam *DEparams) +{ -+ DEContext *deCxt = avctx->priv_data; ++ DEContext *deCxt = avctx->priv_data; + + memset(DEparams, 0, sizeof(DistributedEncoderParam)); -+ -+ EncoderParam params; ++ EncoderParam params; + memset(¶ms, 0, sizeof(EncoderParam)); + params.bit_depth = 8; + params.format = PixelColor_YUV420; + params.vui_info = deCxt->vui_info; + params.hierarchical_level = deCxt->hierarchical_level; -+ if (avctx->gop_size > 0) { ++ if(avctx->gop_size > 0) { + params.intra_period = avctx->gop_size - 1; + } + params.la_depth = deCxt->la_depth; @@ -793,11 +878,12 @@ index 0000000..332bf7b + params.level = deCxt->level; + params.aud = deCxt->aud; + params.asm_type = deCxt->asm_type; -+ if (avctx->framerate.num > 0 && avctx->framerate.den > 0) { ++ if(avctx->framerate.num > 0 && avctx->framerate.den > 0) { + params.framerate_num = avctx->framerate.num; + params.framerate_den = avctx->framerate.den * avctx->ticks_per_frame; + } -+ else{ ++ else ++ { + params.framerate_num = avctx->time_base.den; + params.framerate_den = avctx->time_base.num * avctx->ticks_per_frame; + } @@ -807,18 +893,51 @@ index 0000000..332bf7b + // tile related setting + params.tile_columnCnt = deCxt->tile_column; + params.tile_rowCnt = deCxt->tile_row; ++ params.target_socket = -1; ++ params.local_mode = false; + params.MCTS_enable = (params.tile_columnCnt * params.tile_rowCnt) > 1 ? 
1 : 0; ++ ++ if(deCxt->in_parallel && params.MCTS_enable) ++ params.in_parallel = true; ++ else ++ params.in_parallel = false; ++ + memcpy(&(DEparams->encoderParams), ¶ms, sizeof(EncoderParam)); + + DEparams->type = ResourceBalanced; + -+ StreamInfo sInfo; ++ StreamInfo sInfo; + memset(&sInfo, 0, sizeof(StreamInfo)); + sInfo.frameWidth = avctx->width; + sInfo.frameHeight = avctx->height; + sInfo.tileUniformSpacing = true; + sInfo.tileColumn = deCxt->tile_column; + sInfo.tileRow = deCxt->tile_row; ++ if(sInfo.frameWidth % sInfo.tileColumn) ++ { ++ av_log(avctx, AV_LOG_ERROR, ++ "Frame Width can't be divided by tile column number \n"); ++ return -1; ++ } ++ if(((sInfo.frameWidth/sInfo.tileColumn)%64) && (sInfo.tileColumn != 1)) ++ { ++ av_log(avctx, AV_LOG_ERROR, ++ "Tile width can't be divided by 64 \n"); ++ return -1; ++ } ++ if(sInfo.frameHeight % sInfo.tileRow) ++ { ++ av_log(avctx, AV_LOG_ERROR, ++ "Frame Height can't be divided by tile row number \n"); ++ return -1; ++ } ++ if(((sInfo.frameHeight/sInfo.tileRow)%64) && (sInfo.tileRow != 1)) ++ { ++ av_log(avctx, AV_LOG_ERROR, ++ "Tile height can't be divided by 64 \n"); ++ return -1; ++ } ++ + sInfo.tileOverlapped = 0; + sInfo.overlapWidth = 0; + sInfo.overlapHeight = 0; @@ -828,7 +947,24 @@ index 0000000..332bf7b + ProjectionInfo projInfo; + memset(&projInfo, 0, sizeof(ProjectionInfo)); + projInfo.enable = true; -+ projInfo.type = E_EQUIRECT_PROJECTION; ++ if(0 == strncmp(deCxt->proj_type, "ERP", 3)) ++ { ++ projInfo.type = E_EQUIRECT_PROJECTION; ++ } ++ else if (0 == strncmp(deCxt->proj_type, "Cube", 4)) ++ { ++ projInfo.type = E_CUBEMAP_PROJECTION; ++ } ++ else if (0 == strncmp(deCxt->proj_type, "Planar", 6)) ++ { ++ projInfo.enable = false; ++ } ++ else ++ { ++ av_log(avctx, AV_LOG_ERROR, ++ "Invalid input source projection type %s \n", deCxt->proj_type); ++ return -1; ++ } + memcpy(&(DEparams->suppleEnhanceInfo.projInfo), &projInfo, sizeof(ProjectionInfo)); + + CodecAppOption codecOption; @@ -840,6 +976,8 @@ index 0000000..332bf7b + { + codecOption.decOption.decType = DecoderType_openHEVC; + ohOption *oh = (ohOption*)malloc(sizeof(ohOption)); ++ if(!oh) ++ return AVERROR(EINVAL); + oh->threadCount = 16; + oh->threadType = 4; + codecOption.decOption.decSetting = (void*)oh; @@ -848,6 +986,10 @@ index 0000000..332bf7b + { + codecOption.decOption.decType = DecoderType_ffmpeg; + ffmpegOption * fo = (ffmpegOption*)malloc(sizeof(ffmpegOption)); ++ if(!fo) ++ { ++ return AVERROR(EINVAL); ++ } + fo->codecID = CodecID_H264; + codecOption.decOption.decSetting = (void*)fo; + } @@ -855,7 +997,23 @@ index 0000000..332bf7b + return AVERROR(EINVAL); + } + -+ codecOption.encOption.encType = EncoderType_SVTHEVC; ++ if(params.in_parallel) ++ codecOption.encOption.encType = EncoderType_Multiple_SVTHEVC; ++ else ++ codecOption.encOption.encType = EncoderType_SVTHEVC; ++ ++ DEparams->glogInitialized = glog_initialized; ++ codecOption.minLogLevel = deCxt->min_log_level; ++ min_loglevel = deCxt->min_log_level; ++ if(deCxt->external_log_flag) ++ { ++ codecOption.logFunction = (void*)(de_log_callback); ++ } ++ else ++ { ++ codecOption.logFunction = NULL; ++ } ++ + memcpy(&(DEparams->codecOption), &codecOption, sizeof(CodecAppOption)); + + return 0; @@ -878,6 +1036,10 @@ index 0000000..332bf7b + { + return AVERROR(EINVAL); + } ++ else ++ { ++ glog_initialized = true; ++ } + + return 0; +} @@ -889,14 +1051,12 @@ index 0000000..332bf7b + InputFrame* inFrame = *inputFrame; + int data_num = isEncoded ? 
1 : 3; + -+ bool needMemCopy = DistributedEncoder_NeedMemCpyForInput(deCxt->handle); -+ + for(int i = 0; i < data_num; i++) + { + int factor = i == 0 ? 1 : 2; + int copy_size = deCxt->input_type == encoded ? frame->linesize[i] : (frame->linesize[i] * enc_params.streamInfo.frameHeight / factor); + -+ if(isEncoded || needMemCopy) ++ if(isEncoded) + { + inFrame->data[i] = (char*)malloc(sizeof(char*) * copy_size); + memcpy(inFrame->data[i], frame->data[i], copy_size); @@ -920,6 +1080,8 @@ index 0000000..332bf7b + + if (!frame) { + InputFrame* lastFrame = (InputFrame*)malloc(sizeof(InputFrame)); ++ if(!lastFrame) ++ return AVERROR(EINVAL); + memset(lastFrame, 0, sizeof(InputFrame)); + lastFrame->data[0] = NULL; + lastFrame->stride[0] = 0; @@ -928,22 +1090,29 @@ index 0000000..332bf7b + lastFrame->format = enc_params.encoderParams.format; + lastFrame->picType = PictureType_NONE; + DistributedEncoder_Process(deCxt->handle, lastFrame); -+ ++ free(lastFrame); + deCxt->send_end = true; + av_log(avctx, AV_LOG_DEBUG, "Finish sending frames!!!\n"); + return 0; + } + + InputFrame* inFrame = (InputFrame*)malloc(sizeof(InputFrame)); ++ if(!inFrame) ++ return AVERROR(EINVAL); + memset(inFrame, 0 , sizeof(InputFrame)); + + if(deCxt->input_type != encoded && deCxt->input_type != raw) ++ { ++ free(inFrame); + return AVERROR(EINVAL); ++ } + + bool isEncoded = (deCxt->input_type == encoded); ++ bool useSharedMem = (deCxt->input_type == raw); + + prepare_input_frame(avctx, isEncoded, &inFrame, frame); + ++ inFrame->useSharedMem = useSharedMem; + inFrame->width = enc_params.streamInfo.frameWidth; + inFrame->height =enc_params.streamInfo.frameHeight ; + inFrame->format = enc_params.encoderParams.format; @@ -962,7 +1131,8 @@ index 0000000..332bf7b + break; + } + DistributedEncoder_Process(deCxt->handle, inFrame); -+ ++ if(inFrame) ++ free(inFrame); + return 0; +} + @@ -986,8 +1156,8 @@ index 0000000..332bf7b + if(!data && !size && !eos && !deCxt->send_end) + { + return AVERROR(EAGAIN); -+// *got_packet = 0; -+ // return 0; ++ // *got_packet = 0; ++ // return 0; + } + + if(!data && !size && deCxt->send_end) @@ -1006,6 +1176,10 @@ index 0000000..332bf7b + return ret; + } + ++ if(!data && !size) ++ { ++ return AVERROR(EAGAIN); ++ } + memcpy(pkt->data, data, size); + + pkt->size = size; @@ -1032,12 +1206,18 @@ index 0000000..332bf7b + if(ret == DE_STATUS_SUCCESS) + { + pkt->side_data = (AVPacketSideData*)malloc(sizeof(AVPacketSideData)); ++ if(!pkt->side_data) ++ { ++ free(header); ++ return AVERROR(EINVAL); ++ } + pkt->side_data->size = header->headerSize; + pkt->side_data->data = av_malloc(pkt->side_data->size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!(pkt->side_data->data)) + { + av_log(avctx, AV_LOG_ERROR, + "Cannot allocate HEVC header of size %d. 
\n", pkt->side_data->size); ++ free(header); + return AVERROR(ENOMEM); + } + memcpy(pkt->side_data->data, header->headerData, pkt->side_data->size); @@ -1113,11 +1293,17 @@ index 0000000..332bf7b +#define OFFSET(x) offsetof(DEContext, x) +#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM +static const AVOption options[] = { -+ { "config_file", "configure file path for workers information", OFFSET(configFile), AV_OPT_TYPE_STRING, { 0 }, 0, 0, VE }, ++ { "config_file", "configure file path for workers information", OFFSET(configFile), ++ AV_OPT_TYPE_STRING, { 0 }, 0, 0, VE }, + -+ { "input_type", "input stream type, 0 - encoded, 1 - raw, default is 0", OFFSET(input_type), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 0xff, VE}, ++ { "proj_type", "input source projection type, ERP or Cubemap", OFFSET(proj_type), ++ AV_OPT_TYPE_STRING, { .str = "ERP" }, 0, 0, VE }, + -+ { "input_codec", "input bitstream type, only work when input type is 0-encoded, 0 - HEVC, 1 - AVC, default is 0", OFFSET(inputCodec), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 0xff, VE}, ++ { "input_type", "input stream type, 0 - encoded, 1 - raw, default is 0", OFFSET(input_type), ++ AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 0xff, VE}, ++ ++ { "input_codec", "input bitstream type, only work when input type is 0-encoded, 0 - HEVC, 1 - AVC, default is 0", OFFSET(inputCodec), ++ AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 0xff, VE}, + + { "vui", "Enable vui info", OFFSET(vui_info), + AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, VE }, @@ -1180,12 +1366,21 @@ index 0000000..332bf7b + { "asm_type", "Assembly instruction set type [0: C Only, 1: Auto]", OFFSET(asm_type), + AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, VE }, + -+ { "tile_column", "tile column count number, default is 1", OFFSET(tile_column), ++ { "tile_column", "Tile column count number, default is 1", OFFSET(tile_column), + AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 256, VE }, + -+ { "tile_row", "tile row count number, default is 1", OFFSET(tile_row), ++ { "tile_row", "Tile row count number, default is 1", OFFSET(tile_row), + AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 256, VE }, + ++ { "in_parallel", "Multiple encoders running in parallel [0: Off, 1: On]", OFFSET(in_parallel), ++ AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE }, ++ ++ { "external_log_flag", "whether external log callback is needed", OFFSET(external_log_flag), ++ AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE }, ++ ++ { "min_log_level", "Minimal log level of output [0: INFO, 1: WARNING, 2: ERROR, 3: FATAL]", OFFSET(min_log_level), ++ AV_OPT_TYPE_INT, { .i64 = 2 }, 0, 3, VE }, ++ + {NULL}, +}; + @@ -1228,11 +1423,33 @@ index 0000000..332bf7b + .caps_internal = FF_CODEC_CAP_INIT_CLEANUP, + .wrapper_name = "distributed_encoder", +}; -diff --git a/FFmpeg/libavcodec/h264_parser.c b/FFmpeg/libavcodec/h264_parser.c -index 5f9a9c4..89d7366 100644 ---- a/FFmpeg/libavcodec/h264_parser.c -+++ b/NNmpeg/libavcodec/h264_parser.c -@@ -706,7 +706,7 @@ static av_cold int init(AVCodecParserContext *s) +diff -urN FFmpeg/libavcodec/encode.c FFmpeg-patched/libavcodec/encode.c +--- FFmpeg/libavcodec/encode.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavcodec/encode.c 2020-09-27 13:35:13.203526565 +0800 +@@ -430,12 +430,13 @@ + int ret; + if (avctx->internal->draining && !(avctx->codec->capabilities & AV_CODEC_CAP_DELAY)) + return AVERROR_EOF; +- ret = avctx->codec->receive_packet(avctx, avpkt); +- if (!ret) +- // Encoders must always return ref-counted buffers. +- // Side-data only packets have no data and can be not ref-counted. 
+- av_assert0(!avpkt->data || avpkt->buf); +- return ret; ++ // ret = avctx->codec->receive_packet(avctx, avpkt); ++ // if (!ret) ++ // // Encoders must always return ref-counted buffers. ++ // // Side-data only packets have no data and can be not ref-counted. ++ // av_assert0(!avpkt->data || avpkt->buf); ++ // return ret; ++ return ret = avctx->codec->receive_packet(avctx, avpkt); + } + + // Emulation via old API. +diff -urN FFmpeg/libavcodec/h264_parser.c FFmpeg-patched/libavcodec/h264_parser.c +--- FFmpeg/libavcodec/h264_parser.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavcodec/h264_parser.c 2020-09-27 13:35:13.224526563 +0800 +@@ -703,7 +703,7 @@ } AVCodecParser ff_h264_parser = { @@ -1241,11 +1458,10 @@ index 5f9a9c4..89d7366 100644 .priv_data_size = sizeof(H264ParseContext), .parser_init = init, .parser_parse = h264_parse, -diff --git a/FFmpeg/libavcodec/hevc_parser.c b/FFmpeg/libavcodec/hevc_parser.c -index 369d133..e720f9d 100644 ---- a/FFmpeg/libavcodec/hevc_parser.c -+++ b/FFmpeg/libavcodec/hevc_parser.c -@@ -370,7 +370,7 @@ static void hevc_parser_close(AVCodecParserContext *s) +diff -urN FFmpeg/libavcodec/hevc_parser.c FFmpeg-patched/libavcodec/hevc_parser.c +--- FFmpeg/libavcodec/hevc_parser.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavcodec/hevc_parser.c 2020-09-27 13:35:13.232526562 +0800 +@@ -381,7 +381,7 @@ } AVCodecParser ff_hevc_parser = { @@ -1254,11 +1470,191 @@ index 369d133..e720f9d 100644 .priv_data_size = sizeof(HEVCParserContext), .parser_parse = hevc_parse, .parser_close = hevc_parser_close, -diff --git a/FFmpeg/libavcodec/libsvt_hevc.c b/FFmpeg/libavcodec/libsvt_hevc.c -new file mode 100644 -index 0000000..9b7a310 ---- /dev/null -+++ b/FFmpeg/libavcodec/libsvt_hevc.c +diff -urN FFmpeg/libavcodec/libopenhevc.c FFmpeg-patched/libavcodec/libopenhevc.c +--- FFmpeg/libavcodec/libopenhevc.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/libopenhevc.c 2020-09-27 13:35:13.433526546 +0800 +@@ -0,0 +1,178 @@ ++/* ++ * OpenHEVC video Decoder ++ * ++ * This file is part of FFmpeg. ++ * ++ * FFmpeg is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation; either ++ * version 2.1 of the License, or (at your option) any later version. ++ * ++ * FFmpeg is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with FFmpeg; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ */ ++ ++#include ++#include "avcodec.h" ++#include "internal.h" ++#include "libavutil/imgutils.h" ++#include "libavutil/opt.h" ++#include "libavutil/common.h" ++#include "libavutil/internal.h" ++ ++//#define DEBUG ++typedef struct OpenHevcContext{ ++ const AVClass *class; ++ OHHandle handle; ++#ifdef DEBUG ++ FILE *fout; ++#endif ++ int thread_count; ++ int thread_type; ++ int temporal_layer_id; ++ int quality_layer_id; ++}OpenHevcContext; ++static av_cold int openhevc_close(AVCodecContext *ctx) ++{ ++ OpenHevcContext *c = ctx->priv_data; ++ if(c->handle){ ++ oh_close(c->handle); ++ c->handle = NULL; ++ } ++#ifdef DEBUG ++ if(c->fout){ ++ fclose(c->fout); ++ } ++#endif ++ return 0; ++} ++static av_cold int openhevc_init(AVCodecContext *ctx) ++{ ++ OpenHevcContext *c = ctx->priv_data; ++ c->handle = oh_init(c->thread_count, c->thread_type); ++ if(!c->handle){ ++ av_log(ctx,AV_LOG_ERROR,"oh_init failed\n"); ++ return AVERROR_EXTERNAL; ++ } ++ size_t extra_size_alloc; ++ extra_size_alloc = ctx->extradata_size > 0 ? (ctx->extradata_size +AV_INPUT_BUFFER_PADDING_SIZE) : 0; ++ if(extra_size_alloc){ ++ oh_extradata_cpy(c->handle, ctx->extradata, extra_size_alloc); ++ } ++ oh_disable_cropping(c->handle, !!(ctx->flags2 & AV_CODEC_FLAG2_IGNORE_CROP)); ++ oh_start(c->handle); ++ oh_select_temporal_layer(c->handle,c->temporal_layer_id); ++ oh_select_active_layer(c->handle,c->quality_layer_id); ++ oh_select_view_layer(c->handle,c->quality_layer_id); ++#ifdef DEBUG ++ c->fout = fopen("output.yuv","wb"); ++ if(!c->fout){ ++ printf("open file failed !\n"); ++ return -1; ++ } ++#endif ++ return 0; ++} ++ ++static int openhevc_decode(AVCodecContext *ctx, void *data, int *got_frame, AVPacket *avpkt) ++{ ++ OpenHevcContext *c = ctx->priv_data; ++ AVFrame *picture = data; ++ int ret; ++ OHFrame openHevcFrame; ++ ++ ret = oh_decode(c->handle, avpkt->data, avpkt->size, avpkt->pts); ++ if(ret<0){ ++ av_log(ctx, AV_LOG_ERROR, "failed to decode frame\n"); ++ return AVERROR_EXTERNAL; ++ } ++ if(ret){ ++ uint8_t *data_ptr_array[4] = {NULL}; ++ int stride_array[4] = {0}; ++ ++ oh_output_update(c->handle, 1, &openHevcFrame); ++ oh_frameinfo_update(c->handle, &openHevcFrame.frame_par); ++ ++ if(av_image_check_size(openHevcFrame.frame_par.width, openHevcFrame.frame_par.height, 0, ctx)) ++ return AVERROR_INVALIDDATA; ++ ctx->pix_fmt = AV_PIX_FMT_YUV420P; ++ ff_set_dimensions(ctx, openHevcFrame.frame_par.width, openHevcFrame.frame_par.height); ++ ++ if((ret=ff_get_buffer(ctx, picture, 0))<0) ++ return ret; ++ picture->sample_aspect_ratio.num = openHevcFrame.frame_par.sample_aspect_ratio.num; ++ picture->sample_aspect_ratio.den = openHevcFrame.frame_par.sample_aspect_ratio.den; ++ ++ data_ptr_array[0] = (uint8_t *)openHevcFrame.data_y_p; ++ data_ptr_array[1] = (uint8_t *)openHevcFrame.data_cb_p; ++ data_ptr_array[2] = (uint8_t *)openHevcFrame.data_cr_p; ++ ++ stride_array[0] = openHevcFrame.frame_par.linesize_y; ++ stride_array[1] = openHevcFrame.frame_par.linesize_cb; ++ stride_array[2] = openHevcFrame.frame_par.linesize_cr; ++#ifdef DEBUG ++ if (c->fout) { ++ int format = openHevcFrame.frame_par.chromat_format == OH_YUV420 ? 
1 : 0; ++ fwrite( (uint8_t *)openHevcFrame.data_y_p , sizeof(uint8_t) , openHevcFrame.frame_par.linesize_y * openHevcFrame.frame_par.height, c->fout); ++ fwrite( (uint8_t *)openHevcFrame.data_cb_p , sizeof(uint8_t) , openHevcFrame.frame_par.linesize_cb * openHevcFrame.frame_par.height >> format, c->fout); ++ fwrite( (uint8_t *)openHevcFrame.data_cr_p , sizeof(uint8_t) , openHevcFrame.frame_par.linesize_cr * openHevcFrame.frame_par.height >> format, c->fout); ++ } ++#endif ++// av_image_copy(picture->data, picture->linesize, (uint8_t **)data_ptr_array, stride_array, ctx->pix_fmt, picture->width, picture->height); ++ picture->data[0] = data_ptr_array[0]; ++ picture->data[1] = data_ptr_array[1]; ++ picture->data[2] = data_ptr_array[2]; ++ picture->linesize[0] = stride_array[0]; ++ picture->linesize[1] = stride_array[1]; ++ picture->linesize[2] = stride_array[2]; ++ picture->format = ctx->pix_fmt; ++ ++ picture->pts = avpkt->pts; ++ picture->pkt_dts = avpkt->dts; ++ picture->pkt_duration = avpkt->duration; ++ ++ *got_frame = 1; ++ } ++ return avpkt->size; ++ ++} ++static void openhevc_flush(AVCodecContext *avctx) ++{ ++ OpenHevcContext *c = avctx->priv_data; ++ oh_flush(c->handle); ++} ++#define OFFSET(x) offsetof(OpenHevcContext, x) ++#define VE (AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM) ++static const AVOption options[] = { ++ {"thread_count", "for how many threads to be executed, 1 is for default", OFFSET(thread_count), AV_OPT_TYPE_INT, {.i64 = 1}, 0, INT_MAX, VE}, ++ {"thread_type", "which multithreads methods to use, 1 is for default", OFFSET(thread_type), AV_OPT_TYPE_INT, {.i64 = 1}, 0, INT_MAX, VE}, ++ {"temporal_layer_id","temporal layer id,7 is for default",OFFSET(temporal_layer_id),AV_OPT_TYPE_INT,{.i64 = 7}, 0 , INT_MAX, VE}, ++ {"quality_layer_id","quality layer id,0 is for default",OFFSET(quality_layer_id),AV_OPT_TYPE_INT,{.i64 = 0}, 0 , INT_MAX, VE}, ++ {NULL}, ++}; ++static const AVClass openhevc_class = { ++ .class_name = "libopenhevc", ++ .item_name = av_default_item_name, ++ .option = options, ++ .version = LIBAVUTIL_VERSION_INT, ++}; ++AVCodec ff_libopenhevc_decoder = { ++ .name = "libopenhevc", ++ .long_name = NULL_IF_CONFIG_SMALL("libopenhevc HEVC decoder"), ++ .type = AVMEDIA_TYPE_VIDEO, ++ .id = AV_CODEC_ID_HEVC, ++ .priv_data_size = sizeof(OpenHevcContext), ++ .priv_class = &openhevc_class, ++ .init = openhevc_init, ++ .flush = openhevc_flush, ++ .close = openhevc_close, ++ .decode = openhevc_decode, ++ .capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_DR1, ++ .caps_internal = FF_CODEC_CAP_SETS_PKT_DTS | FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP, ++}; +diff -urN FFmpeg/libavcodec/libsvt_hevc.c FFmpeg-patched/libavcodec/libsvt_hevc.c +--- FFmpeg/libavcodec/libsvt_hevc.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/libsvt_hevc.c 2020-09-27 13:35:13.262526560 +0800 @@ -0,0 +1,341 @@ +/* +* Scalable Video Technology for HEVC encoder library plugin @@ -1601,12 +1997,41 @@ index 0000000..9b7a310 + .wrapper_name = "libsvt_hevc", +}; + -diff --git a/FFmpeg/libavcodec/tile_encode_svt_impl.c b/FFmpeg/libavcodec/tile_encode_svt_impl.c -new file mode 100644 -index 0000000..e18f8b6 ---- /dev/null -+++ b/FFmpeg/libavcodec/tile_encode_svt_impl.c -@@ -0,0 +1,488 @@ +diff -urN FFmpeg/libavcodec/Makefile FFmpeg-patched/libavcodec/Makefile +--- FFmpeg/libavcodec/Makefile 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavcodec/Makefile 2020-09-27 13:35:13.080526574 +0800 +@@ -96,6 +96,7 @@ + OBJS-$(CONFIG_H264PRED) += h264pred.o + 
OBJS-$(CONFIG_H264QPEL) += h264qpel.o + OBJS-$(CONFIG_HEVCPARSE) += hevc_parse.o h2645_parse.o hevc_ps.o hevc_sei.o hevc_data.o ++OBJS-$(CONFIG_LIBOPENHEVC_DECODER) += libopenhevc.o + OBJS-$(CONFIG_HPELDSP) += hpeldsp.o + OBJS-$(CONFIG_HUFFMAN) += huffman.o + OBJS-$(CONFIG_HUFFYUVDSP) += huffyuvdsp.o +@@ -387,6 +388,10 @@ + OBJS-$(CONFIG_HEVC_MF_ENCODER) += mfenc.o mf_utils.o + OBJS-$(CONFIG_HEVC_NVENC_ENCODER) += nvenc_hevc.o + OBJS-$(CONFIG_NVENC_HEVC_ENCODER) += nvenc_hevc.o ++OBJS-$(CONFIG_LIBSVT_HEVC_ENCODER) += libsvt_hevc.o ++OBJS-$(CONFIG_DISTRIBUTED_ENCODER) += distributed_encoder.o ++OBJS-$(CONFIG_HEVC_TILE_ENCODER) += tile_encoder.o \ ++ tile_encode_svt_impl.o + OBJS-$(CONFIG_HEVC_QSV_DECODER) += qsvdec_h2645.o + OBJS-$(CONFIG_HEVC_QSV_ENCODER) += qsvenc_hevc.o hevc_ps_enc.o \ + hevc_data.o +@@ -568,6 +573,8 @@ + OBJS-$(CONFIG_RA_288_DECODER) += ra288.o celp_filters.o + OBJS-$(CONFIG_RALF_DECODER) += ralf.o + OBJS-$(CONFIG_RASC_DECODER) += rasc.o ++OBJS-$(CONFIG_HEVC_BYPASSVIDEO_DECODER) += bypass_hevc_decoder.o ++OBJS-$(CONFIG_H264_BYPASSVIDEO_DECODER) += bypass_h264_decoder.o + OBJS-$(CONFIG_RAWVIDEO_DECODER) += rawdec.o + OBJS-$(CONFIG_RAWVIDEO_ENCODER) += rawenc.o + OBJS-$(CONFIG_REALTEXT_DECODER) += realtextdec.o ass.o +diff -urN FFmpeg/libavcodec/tile_encoder.c FFmpeg-patched/libavcodec/tile_encoder.c +--- FFmpeg/libavcodec/tile_encoder.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/tile_encoder.c 2020-09-27 13:35:13.371526551 +0800 +@@ -0,0 +1,586 @@ +/* + * Intel tile encoder + * @@ -1628,10 +2053,6 @@ index 0000000..e18f8b6 + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ -+#include -+#include -+#include -+#include + +#include +#include @@ -1646,1691 +2067,1701 @@ index 0000000..e18f8b6 +#include "libavutil/mem.h" +#include "libavutil/pixdesc.h" +#include "libavutil/opt.h" -+#include "libavutil/common.h" -+#include "libavutil/opt.h" -+#include "libavutil/pixdesc.h" + +#include "tile_encoder.h" -+#include "EbErrorCodes.h" -+#include "EbTime.h" -+#include "EbApi.h" + -+#include ++//pthread_mutex_t mutex; ++//pthread_cond_t cond; + -+typedef struct SvtEncoder { -+ EB_H265_ENC_CONFIGURATION enc_params; -+ EB_COMPONENTTYPE *svt_handle; -+ EB_BUFFERHEADERTYPE *in_buf; -+ EB_BUFFERHEADERTYPE *out_buf; -+ int raw_size; -+} SvtEncoder; ++typedef struct TileEncoderContext { ++ const AVClass *class; ++ EncoderWrapper api; ++ enum ENC_LIB enc_lib; ++ enum TILE_MODE tile_mode; + -+typedef struct SvtParams { -+ int vui_info; -+ int hierarchical_level; -+ int intra_period; -+ int la_depth; -+ int intra_ref_type; -+ int enc_mode; -+ int rc_mode; -+ int scd; -+ int tune; -+ int qp; -+ int profile; -+ int base_layer_switch_mode; -+ int bit_rate; -+ int intra_refresh_type; -+}SvtParams; -+ -+typedef struct SvtContext { -+ SvtEncoder *svt_enc; -+ SvtParams svt_param; -+ int eos_flag; -+ int i; -+} SvtContext; -+ -+static int error_mapping(int val) -+{ -+ if (val == EB_ErrorInsufficientResources) -+ return AVERROR(ENOMEM); -+ if ((val == EB_ErrorUndefined) || (val == EB_ErrorInvalidComponent) || -+ (val == EB_ErrorBadParameter)) -+ return AVERROR(EINVAL); -+ return AVERROR_EXTERNAL; -+} ++ //for average size tile, the input just give the layout, such as 3x3, 4x4 ++ int tiles_gw; ++ int tiles_gh; + -+static void free_buffer(SvtEncoder *svt_enc) -+{ -+ if (svt_enc->in_buf) { -+ EB_H265_ENC_INPUT *in_data = (EB_H265_ENC_INPUT* )svt_enc->in_buf->pBuffer; -+ if (in_data) { 
-+ av_freep(&in_data); -+ } -+ av_freep(&svt_enc->in_buf); -+ } -+ av_freep(&svt_enc->out_buf); -+} ++ //for fix size tile, the last one of colum or row is not the fixed value ++ int fix_tiles_w; ++ int fix_tiles_h; ++ char *params; ++} TileEncoderContext; + -+static EB_ERRORTYPE alloc_buffer(EB_H265_ENC_CONFIGURATION *config, SvtEncoder *svt_enc) ++// Support fix size tile ++static int assign_tiles_fix( TileEncoderContext* ctx ) +{ -+ EB_ERRORTYPE ret = EB_ErrorNone; -+ -+ const int pack_mode_10bit = (config->encoderBitDepth > 8) && (config->compressedTenBitFormat == 0) ? 1 : 0; -+ const size_t luma_size_8bit = config->sourceWidth * config->sourceHeight * (1 << pack_mode_10bit); -+ const size_t luma_size_10bit = (config->encoderBitDepth > 8 && pack_mode_10bit == 0) ? luma_size_8bit : 0; -+ -+ svt_enc->raw_size = (luma_size_8bit + luma_size_10bit) * 3 / 2; ++ EncoderWrapper* wrapper = &(ctx->api); + -+ // allocate buffer for in and out -+ svt_enc->in_buf = av_mallocz(sizeof(EB_BUFFERHEADERTYPE)); -+ svt_enc->out_buf = av_mallocz(sizeof(EB_BUFFERHEADERTYPE)); -+ if (!svt_enc->in_buf || !svt_enc->out_buf) -+ goto failed; ++ int *tiles_col_width, *tiles_row_height; ++ tiles_col_width = (int *)malloc(ctx->tiles_gw * sizeof(int)); ++ tiles_row_height = (int *)malloc(ctx->tiles_gh * sizeof(int)); ++ for (int i=0;itiles_gw - 1;i++)tiles_col_width[i]=ctx->fix_tiles_w; ++ for (int i=0;itiles_gh - 1;i++)tiles_row_height[i]=ctx->fix_tiles_h; + -+ svt_enc->in_buf->pBuffer = av_mallocz(sizeof(EB_H265_ENC_INPUT)); -+ if (!svt_enc->in_buf->pBuffer) -+ goto failed; ++ wrapper->tile_num = ctx->tiles_gw * ctx->tiles_gh; ++ wrapper->tile_w = ctx->tiles_gw; ++ wrapper->tile_h = ctx->tiles_gh; + -+ svt_enc->in_buf->nSize = sizeof(EB_BUFFERHEADERTYPE); -+ svt_enc->in_buf->pAppPrivate = NULL; -+ svt_enc->out_buf->nSize = sizeof(EB_BUFFERHEADERTYPE); -+ svt_enc->out_buf->nAllocLen = svt_enc->raw_size; -+ svt_enc->out_buf->pAppPrivate = NULL; -+ -+ return ret; ++ for(int i = 0; i < ctx->tiles_gh; i++) ++ { ++ for(int j = 0; j < ctx->tiles_gw; j++) ++ { ++ int idx = i * ctx->tiles_gw + j; ++ wrapper->tile_info[idx].left = (j == 0) ? 0 : wrapper->tile_info[idx - 1].left + tiles_col_width[j-1]; ++ wrapper->tile_info[idx].top = (i == 0) ? 0 : wrapper->tile_info[(i-1)*ctx->tiles_gw + j].top + tiles_row_height[i-1]; ++ wrapper->tile_info[idx].tHeight = (i == ctx->tiles_gh - 1) ? wrapper->height - wrapper->tile_info[idx].top : tiles_row_height[i]; ++ wrapper->tile_info[idx].tWidth = (j == ctx->tiles_gw - 1) ? 
wrapper->width - wrapper->tile_info[idx].left : tiles_col_width[j]; ++ } ++ } + -+failed: -+ free_buffer(svt_enc); -+ return AVERROR(ENOMEM); ++ if(tiles_col_width) ++ { ++ free(tiles_col_width); ++ tiles_col_width = NULL; ++ } ++ if(tiles_row_height) ++ { ++ free(tiles_row_height); ++ tiles_row_height = NULL; ++ } + -+ ++ return 0; +} -+ -+static EB_ERRORTYPE config_enc_params(EncoderWrapper* wrapper, int tile_idx, EB_H265_ENC_CONFIGURATION *param ) ++static int assign_tiles_avg( TileEncoderContext* ctx ) +{ -+ AVCodecContext *avctx = wrapper->avctx; -+ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; -+ SvtEncoder *svt_enc = q->svt_enc; -+ EB_ERRORTYPE ret = EB_ErrorNone; -+ int tenBits = 0; ++ EncoderWrapper* wrapper = &(ctx->api); + -+ param->sourceWidth = wrapper->tile_info[tile_idx].tWidth; -+ param->sourceHeight = wrapper->tile_info[tile_idx].tHeight; ++ wrapper->tile_num = ctx->tiles_gw * ctx->tiles_gh; ++ wrapper->tile_w = ctx->tiles_gw; ++ wrapper->tile_h = ctx->tiles_gh; + -+ if (avctx->pix_fmt == AV_PIX_FMT_YUV420P10LE) { -+ av_log(avctx, AV_LOG_DEBUG , "Encoder 10 bits depth input\n"); -+ param->compressedTenBitFormat = 0; -+ tenBits = 1; -+ } ++#define LCU_SIZE 64 + -+ // Update param from options -+ param->hierarchicalLevels = q->svt_param.hierarchical_level; -+ param->encMode = q->svt_param.enc_mode; -+ param->intraRefreshType = q->svt_param.intra_ref_type; -+ param->profile = q->svt_param.profile; -+ param->rateControlMode = q->svt_param.rc_mode; -+ param->sceneChangeDetection = q->svt_param.scd; -+ param->tune = q->svt_param.tune; -+ param->baseLayerSwitchMode = q->svt_param.base_layer_switch_mode; ++ // Width and Height should be divisible by LCU_SIZE ++ int width_in_lcu = wrapper->width / LCU_SIZE; ++ int height_in_lcu = wrapper->height / LCU_SIZE; + -+ param->targetBitRate = q->svt_param.bit_rate; -+ param->frameRateNumerator = avctx->time_base.den; -+ param->frameRateDenominator = avctx->time_base.num * avctx->ticks_per_frame; -+ // Need to disable deblock filter to disable loop_filter_across_slices_enable_flag -+ param->disableDlfFlag = 1; -+ param->enableSaoFlag = 0; -+ // Make encoded bitstream has I/P frame only -+ param->intraPeriodLength = q->svt_param.intra_period; -+ param->qp = q->svt_param.qp; -+ param->intraRefreshType = q->svt_param.intra_refresh_type; ++ // (6.5.1) in Rec. ITU-T H.265 v5 (02/2018) ++ int *tiles_col_width, *tiles_row_height; ++ tiles_col_width = (int *)malloc(ctx->tiles_gw * sizeof(int)); ++ tiles_row_height = (int *)malloc(ctx->tiles_gh * sizeof(int)); ++ for( int i=0; itiles_gw; i++) ++ { ++ tiles_col_width[i] = (i+1) * width_in_lcu / ctx->tiles_gw - i * width_in_lcu / ctx->tiles_gw; ++ } ++ for( int i=0; itiles_gh; i++) ++ { ++ tiles_row_height[i] = (i+1) * height_in_lcu / ctx->tiles_gh - i * height_in_lcu / ctx->tiles_gh; + -+ if (q->svt_param.vui_info) -+ param->videoUsabilityInfo = q->svt_param.vui_info; -+ if (q->svt_param.la_depth != -1) -+ param->lookAheadDistance = q->svt_param.la_depth; ++ } + -+ if (tenBits == 1) { -+ param->encoderBitDepth = 10; -+ param->profile = 2; ++ for(int i = 0; i < ctx->tiles_gh; i++) ++ { ++ for(int j = 0; j < ctx->tiles_gw; j++) ++ { ++ int idx = i * ctx->tiles_gw + j; ++ wrapper->tile_info[idx].left = (j == 0) ? 0 : wrapper->tile_info[idx - 1].left + tiles_col_width[j-1] * LCU_SIZE; ++ wrapper->tile_info[idx].top = (i == 0) ? 
0 : wrapper->tile_info[(i-1)*ctx->tiles_gw + j].top + tiles_row_height[i-1] * LCU_SIZE; ++ wrapper->tile_info[idx].tHeight = tiles_row_height[i] * LCU_SIZE; ++ wrapper->tile_info[idx].tWidth = tiles_col_width[j] * LCU_SIZE; ++ } + } + -+ ret = alloc_buffer(param, svt_enc); ++ if(tiles_col_width) ++ { ++ free(tiles_col_width); ++ tiles_col_width = NULL; ++ } ++ if(tiles_row_height) ++ { ++ free(tiles_row_height); ++ tiles_row_height = NULL; ++ } + -+ return ret; ++ return 0; +} + -+static int eb_enc_init(EncoderWrapper* wrapper, int tile_idx) ++/// assign bit rate for each tile. ++int get_tile_bitrate(EncoderWrapper* wrapper, int idx) +{ -+ SvtContext* ctx = wrapper->tile_info[tile_idx].enc_ctx; -+ -+ EB_ERRORTYPE ret = EB_ErrorNone; -+ SvtEncoder* svt_enc = NULL; ++ int bit_rate = wrapper->avctx->bit_rate; ++ double percent = 0.0; + -+ ctx->svt_enc = av_mallocz(sizeof(*ctx->svt_enc)); -+ if (!ctx->svt_enc) -+ return AVERROR(ENOMEM); ++ if( 0==bit_rate ) bit_rate = wrapper->avctx->bit_rate_tolerance; + -+ svt_enc = ctx->svt_enc; -+ -+ ctx->eos_flag = 0; -+ -+ ret = EbInitHandle(&svt_enc->svt_handle, ctx, &svt_enc->enc_params); -+ if (ret != EB_ErrorNone) -+ goto failed_init; -+ -+ ret = config_enc_params( wrapper, tile_idx, &svt_enc->enc_params); -+ if (ret != EB_ErrorNone) -+ goto failed_init; -+ -+ ret = EbH265EncSetParameter(svt_enc->svt_handle, &svt_enc->enc_params); -+ if (ret != EB_ErrorNone) -+ goto failed_init; -+ -+ ret = EbInitEncoder(svt_enc->svt_handle); -+ if (ret != EB_ErrorNone) -+ goto failed_init; -+ -+ return ret; -+ -+failed_init: -+ return error_mapping(ret); -+} ++ ///FIXME if there is more suitable way to calculate bit rate for each tile ++ percent = (double)( wrapper->tile_info[idx].tHeight * wrapper->tile_info[idx].tWidth ) / (double)(wrapper->width * wrapper->height); ++ ++ return (int) (bit_rate * percent); ++ ++ } + -+static void read_in_data(EB_H265_ENC_CONFIGURATION *config, const AVFrame* frame, EB_BUFFERHEADERTYPE *headerPtr) ++int get_tile_maxrate(EncoderWrapper* wrapper, int idx) +{ -+ unsigned int is16bit = config->encoderBitDepth > 8; -+ unsigned long long lumaReadSize = (unsigned long long)config->sourceWidth * config->sourceHeight<< is16bit; -+ EB_H265_ENC_INPUT *in_data = (EB_H265_ENC_INPUT*)headerPtr->pBuffer; -+ ++ int max_rate = wrapper->avctx->rc_max_rate; + -+ // support yuv420p and yuv420p010 -+ in_data->luma = frame->data[0]; -+ in_data->cb = frame->data[1]; -+ in_data->cr = frame->data[2]; ++ ///FIXME if there is more suitable way to calculate bit rate for each tile ++ double percent = (double)( wrapper->tile_info[idx].tHeight * wrapper->tile_info[idx].tWidth ) / (double)(wrapper->width * wrapper->height); + -+ // stride info -+ in_data->yStride = frame->linesize[0] >> is16bit; -+ in_data->cbStride = frame->linesize[1] >> is16bit; -+ in_data->crStride = frame->linesize[2] >> is16bit; ++ return (int) (max_rate * percent); + -+ headerPtr->nFilledLen += lumaReadSize * 3/2u; -+ +} + -+static int eb_send_frame(EncoderWrapper* wrapper, int tile_idx, const AVFrame *frame) ++int bFifoReady( EncoderWrapper* wrapper ) +{ -+ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; -+ SvtEncoder *svt_enc = q->svt_enc; -+ EB_BUFFERHEADERTYPE *headerPtr = svt_enc->in_buf; -+ -+ AVFrame* tile_pic = NULL; -+ int ret = 0; -+ -+ if (!frame) { -+ EB_BUFFERHEADERTYPE headerPtrLast; -+ headerPtrLast.nAllocLen = 0; -+ headerPtrLast.nFilledLen = 0; -+ headerPtrLast.nTickCount = 0; -+ headerPtrLast.pAppPrivate = NULL; -+ //headerPtrLast.nOffset = 0; -+ 
//headerPtrLast.nTimeStamp = 0; -+ headerPtrLast.nFlags = EB_BUFFERFLAG_EOS; -+ headerPtrLast.pBuffer = NULL; -+ EbH265EncSendPicture(svt_enc->svt_handle, &headerPtrLast); -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "========tile id = %d NULL frame!!!\n", tile_idx); -+ q->eos_flag = 1; -+ av_log(wrapper->avctx, AV_LOG_ERROR, "Finish sending frames!!!\n"); -+ return ret; ++ int eos = 0; ++ int ready = 0; ++ for(int i=0; itile_num; i++){ ++ if( wrapper->tile_info[i].outpkt_fifo ){ ++ if( av_fifo_size(wrapper->tile_info[i].outpkt_fifo) ){ ++ ready++; ++ }else{ ++ if(wrapper->tile_info[i].eos) eos++; ++ } ++ } + } -+ get_tile_frame_nocopy(wrapper, tile_idx, frame, &tile_pic); -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "------tile id = %d start frame address: y=%p, u=%p, v=%p!!!\n", -+ tile_idx, tile_pic->data[0], tile_pic->data[1], tile_pic->data[2]); -+ -+ read_in_data(&svt_enc->enc_params, tile_pic, headerPtr); -+ -+ //headerPtr->nOffset = 0; -+ headerPtr->nFlags = 0; -+ headerPtr->pAppPrivate = NULL; -+ headerPtr->pts = frame->pts; -+ //headerPtr->nFlags = 0; -+ //headerPtr->nTimeStamp = 0; -+ //headerPtr->pAppPrivate = NULL; -+ headerPtr->sliceType = INVALID_SLICE; -+ q->i += 1; -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d start to send frame, times = %d!!!\n", tile_idx, q->i); ++ if( ready == wrapper->tile_num ) return 1; ++ if( eos == wrapper->tile_num ) return AVERROR_EOF; + -+ EbH265EncSendPicture(svt_enc->svt_handle, headerPtr); ++ return 0; ++} ++int bs_tile_stitching(EncoderWrapper* wrapper, AVPacket* outPkt) ++{ ++ int ret = 0; ++ AVPacket pkt[MAX_TILES]; ++ int bReady = bFifoReady(wrapper); ++ int totalsize=0; ++ uint8_t* dst = NULL; ++ if( AVERROR_EOF == bReady ) return AVERROR_EOF; + -+ if(NULL!= tile_pic) av_frame_free(&tile_pic); -+ return ret; ++ if( 1 == bReady ){ ++ for(int i=0; itile_num; i++){ ++ av_fifo_generic_read( wrapper->tile_info[i].outpkt_fifo, &pkt[i], sizeof(AVPacket), NULL); ++#ifdef FILE_DEBUG ++ wrapper->tile_info[i].nSpkt += 1; ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "######tile id=%d, getpkt=%d, stitched packet=%d#########\n", i, wrapper->tile_info[i].nGetpkt, wrapper->tile_info[i].nSpkt); ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "**********tile id = %d, packet size = %d, packet addr=%p!!!\n", i,pkt[i].size, pkt[i].data); ++#endif ++ totalsize += pkt[i].size; ++ } ++ ++ // Sometimes the size of output is larger than size of input, ++ // so we alloc 2 times larger size packet. 
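++        // Note: the stitched stream is not a plain byte concatenation of the tile
++        // packets. genTiledStream_process() rewrites the slice headers for the
++        // combined tile grid and new VPS/SPS/PPS are produced (see the extradata
++        // handling below), so the output can exceed totalsize; 2*totalsize is a
++        // conservative upper bound rather than an exact worst case.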
++ ret = ff_alloc_packet2(wrapper->avctx, outPkt, 2*totalsize, 2*totalsize); ++ if( ret < 0) return -1; ++ ++ dst = outPkt->data; ++ ++ // call stitching library ++ wrapper->paramTiledStream.pOutputTiledBitstream = dst; ++ ++ for (int i = 0; i < wrapper->paramTiledStream.tilesHeightCount; i++) ++ { ++ for (int j = 0; j < wrapper->paramTiledStream.tilesWidthCount; j++) ++ { ++ param_oneStream_info *ptempinput = wrapper->paramTiledStream.pTiledBitstream[i*wrapper->paramTiledStream.tilesWidthCount + j]; ++ ptempinput->pTiledBitstreamBuffer = pkt[i*wrapper->paramTiledStream.tilesWidthCount + j].data; ++ ptempinput->inputBufferLen = pkt[i*wrapper->paramTiledStream.tilesWidthCount + j].size; ++ } ++ } ++ ++ wrapper->paramTiledStream.inputBistreamsLen = totalsize; ++ genTiledStream_process(&(wrapper->paramTiledStream), wrapper->pGen); ++ dst += wrapper->paramTiledStream.outputiledbistreamlen; ++ outPkt->size = wrapper->paramTiledStream.outputiledbistreamlen; ++/* ++#ifdef FILE_DEBUG ++ for(int i=0; itile_num; i++){ ++ memcpy(dst, pkt[i].data, pkt[i].size); ++ dst += pkt[i].size; ++ fwrite(pkt[i].data, 1, pkt[i].size, wrapper->tile_info[i].file); ++ } ++#endif ++*/ ++ // Send vps+sps+pps info ++ AVCodecContext* avctx = wrapper->avctx; ++ if(avctx->extradata_size == 0) ++ { ++ unsigned char *headerAddr; ++ avctx->extradata_size = genTiledStream_getParam(wrapper->pGen, ID_GEN_TILED_BITSTREAMS_HEADER, &headerAddr); ++ avctx->extradata = av_malloc(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE); ++ if (!avctx->extradata) { ++ av_log(avctx, AV_LOG_ERROR, ++ "Cannot allocate HEVC header of size %d.\n", avctx->extradata_size); ++ return AVERROR(ENOMEM); ++ } ++ memcpy(avctx->extradata, headerAddr, avctx->extradata_size); ++ } ++ ++ switch (wrapper->paramTiledStream.sliceType) { ++ case SLICE_IDR: ++ case SLICE_I: ++ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I; ++ break; ++ case SLICE_P: ++ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P; ++ break; ++ case SLICE_B: ++ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B; ++ break; ++ } ++ ++ //outPkt->pts = paramTiledStream.pts; ++ ++ ///unref the packet read from fifo ++ for(int i=0; itile_num; i++){ ++ av_packet_unref(&pkt[i]); ++ free(pkt[i].data); ++ } ++ ++ return 0; ++ } ++ return -1; +} + -+static int eb_receive_packet(EncoderWrapper* wrapper, int tile_idx, AVPacket *pkt) ++int get_tile_frame_copy(EncoderWrapper* wrapper, int tile_idx, const AVFrame *pic, AVFrame** tile_pic ) +{ -+ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; -+ SvtEncoder *svt_enc = q->svt_enc; -+ EB_BUFFERHEADERTYPE *headerPtr = svt_enc->out_buf; -+ EB_ERRORTYPE stream_status = EB_ErrorNone; -+ + int ret = 0; -+ -+ //if ((ret = ff_alloc_packet2(wrapper->avctx, pkt, svt_enc->raw_size, 0)) < 0){ -+ // av_log(wrapper->avctx, AV_LOG_ERROR, "tile id = %d ff_alloc_packet2 ret = %d!!!\n", tile_idx, ret); -+ // return ret; -+ //} -+ pkt->data = malloc(svt_enc->raw_size); -+ pkt->size = svt_enc->raw_size; ++ uint8_t* src = NULL; ++ uint8_t* dst = NULL; ++ int factor = 1; ++ AVFrame* frame = NULL; + -+ headerPtr->pBuffer = pkt->data; -+ stream_status = EbH265GetPacket(svt_enc->svt_handle, headerPtr, q->eos_flag); -+ if ((stream_status == EB_NoErrorEmptyQueue)){ -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d stream_status == EB_NoErrorEmptyQueue!!!\n", tile_idx); -+ return AVERROR(EAGAIN); ++ if( NULL == *tile_pic ){ ++ *tile_pic = av_frame_alloc(); ++ if (!*tile_pic) { ++ av_freep(*tile_pic); ++ return AVERROR(ENOMEM); ++ } + } -+ pkt->size = 
headerPtr->nFilledLen; -+ pkt->pts = headerPtr->pts; -+ pkt->dts = headerPtr->dts; -+ ret = (headerPtr->nFlags & EB_BUFFERFLAG_EOS) ? AVERROR_EOF : 0; + -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d ret = %d!!!\n", tile_idx, ret); -+ return ret; -+} ++ frame = *tile_pic; ++ frame->height = wrapper->tile_info[tile_idx].tHeight; ++ frame->width = wrapper->tile_info[tile_idx].tWidth; + -+static av_cold int eb_enc_close(EncoderWrapper* wrapper, int tile_idx) -+{ -+ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; -+ SvtEncoder *svt_enc = q->svt_enc; ++ frame->format = pic->format; + -+ EbDeinitEncoder(svt_enc->svt_handle); -+ EbDeinitHandle(svt_enc->svt_handle); ++ if (!frame->data[0]) { ++ ret = av_frame_get_buffer(frame, 32); ++ if (ret < 0){ ++ av_freep(*tile_pic); ++ return ret; ++ } ++ } + -+ free_buffer(svt_enc); -+ av_freep(&svt_enc); ++ ///current copy is based on YUV420p format ++ for( int planner=0; planner<3; planner++ ){ ++ if( planner > 0 ){ ++ factor = 2; ++ } ++ src = pic->data[planner] + pic->linesize[planner]*(wrapper->tile_info[tile_idx].top / factor) + wrapper->tile_info[tile_idx].left / factor; ++ dst = frame->data[planner]; ++ for( int i=0; iheight/factor; i++ ){ ++ src += pic->linesize[planner]; ++ dst += frame->linesize[planner]; ++ memcpy( dst, src, frame->width / factor ); ++ } ++ } + -+ return 0; ++ return ret; +} + -+///encode each tile with SVT -+int svt_enc_close(void* ctx) ++int get_tile_frame_nocopy(EncoderWrapper* wrapper, int tile_idx, const AVFrame *pic, AVFrame** tile_pic ) +{ -+ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; -+ SvtContext* svt_ctx = NULL; ++ AVFrame* frame = NULL; ++ int factor = 1; + -+ for(int i=0; itile_num; i++){ -+ svt_ctx = (SvtContext*)wrapper->tile_info[i].enc_ctx; -+ if( NULL != svt_ctx){ -+ eb_enc_close(wrapper, i); -+ free(svt_ctx); ++ if( NULL == *tile_pic ){ ++ *tile_pic = av_frame_alloc(); ++ if (!*tile_pic) { ++ av_freep(*tile_pic); ++ return AVERROR(ENOMEM); + } -+ wrapper->tile_info[i].enc_ctx = NULL; + } + -+ return 0; ++ frame = *tile_pic; ++ frame->height = wrapper->tile_info[tile_idx].tHeight; ++ frame->width = wrapper->tile_info[tile_idx].tWidth; ++ frame->format = pic->format; ++ ++ for( int i=0; i<4; i++ ){ ++ if( i > 0 ){ ++ factor = 2; ++ } ++ frame->data[i] = pic->data[i] + pic->linesize[i]*(wrapper->tile_info[tile_idx].top / factor) + wrapper->tile_info[tile_idx].left / factor; ++ frame->linesize[i] = pic->linesize[i]; ++ } ++ ++ return 0; +} + -+int svt_enc_init(void* ctx) ++static av_cold int tile_encode_close(AVCodecContext *avctx) +{ -+ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; -+ SvtContext* svt_ctx = NULL; -+ int ret = 0; -+ -+ for(int i=0; itile_num; i++){ -+ svt_ctx = malloc(sizeof(SvtContext)); -+ svt_ctx->svt_param.hierarchical_level = 3; -+ svt_ctx->svt_param.enc_mode = 9; -+ svt_ctx->svt_param.intra_ref_type = 1; -+ svt_ctx->svt_param.profile = 2; -+ svt_ctx->svt_param.rc_mode = 0;//0-CQP, 1-VBR -+ svt_ctx->svt_param.qp = 32; -+ svt_ctx->svt_param.scd = 0; -+ svt_ctx->svt_param.tune = 1; -+ svt_ctx->svt_param.intra_period = 5; -+ svt_ctx->svt_param.base_layer_switch_mode = 0; -+ svt_ctx->svt_param.vui_info = 0; -+ svt_ctx->svt_param.la_depth = -1; -+ svt_ctx->svt_param.bit_rate = wrapper->tile_info[i].tBitrate; -+ svt_ctx->i = 0; -+ svt_ctx->svt_param.intra_refresh_type = 1;//1-CRA, 2-IDR intra refresh -+ wrapper->tile_info[i].enc_ctx = svt_ctx; -+ ret = eb_enc_init(wrapper, i); -+ if( 0 != ret ) return ret; -+ } -+ wrapper->initialized = 1; -+ return 0; -+} ++ 
TileEncoderContext *ctx = avctx->priv_data; ++ EncoderWrapper *wrapper = &(ctx->api); ++ AVFifoBuffer* fifo = NULL; + -+int svt_enc_frame(void* ctx, AVPacket *pkt, const AVFrame *pic, int *got_packet) -+{ -+ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; -+ SvtContext *q = NULL; -+ -+ int ret = 0; ++ if(wrapper->pGen) ++ { ++ genTiledStream_unInit(wrapper->pGen); ++ } + -+ for(int i=0; itile_num; i++){ -+ q = (SvtContext *)wrapper->tile_info[i].enc_ctx; -+ if( wrapper->tile_info[i].eos ) continue; -+ -+ if(!q->eos_flag) eb_send_frame( wrapper, i, pic ); ++ if (wrapper->paramTiledStream.pTiledBitstream) ++ { ++ for (int i = 0; i < wrapper->paramTiledStream.tilesHeightCount; i++) ++ { ++ for (int j = 0; j < wrapper->paramTiledStream.tilesWidthCount; j++) ++ { ++ free(wrapper->paramTiledStream.pTiledBitstream[i*wrapper->paramTiledStream.tilesWidthCount + j]); ++ wrapper->paramTiledStream.pTiledBitstream[i*wrapper->paramTiledStream.tilesWidthCount + j] = NULL; ++ } ++ } ++ free(wrapper->paramTiledStream.pTiledBitstream); ++ wrapper->paramTiledStream.pTiledBitstream = NULL; ++ } ++ if(avctx->extradata) ++ { ++ free(avctx->extradata); ++ avctx->extradata = NULL; + } + -+ // Wake up all receive tile threads -+ if(!q->eos_flag) ++ if(wrapper->tid) + { -+ pthread_cond_broadcast(&(wrapper->cond)); ++ free(wrapper->tid); ++ wrapper->tid = NULL; + } -+ else ++ if(wrapper->tile_enc_info) + { -+ // Wait until all tiles are ready -+ while(0==bFifoReady(wrapper)) -+ { -+ pthread_cond_broadcast(&(wrapper->cond)); -+ usleep(10000); -+ } ++ free(wrapper->tile_enc_info); ++ wrapper->tile_enc_info = NULL; + } + -+ //FIXME, suppose all encoder has the rhythm to get packet, so there is no buffer in the first time ++ if( NULL != ctx->api.enc_close ) ++ ctx->api.enc_close(&(ctx->api)); + -+ ret = bs_tile_stitching(wrapper, pkt); ++ for( int i=0; i < ctx->api.tile_num; i++ ){ + -+ if( AVERROR_EOF == ret ){ -+ return AVERROR_EOF; ++#ifdef FILE_DEBUG ++ if(ctx->api.tile_info[i].file) fclose(ctx->api.tile_info[i].file); ++#endif ++ ++ fifo = ctx->api.tile_info[i].outpkt_fifo; ++ while ( fifo && av_fifo_size(fifo)) { ++ AVPacket pkt; ++ av_fifo_generic_read(fifo, &pkt, sizeof(pkt), NULL); ++ free(pkt.data); ++ av_packet_unref(&pkt); ++ } ++ av_fifo_free(fifo); ++ fifo = NULL; + } -+ *got_packet = 1; -+ -+ if( -1 == ret ) *got_packet = 0; -+ + return 0; +} + -+int svt_enc_tile(TileEncoderInfo *tile_enc_info) ++static av_cold int tile_encode_init(AVCodecContext *avctx) +{ ++ TileEncoderContext *ctx = avctx->priv_data; ++ EncoderWrapper* wrapper = &(ctx->api); + int ret = 0; ++ char filename[256]; + -+ EncoderWrapper *wrapper = (EncoderWrapper*)tile_enc_info->ctx; -+ int tile_idx = tile_enc_info->tile_idx; -+ -+ while(1) -+ { -+ if(wrapper->initialized) ++ wrapper->width = avctx->coded_width; ++ wrapper->height = avctx->coded_height; ++ ++ wrapper->avctx = avctx; ++ switch(ctx->tile_mode){ ++ case FIX_SIZE: ++ wrapper->uniform_split = false; ++ assign_tiles_fix( ctx ); ++ break; ++ case AVG_SIZE: ++ wrapper->uniform_split = true; ++ assign_tiles_avg( ctx ); ++ break; ++ default: + break; + } + -+ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; -+ SvtEncoder *svt_enc = q->svt_enc; -+ EB_BUFFERHEADERTYPE *headerPtr = svt_enc->out_buf; + -+ while(!wrapper->tile_info[tile_idx].eos) -+ { -+ // Wait until next frame is sent -+ if(!q->eos_flag) -+ pthread_cond_wait(&(wrapper->cond),&(wrapper->mutex)); ++ switch(ctx->enc_lib){ ++ case ENC_X265: ++ wrapper->enc_close = libx265_enc_close; ++ wrapper->enc_frame 
= libx265_enc_frame; ++ wrapper->enc_init = libx265_enc_init; ++ break; ++ case ENC_SVT: ++ wrapper->enc_close = svt_enc_close; ++ wrapper->enc_frame = svt_enc_frame; ++ wrapper->enc_init = svt_enc_init; ++ break; ++ default: ++ break; ++ } + -+ AVPacket tile_pkts = {0}; -+ ret = eb_receive_packet(wrapper, tile_idx, &tile_pkts); -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d begin to eb_receive_packet!!!\n", tile_idx); -+ if( 0 == ret || AVERROR_EOF == ret ){ -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "**********tile id = %d eb_receive_packet got packet, packet size = %d, packet addr=%p!!!\n", tile_idx, tile_pkts.size, tile_pkts.data); -+ av_fifo_generic_write( wrapper->tile_info[tile_idx].outpkt_fifo, &tile_pkts, sizeof(AVPacket), NULL); ++ pthread_mutex_init(&(wrapper->mutex), NULL); ++ pthread_cond_init(&(wrapper->cond), NULL); ++ wrapper->tid = malloc(wrapper->tile_num * sizeof(pthread_t)); ++ wrapper->tile_enc_info = malloc(wrapper->tile_num * sizeof(TileEncoderInfo)); ++ for(int i=0; itile_num; i++){ ++ wrapper->tile_info[i].tBitrate = get_tile_bitrate(wrapper, i); ++ wrapper->tile_info[i].tMaxrate = get_tile_maxrate(wrapper, i); ++ wrapper->tile_info[i].eos = 0; ++ wrapper->tile_info[i].outpkt_fifo = av_fifo_alloc( FIFO_SIZE * sizeof(AVPacket)); +#ifdef FILE_DEBUG -+ wrapper->tile_info[tile_idx].nGetpkt += 1; -+ //fwrite(tile_pkts.data, 1, tile_pkts.size, wrapper->tile_info[i].file); ++ wrapper->tile_info[i].nGetpkt = 0; ++ wrapper->tile_info[i].nSpkt = 0; ++ sprintf(filename, "out_%d.265", i); ++ wrapper->tile_info[i].file = fopen(filename, "wb+"); +#endif -+ if( AVERROR_EOF == ret ){ -+ av_log(wrapper->avctx, AV_LOG_ERROR, "tile id = %d EOS!!!\n", tile_idx); -+ wrapper->tile_info[tile_idx].eos = 1; -+ } -+ }else{ -+ av_packet_unref(&tile_pkts); -+ free(tile_pkts.data); ++ wrapper->tile_enc_info[i].ctx = wrapper; ++ wrapper->tile_enc_info[i].tile_idx = i; ++ ++ ret = pthread_create(&wrapper->tid[i], NULL, svt_enc_tile, &(wrapper->tile_enc_info[i])); ++ if(0 != ret) ++ { ++ av_log(avctx, AV_LOG_ERROR, "Cannot create thread!\n"); ++ return ret; + } ++ } + ++ if( NULL != ctx->api.enc_init ){ ++ ret = wrapper->enc_init(wrapper); ++ if( 0 != ret ) return ret; + } + -+ // Wait until all tiles are done -+ while(AVERROR_EOF!=bFifoReady(wrapper)) ++ wrapper->paramTiledStream.tilesHeightCount = wrapper->tile_h; ++ wrapper->paramTiledStream.tilesWidthCount = wrapper->tile_w; ++ wrapper->paramTiledStream.tilesUniformSpacing = wrapper->uniform_split; ++ wrapper->paramTiledStream.frameWidth = wrapper->width; ++ wrapper->paramTiledStream.frameHeight = wrapper->height; ++ wrapper->paramTiledStream.pTiledBitstream = (param_oneStream_info**)malloc(wrapper->tile_h * wrapper->tile_w * sizeof(param_oneStream_info *)); ++ if (!wrapper->paramTiledStream.pTiledBitstream) + { -+ pthread_cond_wait(&(wrapper->cond),&(wrapper->mutex)); -+ usleep(10000); ++ printf("memory alloc failed!"); ++ return 1; + } + -+ return ret; -+} -diff --git a/FFmpeg/libavcodec/tile_encode_x265_impl.c b/FFmpeg/libavcodec/tile_encode_x265_impl.c -new file mode 100644 -index 0000000..c464db9 ---- /dev/null -+++ b/FFmpeg/libavcodec/tile_encode_x265_impl.c -@@ -0,0 +1,477 @@ -+/* -+ * Intel tile encoder -+ * -+ * Copyright (c) 2018 Intel Cooperation -+ * -+ * This file is part of FFmpeg. 
-+ * -+ * FFmpeg is free software; you can redistribute it and/or -+ * modify it under the terms of the GNU Lesser General Public -+ * License as published by the Free Software Foundation; either -+ * version 2.1 of the License, or (at your option) any later version. -+ * -+ * FFmpeg is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ * Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with FFmpeg; if not, write to the Free Software -+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -+ */ ++ for (int i = 0; i < wrapper->paramTiledStream.tilesHeightCount; i++) ++ { ++ for (int j = 0; j < wrapper->paramTiledStream.tilesWidthCount; j++) ++ { ++ wrapper->paramTiledStream.pTiledBitstream[i*wrapper->paramTiledStream.tilesWidthCount + j] = (param_oneStream_info*)malloc(sizeof(param_oneStream_info)); ++ } ++ } + -+#include -+#include ++ wrapper->pGen = genTiledStream_Init(&(wrapper->paramTiledStream)); ++ if (!wrapper->pGen) ++ { ++ printf("the initialize failed\n"); ++ return 1; ++ } + -+#include -+#include ++ return 0; ++} + -+#include "libavutil/attributes.h" -+#include "libavutil/avassert.h" -+#include "libavutil/dict.h" -+#include "libavutil/error.h" -+#include "libavutil/imgutils.h" -+#include "libavutil/internal.h" -+#include "libavutil/log.h" -+#include "libavutil/mem.h" -+#include "libavutil/pixdesc.h" -+#include "libavutil/opt.h" -+#include "libavutil/common.h" -+#include "libavutil/opt.h" -+#include "libavutil/pixdesc.h" ++static int tile_encode_frame(AVCodecContext *avctx, AVPacket *pkt, ++ const AVFrame *pic, int *got_packet) ++{ ++ TileEncoderContext *ctx = avctx->priv_data; ++ if( NULL != ctx->api.enc_frame ) ++ ctx->api.enc_frame(&(ctx->api), pkt, pic, got_packet); + -+#include "tile_encoder.h" -+#include -+#include -+ -+typedef struct x265Context { -+ x265_encoder *encoder; -+ x265_param *params; -+ const x265_api *api; -+ -+ int bit_rate; -+ int rc_max_rate; -+ -+ float crf; -+ int forced_idr; -+ char *preset; -+ char *tune; -+ char *profile; -+ char *x265_opts; -+ -+} x265Context; -+ -+///encode each tile with libx265 -+static int is_keyframe(NalUnitType naltype) -+{ -+ switch (naltype) { -+ case NAL_UNIT_CODED_SLICE_BLA_W_LP: -+ case NAL_UNIT_CODED_SLICE_BLA_W_RADL: -+ case NAL_UNIT_CODED_SLICE_BLA_N_LP: -+ case NAL_UNIT_CODED_SLICE_IDR_W_RADL: -+ case NAL_UNIT_CODED_SLICE_IDR_N_LP: -+ case NAL_UNIT_CODED_SLICE_CRA: -+ return 1; -+ default: -+ return 0; -+ } ++ return 0; +} + -+static int x265_single_close( EncoderWrapper* wrapper, int tile_idx ) -+{ -+ x265Context* ctx = (x265Context*)wrapper->tile_info[tile_idx].enc_ctx; -+ -+ ctx->api->param_free(ctx->params); ++#define OFFSET(x) offsetof(TileEncoderContext, x) ++#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM ++static const AVOption options[] = { ++ { "enc", "what's the encoder for each tile. so far, x265=1, svt=2.", OFFSET(enc_lib), AV_OPT_TYPE_INT, { .i64 = 2 }, 0, 3, VE }, ++ { "tile_mode", "specify how to divide the tiles of the picture: 1 fixed size tiles; 2. 
grid layout, 3x3, 4x4.", OFFSET(tile_mode), AV_OPT_TYPE_INT, { .i64 = 2 }, 0, 3, VE }, ++ { "tiles_gw", "horizontal grid number of tiles; available when tile is divided via grid layout .", OFFSET(tiles_gw), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, INT_MAX, VE }, ++ { "tiles_gh", "vertical grid number of tiles; available when tile is divided via grid layout .", OFFSET(tiles_gh), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, INT_MAX, VE }, ++ { "tiles_fixw", "horizontal width of tiles; available when tile is divided via fixed size.", OFFSET(fix_tiles_w), AV_OPT_TYPE_INT, { .i64 = 512 }, 0, INT_MAX, VE }, ++ { "tiles_fixh", "vertical height of tiles; available when tile is divided via fixed size.", OFFSET(fix_tiles_h), AV_OPT_TYPE_INT, { .i64 = 512 }, 0, INT_MAX, VE }, ++ { "params", "Set parameters as a comma-separated list of key=value pairs.", OFFSET(params), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, VE }, ++ { NULL }, ++}; + -+ if (ctx->encoder) -+ ctx->api->encoder_close(ctx->encoder); -+ -+ return 0; -+} ++static const AVClass class = { ++ .class_name = "hevc_tile_encoder", ++ .item_name = av_default_item_name, ++ .option = options, ++ .version = LIBAVUTIL_VERSION_INT, ++}; + -+static int x265_single_init( EncoderWrapper* wrapper, int tile_idx ) -+{ -+ x265Context* ctx = wrapper->tile_info[tile_idx].enc_ctx; -+ AVCodecContext* avctx = wrapper->avctx; ++static const AVCodecDefault defaults[] = { ++ { "b", "0" }, ++ { NULL }, ++}; + -+ ctx->api = x265_api_get(av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth); -+ if (!ctx->api) -+ ctx->api = x265_api_get(0); ++AVCodec ff_hevc_tile_encoder = { ++ .name = "hevc_tile_encoder", ++ .long_name = NULL_IF_CONFIG_SMALL("distribute tile H.265 / HEVC"), ++ .type = AVMEDIA_TYPE_VIDEO, ++ .id = AV_CODEC_ID_HEVC, ++ .capabilities = AV_CODEC_CAP_DELAY, ++ .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, ++ AV_PIX_FMT_YUV420P10, ++ AV_PIX_FMT_NONE }, + -+ ctx->params = ctx->api->param_alloc(); -+ if (!ctx->params) { -+ av_log(avctx, AV_LOG_ERROR, "Could not allocate x265 param structure.\n"); -+ return AVERROR(ENOMEM); -+ } ++ .priv_class = &class, ++ .priv_data_size = sizeof(TileEncoderContext), ++ .defaults = defaults, + -+ if (ctx->api->param_default_preset(ctx->params, ctx->preset, ctx->tune) < 0) { -+ int i; ++ .init = tile_encode_init, ++ .encode2 = tile_encode_frame, ++ .close = tile_encode_close, + -+ av_log(avctx, AV_LOG_ERROR, "Error setting preset/tune %s/%s.\n", ctx->preset, ctx->tune); -+ av_log(avctx, AV_LOG_INFO, "Possible presets:"); -+ for (i = 0; x265_preset_names[i]; i++) -+ av_log(avctx, AV_LOG_INFO, " %s", x265_preset_names[i]); ++ .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP, + -+ av_log(avctx, AV_LOG_INFO, "\n"); -+ av_log(avctx, AV_LOG_INFO, "Possible tunes:"); -+ for (i = 0; x265_tune_names[i]; i++) -+ av_log(avctx, AV_LOG_INFO, " %s", x265_tune_names[i]); ++ .wrapper_name = "hevc_tile_encoder", ++}; +diff -urN FFmpeg/libavcodec/tile_encoder.h FFmpeg-patched/libavcodec/tile_encoder.h +--- FFmpeg/libavcodec/tile_encoder.h 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/tile_encoder.h 2020-09-27 13:35:13.371526551 +0800 +@@ -0,0 +1,125 @@ ++/* ++ * Intel tile encoder ++ * ++ * Copyright (c) 2018 Intel Cooperation ++ * ++ * This file is part of FFmpeg. 
++ * ++ * FFmpeg is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation; either ++ * version 2.1 of the License, or (at your option) any later version. ++ * ++ * FFmpeg is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. ++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with FFmpeg; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ */ + -+ av_log(avctx, AV_LOG_INFO, "\n"); ++#ifndef TILE_ENCODER_H ++#define TILE_ENCODER_H ++#define FILE_DEBUG + -+ return AVERROR(EINVAL); -+ } ++#include "libavutil/fifo.h" + -+ ctx->params->frameNumThreads = avctx->thread_count; -+ ctx->params->fpsNum = avctx->time_base.den; -+ ctx->params->fpsDenom = avctx->time_base.num * avctx->ticks_per_frame; -+ ctx->params->sourceWidth = wrapper->tile_info[tile_idx].tWidth; -+ ctx->params->sourceHeight = wrapper->tile_info[tile_idx].tHeight; -+ ctx->params->bEnablePsnr = !!(avctx->flags & AV_CODEC_FLAG_PSNR); -+ ctx->params->bOpenGOP = !(avctx->flags & AV_CODEC_FLAG_CLOSED_GOP); ++#include "avcodec.h" ++#include "internal.h" ++#include ++#include ++#include ++#include + -+ /* Tune the CTU size based on input resolution. */ -+ if (ctx->params->sourceWidth < 64 || ctx->params->sourceHeight < 64) -+ ctx->params->maxCUSize = 32; -+ if (ctx->params->sourceWidth < 32 || ctx->params->sourceHeight < 32) -+ ctx->params->maxCUSize = 16; -+ if (ctx->params->sourceWidth < 16 || ctx->params->sourceHeight < 16) { -+ av_log(avctx, AV_LOG_ERROR, "Image size is too small (%dx%d).\n", -+ ctx->params->sourceWidth, ctx->params->sourceHeight); -+ return AVERROR(EINVAL); -+ } ++#include "genTiledstreamAPI.h" + -+ if ((avctx->color_primaries <= AVCOL_PRI_SMPTE432 && -+ avctx->color_primaries != AVCOL_PRI_UNSPECIFIED) || -+ (avctx->color_trc <= AVCOL_TRC_ARIB_STD_B67 && -+ avctx->color_trc != AVCOL_TRC_UNSPECIFIED) || -+ (avctx->colorspace <= AVCOL_SPC_ICTCP && -+ avctx->colorspace != AVCOL_SPC_UNSPECIFIED)) { ++#define MAX_TILES 256 ++#define FIFO_SIZE 8024 + -+ ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1; -+ ctx->params->vui.bEnableColorDescriptionPresentFlag = 1; ++enum ENC_LIB{ ++ ENC_NULL = 0, ++ ENC_X265 = 1, ++ ENC_SVT = 2 ++}; + -+ // x265 validates the parameters internally -+ ctx->params->vui.colorPrimaries = avctx->color_primaries; -+ ctx->params->vui.transferCharacteristics = avctx->color_trc; -+ ctx->params->vui.matrixCoeffs = avctx->colorspace; -+ } ++enum TILE_MODE{ ++ NULL_MODE = 0, ++ FIX_SIZE = 1, ++ AVG_SIZE = 2 ++}; ++typedef int (*ENC_CLOSE)(void*); ++typedef int (*ENC_INIT)(void*); ++typedef int (*ENC_FRAME)(void*, AVPacket*, const AVFrame*, int*); + -+ if (avctx->sample_aspect_ratio.num > 0 && avctx->sample_aspect_ratio.den > 0) { -+ char sar[12]; -+ int sar_num, sar_den; ++typedef struct TileInfo{ ++ int top; ++ int left; ++ int tWidth; ++ int tHeight; ++ int tBitrate; ++ int tMaxrate; ++ AVFifoBuffer* outpkt_fifo; ++ int proc_idx; ++ int eos; ++ void* enc_ctx; ++ AVPacket* internal_pkt; ++#ifdef FILE_DEBUG ++ int nGetpkt; ++ int nSpkt; ++ FILE* file; ++#endif ++} TileInfo; + -+ av_reduce(&sar_num, &sar_den, -+ avctx->sample_aspect_ratio.num, -+ avctx->sample_aspect_ratio.den, 65535); -+ 
snprintf(sar, sizeof(sar), "%d:%d", sar_num, sar_den); -+ if (ctx->api->param_parse(ctx->params, "sar", sar) == X265_PARAM_BAD_VALUE) { -+ av_log(avctx, AV_LOG_ERROR, "Invalid SAR: %d:%d.\n", sar_num, sar_den); -+ return AVERROR_INVALIDDATA; -+ } -+ } ++typedef struct TileEncoderInfo{ ++ void *ctx; ++ int tile_idx; ++}TileEncoderInfo; + -+ switch (avctx->pix_fmt) { -+ case AV_PIX_FMT_YUV420P: -+ case AV_PIX_FMT_YUV420P10: -+ case AV_PIX_FMT_YUV420P12: -+ ctx->params->internalCsp = X265_CSP_I420; -+ break; -+ case AV_PIX_FMT_YUV422P: -+ case AV_PIX_FMT_YUV422P10: -+ case AV_PIX_FMT_YUV422P12: -+ ctx->params->internalCsp = X265_CSP_I422; -+ break; -+ case AV_PIX_FMT_GBRP: -+ case AV_PIX_FMT_GBRP10: -+ case AV_PIX_FMT_GBRP12: -+ ctx->params->vui.matrixCoeffs = AVCOL_SPC_RGB; -+ ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1; -+ ctx->params->vui.bEnableColorDescriptionPresentFlag = 1; -+ case AV_PIX_FMT_YUV444P: -+ case AV_PIX_FMT_YUV444P10: -+ case AV_PIX_FMT_YUV444P12: -+ ctx->params->internalCsp = X265_CSP_I444; -+ break; -+ case AV_PIX_FMT_GRAY8: -+ case AV_PIX_FMT_GRAY10: -+ case AV_PIX_FMT_GRAY12: -+ if (ctx->api->api_build_number < 85) { -+ av_log(avctx, AV_LOG_ERROR, -+ "libx265 version is %d, must be at least 85 for gray encoding.\n", -+ ctx->api->api_build_number); -+ return AVERROR_INVALIDDATA; -+ } -+ ctx->params->internalCsp = X265_CSP_I400; -+ break; -+ } ++typedef struct EncoderWrapper{ ++ AVCodecContext* avctx; + -+ if (ctx->crf >= 0) { -+ char crf[6]; ++ int width; ++ int height; ++ void* enc_param; + -+ snprintf(crf, sizeof(crf), "%2.2f", ctx->crf); -+ if (ctx->api->param_parse(ctx->params, "crf", crf) == X265_PARAM_BAD_VALUE) { -+ av_log(avctx, AV_LOG_ERROR, "Invalid crf: %2.2f.\n", ctx->crf); -+ return AVERROR(EINVAL); -+ } -+ } else if (ctx->bit_rate > 0) { -+ ctx->params->rc.bitrate = ctx->bit_rate / 1000; -+ ctx->params->rc.rateControlMode = X265_RC_ABR; -+ } ++ bool uniform_split; ++ int tile_num; ++ int tile_w; ++ int tile_h; ++ TileInfo tile_info[MAX_TILES]; + -+ ctx->params->rc.vbvBufferSize = ctx->bit_rate * 2 / 3000; -+ ctx->params->rc.vbvMaxBitrate = ctx->rc_max_rate / 1000; ++ ENC_CLOSE enc_close; ++ ENC_INIT enc_init; ++ ENC_FRAME enc_frame; + -+ if (!(avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER)) -+ ctx->params->bRepeatHeaders = 1; ++ TileEncoderInfo *tile_enc_info; ++ pthread_t *tid; ++ int initialized; + -+ if (ctx->x265_opts) { -+ AVDictionary *dict = NULL; -+ AVDictionaryEntry *en = NULL; ++ void *pGen; ++ param_gen_tiledStream paramTiledStream; ++ pthread_mutex_t mutex; ++ pthread_cond_t cond; ++} EncoderWrapper; + -+ if (!av_dict_parse_string(&dict, ctx->x265_opts, "=", ":", 0)) { -+ while ((en = av_dict_get(dict, "", en, AV_DICT_IGNORE_SUFFIX))) { -+ int parse_ret = ctx->api->param_parse(ctx->params, en->key, en->value); ++int get_tile_frame_copy(EncoderWrapper* wrapper, int tile_idx, const AVFrame *pic, AVFrame** tile_pic ); ++int get_tile_frame_nocopy(EncoderWrapper* wrapper, int tile_idx, const AVFrame *pic, AVFrame** tile_pic ); + -+ switch (parse_ret) { -+ case X265_PARAM_BAD_NAME: -+ av_log(avctx, AV_LOG_WARNING, -+ "Unknown option: %s.\n", en->key); -+ break; -+ case X265_PARAM_BAD_VALUE: -+ av_log(avctx, AV_LOG_WARNING, -+ "Invalid value for %s: %s.\n", en->key, en->value); -+ break; -+ default: -+ break; -+ } -+ } -+ av_dict_free(&dict); -+ } -+ } ++int bs_tile_stitching(EncoderWrapper* wrapper, AVPacket* outPkt); ++int get_tile_bitrate(EncoderWrapper* wrapper, int idx); ++int get_tile_maxrate(EncoderWrapper* wrapper, int idx); + -+ if 
(ctx->params->rc.vbvBufferSize && avctx->rc_initial_buffer_occupancy > 1000 && -+ ctx->params->rc.vbvBufferInit == 0.9) { -+ ctx->params->rc.vbvBufferInit = (float)avctx->rc_initial_buffer_occupancy / 1000; -+ } ++int libx265_enc_close(void* ctx); ++int libx265_enc_init(void* ctx); ++int libx265_enc_frame(void* ctx, AVPacket *pkt, const AVFrame *pic, int *got_packet); + -+ if (ctx->profile) { -+ if (ctx->api->param_apply_profile(ctx->params, ctx->profile) < 0) { -+ int i; -+ av_log(avctx, AV_LOG_ERROR, "Invalid or incompatible profile set: %s.\n", ctx->profile); -+ av_log(avctx, AV_LOG_INFO, "Possible profiles:"); -+ for (i = 0; x265_profile_names[i]; i++) -+ av_log(avctx, AV_LOG_INFO, " %s", x265_profile_names[i]); -+ av_log(avctx, AV_LOG_INFO, "\n"); -+ return AVERROR(EINVAL); -+ } -+ } ++int svt_enc_close(void* ctx); ++int svt_enc_init(void* ctx); ++int svt_enc_frame(void* ctx, AVPacket *pkt, const AVFrame *pic, int *got_packet); ++int svt_enc_tile(TileEncoderInfo *tile_enc_info); ++int bFifoReady( EncoderWrapper* wrapper ); + -+ ctx->encoder = ctx->api->encoder_open(ctx->params); -+ if (!ctx->encoder) { -+ av_log(avctx, AV_LOG_ERROR, "Cannot open libx265 encoder.\n"); -+ x265_single_close(wrapper, tile_idx); -+ return AVERROR_INVALIDDATA; -+ } ++#endif /* TILE_ENCODER_H */ + -+/* if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) { -+ x265_nal *nal; -+ int nnal; +diff -urN FFmpeg/libavcodec/tile_encode_svt_impl.c FFmpeg-patched/libavcodec/tile_encode_svt_impl.c +--- FFmpeg/libavcodec/tile_encode_svt_impl.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/tile_encode_svt_impl.c 2020-09-27 13:35:13.371526551 +0800 +@@ -0,0 +1,488 @@ ++/* ++ * Intel tile encoder ++ * ++ * Copyright (c) 2018 Intel Cooperation ++ * ++ * This file is part of FFmpeg. ++ * ++ * FFmpeg is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation; either ++ * version 2.1 of the License, or (at your option) any later version. ++ * ++ * FFmpeg is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with FFmpeg; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ */ ++#include ++#include ++#include ++#include + -+ avctx->extradata_size = ctx->api->encoder_headers(ctx->encoder, &nal, &nnal); -+ if (avctx->extradata_size <= 0) { -+ av_log(avctx, AV_LOG_ERROR, "Cannot encode headers.\n"); -+ libx265_encode_close(avctx); -+ return AVERROR_INVALIDDATA; -+ } ++#include ++#include + -+ avctx->extradata = av_malloc(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE); -+ if (!avctx->extradata) { -+ av_log(avctx, AV_LOG_ERROR, -+ "Cannot allocate HEVC header of size %d.\n", avctx->extradata_size); -+ libx265_encode_close(avctx); -+ return AVERROR(ENOMEM); -+ } ++#include "libavutil/attributes.h" ++#include "libavutil/avassert.h" ++#include "libavutil/dict.h" ++#include "libavutil/error.h" ++#include "libavutil/imgutils.h" ++#include "libavutil/internal.h" ++#include "libavutil/log.h" ++#include "libavutil/mem.h" ++#include "libavutil/pixdesc.h" ++#include "libavutil/opt.h" ++#include "libavutil/common.h" ++#include "libavutil/opt.h" ++#include "libavutil/pixdesc.h" + -+ memcpy(avctx->extradata, nal[0].payload, avctx->extradata_size); -+ } -+*/ -+ return 0; -+} ++#include "tile_encoder.h" ++#include "EbErrorCodes.h" ++#include "EbTime.h" ++#include "EbApi.h" + -+static int x265_single_frame( EncoderWrapper* wrapper, int tile_idx, AVPacket *pkt, const AVFrame *pic, int *got_packet ) -+{ -+ x265Context* ctx = (x265Context*)wrapper->tile_info[tile_idx].enc_ctx; -+ AVCodecContext* avctx = wrapper->avctx; -+ AVFrame* tile_pic = NULL; -+ x265_picture x265pic; -+ x265_picture x265pic_out = { 0 }; -+ x265_nal *nal; -+ uint8_t *dst; -+ int payload = 0; -+ int nnal; -+ int ret; -+ int i; ++#include + -+ ctx->api->picture_init(ctx->params, &x265pic); -+ -+ if (pic) { -+ get_tile_frame_nocopy(wrapper, tile_idx, pic, &tile_pic); -+ -+ for (i = 0; i < 3; i++) { -+ x265pic.planes[i] = tile_pic->data[i]; -+ x265pic.stride[i] = tile_pic->linesize[i]; -+ } ++typedef struct SvtEncoder { ++ EB_H265_ENC_CONFIGURATION enc_params; ++ EB_COMPONENTTYPE *svt_handle; ++ EB_BUFFERHEADERTYPE *in_buf; ++ EB_BUFFERHEADERTYPE *out_buf; ++ int raw_size; ++} SvtEncoder; + -+ x265pic.pts = pic->pts; -+ x265pic.bitDepth = av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth; ++typedef struct SvtParams { ++ int vui_info; ++ int hierarchical_level; ++ int intra_period; ++ int la_depth; ++ int intra_ref_type; ++ int enc_mode; ++ int rc_mode; ++ int scd; ++ int tune; ++ int qp; ++ int profile; ++ int base_layer_switch_mode; ++ int bit_rate; ++ int intra_refresh_type; ++}SvtParams; + -+ x265pic.sliceType = pic->pict_type == AV_PICTURE_TYPE_I ? -+ (ctx->forced_idr ? X265_TYPE_IDR : X265_TYPE_I) : -+ pic->pict_type == AV_PICTURE_TYPE_P ? X265_TYPE_P : -+ pic->pict_type == AV_PICTURE_TYPE_B ? 
X265_TYPE_B : -+ X265_TYPE_AUTO; ++typedef struct SvtContext { ++ SvtEncoder *svt_enc; ++ SvtParams svt_param; ++ int eos_flag; ++ int i; ++} SvtContext; ++ ++static int error_mapping(int val) ++{ ++ if (val == EB_ErrorInsufficientResources) ++ return AVERROR(ENOMEM); ++ if ((val == EB_ErrorUndefined) || (val == EB_ErrorInvalidComponent) || ++ (val == EB_ErrorBadParameter)) ++ return AVERROR(EINVAL); ++ return AVERROR_EXTERNAL; ++} ++ ++static void free_buffer(SvtEncoder *svt_enc) ++{ ++ if (svt_enc->in_buf) { ++ EB_H265_ENC_INPUT *in_data = (EB_H265_ENC_INPUT* )svt_enc->in_buf->pBuffer; ++ if (in_data) { ++ av_freep(&in_data); ++ } ++ av_freep(&svt_enc->in_buf); + } ++ av_freep(&svt_enc->out_buf); ++} + -+ ret = ctx->api->encoder_encode(ctx->encoder, &nal, &nnal, -+ pic ? &x265pic : NULL, &x265pic_out); -+ -+ if(NULL!= tile_pic) av_frame_free(&tile_pic); -+ -+ if (ret < 0) -+ return AVERROR_EXTERNAL; ++static EB_ERRORTYPE alloc_buffer(EB_H265_ENC_CONFIGURATION *config, SvtEncoder *svt_enc) ++{ ++ EB_ERRORTYPE ret = EB_ErrorNone; + -+ if (!nnal) -+ return 0; ++ const int pack_mode_10bit = (config->encoderBitDepth > 8) && (config->compressedTenBitFormat == 0) ? 1 : 0; ++ const size_t luma_size_8bit = config->sourceWidth * config->sourceHeight * (1 << pack_mode_10bit); ++ const size_t luma_size_10bit = (config->encoderBitDepth > 8 && pack_mode_10bit == 0) ? luma_size_8bit : 0; + -+ ///FIXME, need to assign each NAL to a packet if stitching library can only process one NAL -+ for (i = 0; i < nnal; i++) -+ payload += nal[i].sizeBytes; ++ svt_enc->raw_size = (luma_size_8bit + luma_size_10bit) * 3 / 2; + -+ /*ret = ff_alloc_packet2(avctx, pkt, payload, payload); -+ if (ret < 0) { -+ av_log(avctx, AV_LOG_ERROR, "Error getting output packet.\n"); -+ return ret; -+ }*/ -+ pkt->data = malloc(payload); -+ pkt->size = payload; -+ dst = pkt->data; ++ // allocate buffer for in and out ++ svt_enc->in_buf = av_mallocz(sizeof(EB_BUFFERHEADERTYPE)); ++ svt_enc->out_buf = av_mallocz(sizeof(EB_BUFFERHEADERTYPE)); ++ if (!svt_enc->in_buf || !svt_enc->out_buf) ++ goto failed; + -+ for (i = 0; i < nnal; i++) { -+ memcpy(dst, nal[i].payload, nal[i].sizeBytes); -+ dst += nal[i].sizeBytes; ++ svt_enc->in_buf->pBuffer = av_mallocz(sizeof(EB_H265_ENC_INPUT)); ++ if (!svt_enc->in_buf->pBuffer) ++ goto failed; + -+ if (is_keyframe(nal[i].type)) -+ pkt->flags |= AV_PKT_FLAG_KEY; -+ } ++ svt_enc->in_buf->nSize = sizeof(EB_BUFFERHEADERTYPE); ++ svt_enc->in_buf->pAppPrivate = NULL; ++ svt_enc->out_buf->nSize = sizeof(EB_BUFFERHEADERTYPE); ++ svt_enc->out_buf->nAllocLen = svt_enc->raw_size; ++ svt_enc->out_buf->pAppPrivate = NULL; + -+ pkt->pts = x265pic_out.pts; -+ pkt->dts = x265pic_out.dts; ++ return ret; + -+#if FF_API_CODED_FRAME -+FF_DISABLE_DEPRECATION_WARNINGS -+ switch (x265pic_out.sliceType) { -+ case X265_TYPE_IDR: -+ case X265_TYPE_I: -+ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I; -+ break; -+ case X265_TYPE_P: -+ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P; -+ break; -+ case X265_TYPE_B: -+ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B; -+ break; -+ } -+FF_ENABLE_DEPRECATION_WARNINGS -+#endif ++failed: ++ free_buffer(svt_enc); ++ return AVERROR(ENOMEM); + -+#if X265_BUILD >= 130 -+ if (x265pic_out.sliceType == X265_TYPE_B) -+#else -+ if (x265pic_out.frameData.sliceType == 'b') -+#endif -+ pkt->flags |= AV_PKT_FLAG_DISPOSABLE; + -+ *got_packet = 1; -+ -+ return 0; +} + -+int libx265_enc_close(void* ctx) ++static EB_ERRORTYPE config_enc_params(EncoderWrapper* wrapper, int tile_idx, 
EB_H265_ENC_CONFIGURATION *param ) +{ -+ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; -+ x265Context* x265_ctx = NULL; -+ -+ for(int i=0; itile_num; i++){ -+ x265_ctx = (x265Context*)wrapper->tile_info[i].enc_ctx; -+ if( NULL != x265_ctx){ -+ x265_single_close(wrapper, i); -+ free(x265_ctx); -+ } -+ wrapper->tile_info[i].enc_ctx = NULL; ++ AVCodecContext *avctx = wrapper->avctx; ++ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; ++ SvtEncoder *svt_enc = q->svt_enc; ++ EB_ERRORTYPE ret = EB_ErrorNone; ++ int tenBits = 0; ++ ++ param->sourceWidth = wrapper->tile_info[tile_idx].tWidth; ++ param->sourceHeight = wrapper->tile_info[tile_idx].tHeight; ++ ++ if (avctx->pix_fmt == AV_PIX_FMT_YUV420P10LE) { ++ av_log(avctx, AV_LOG_DEBUG , "Encoder 10 bits depth input\n"); ++ param->compressedTenBitFormat = 0; ++ tenBits = 1; + } -+ -+ return 0; -+} + -+int libx265_enc_init(void* ctx) -+{ -+ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; -+ x265Context* x265_ctx = NULL; -+ -+ for(int i=0; itile_num; i++){ -+ x265_ctx = malloc(sizeof(x265Context)); -+ x265_ctx->api = NULL; -+ x265_ctx->encoder = NULL; -+ x265_ctx->x265_opts = wrapper->enc_param; -+ x265_ctx->preset = "fast"; -+ x265_ctx->tune = "psnr"; -+ x265_ctx->crf = -1; -+ x265_ctx->profile = "main"; -+ x265_ctx->forced_idr = 0; -+ x265_ctx->bit_rate = wrapper->tile_info[i].tBitrate; -+ x265_ctx->rc_max_rate = wrapper->tile_info[i].tMaxrate; -+ wrapper->tile_info[i].enc_ctx = x265_ctx; -+ x265_single_init(wrapper, i); ++ // Update param from options ++ param->hierarchicalLevels = q->svt_param.hierarchical_level; ++ param->encMode = q->svt_param.enc_mode; ++ param->intraRefreshType = q->svt_param.intra_ref_type; ++ param->profile = q->svt_param.profile; ++ param->rateControlMode = q->svt_param.rc_mode; ++ param->sceneChangeDetection = q->svt_param.scd; ++ param->tune = q->svt_param.tune; ++ param->baseLayerSwitchMode = q->svt_param.base_layer_switch_mode; ++ ++ param->targetBitRate = q->svt_param.bit_rate; ++ param->frameRateNumerator = avctx->time_base.den; ++ param->frameRateDenominator = avctx->time_base.num * avctx->ticks_per_frame; ++ // Need to disable deblock filter to disable loop_filter_across_slices_enable_flag ++ param->disableDlfFlag = 1; ++ param->enableSaoFlag = 0; ++ // Make encoded bitstream has I/P frame only ++ param->intraPeriodLength = q->svt_param.intra_period; ++ param->qp = q->svt_param.qp; ++ param->intraRefreshType = q->svt_param.intra_refresh_type; ++ ++ if (q->svt_param.vui_info) ++ param->videoUsabilityInfo = q->svt_param.vui_info; ++ if (q->svt_param.la_depth != -1) ++ param->lookAheadDistance = q->svt_param.la_depth; ++ ++ if (tenBits == 1) { ++ param->encoderBitDepth = 10; ++ param->profile = 2; + } -+ return 0; ++ ++ ret = alloc_buffer(param, svt_enc); ++ ++ return ret; +} + -+int libx265_enc_frame(void* ctx, AVPacket *pkt, const AVFrame *pic, int *got_packet) ++static int eb_enc_init(EncoderWrapper* wrapper, int tile_idx) +{ -+ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; -+ int ret = 0; -+ int got_pkt = 0; -+ -+ for(int i=0; itile_num; i++){ -+ if( wrapper->tile_info[i].eos ) continue; -+ got_pkt = 0; -+ AVPacket tile_pkts = {0}; -+ -+ ret = x265_single_frame(wrapper, i, &tile_pkts, pic, &got_pkt); -+ -+ if( got_pkt ){ -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "**********tile id = %d receive_packet got packet!!!\n", i); -+ av_fifo_generic_write( wrapper->tile_info[i].outpkt_fifo, &tile_pkts, sizeof(AVPacket), NULL); -+ }else{ -+ av_packet_unref(&tile_pkts); -+ free(tile_pkts.data); -+ } -+ 
if( NULL==pic && !got_pkt ){ -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d EOS!!!\n", i); -+ wrapper->tile_info[i].eos = 1; -+ } -+ } -+ -+ //FIXME, suppose all encoder has the rhythm to get packet, so there is no buffer in the first time -+ ret = bs_tile_stitching(wrapper, pkt); -+ -+ if( AVERROR_EOF == ret ){ -+ return AVERROR_EOF; -+ } -+ *got_packet = 1; -+ -+ if( -1 == ret ) *got_packet = 0; -+ -+ return 0; -+} -\ No newline at end of file -diff --git a/FFmpeg/libavcodec/tile_encoder.c b/FFmpeg/libavcodec/tile_encoder.c -new file mode 100644 -index 0000000..08b0fb6 ---- /dev/null -+++ b/FFmpeg/libavcodec/tile_encoder.c -@@ -0,0 +1,586 @@ -+/* -+ * Intel tile encoder -+ * -+ * Copyright (c) 2018 Intel Cooperation -+ * -+ * This file is part of FFmpeg. -+ * -+ * FFmpeg is free software; you can redistribute it and/or -+ * modify it under the terms of the GNU Lesser General Public -+ * License as published by the Free Software Foundation; either -+ * version 2.1 of the License, or (at your option) any later version. -+ * -+ * FFmpeg is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ * Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with FFmpeg; if not, write to the Free Software -+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -+ */ ++ SvtContext* ctx = wrapper->tile_info[tile_idx].enc_ctx; + -+#include -+#include ++ EB_ERRORTYPE ret = EB_ErrorNone; ++ SvtEncoder* svt_enc = NULL; + -+#include "libavutil/attributes.h" -+#include "libavutil/avassert.h" -+#include "libavutil/dict.h" -+#include "libavutil/error.h" -+#include "libavutil/imgutils.h" -+#include "libavutil/internal.h" -+#include "libavutil/log.h" -+#include "libavutil/mem.h" -+#include "libavutil/pixdesc.h" -+#include "libavutil/opt.h" ++ ctx->svt_enc = av_mallocz(sizeof(*ctx->svt_enc)); ++ if (!ctx->svt_enc) ++ return AVERROR(ENOMEM); + -+#include "tile_encoder.h" ++ svt_enc = ctx->svt_enc; + -+//pthread_mutex_t mutex; -+//pthread_cond_t cond; ++ ctx->eos_flag = 0; + -+typedef struct TileEncoderContext { -+ const AVClass *class; -+ EncoderWrapper api; -+ enum ENC_LIB enc_lib; -+ enum TILE_MODE tile_mode; -+ -+ //for average size tile, the input just give the layout, such as 3x3, 4x4 -+ int tiles_gw; -+ int tiles_gh; -+ -+ //for fix size tile, the last one of colum or row is not the fixed value -+ int fix_tiles_w; -+ int fix_tiles_h; -+ char *params; -+} TileEncoderContext; ++ ret = EbInitHandle(&svt_enc->svt_handle, ctx, &svt_enc->enc_params); ++ if (ret != EB_ErrorNone) ++ goto failed_init; + -+// Support fix size tile -+static int assign_tiles_fix( TileEncoderContext* ctx ) -+{ -+ EncoderWrapper* wrapper = &(ctx->api); ++ ret = config_enc_params( wrapper, tile_idx, &svt_enc->enc_params); ++ if (ret != EB_ErrorNone) ++ goto failed_init; + -+ int *tiles_col_width, *tiles_row_height; -+ tiles_col_width = (int *)malloc(ctx->tiles_gw * sizeof(int)); -+ tiles_row_height = (int *)malloc(ctx->tiles_gh * sizeof(int)); -+ for (int i=0;itiles_gw - 1;i++)tiles_col_width[i]=ctx->fix_tiles_w; -+ for (int i=0;itiles_gh - 1;i++)tiles_row_height[i]=ctx->fix_tiles_h; ++ ret = EbH265EncSetParameter(svt_enc->svt_handle, &svt_enc->enc_params); ++ if (ret != EB_ErrorNone) ++ goto failed_init; + -+ wrapper->tile_num = ctx->tiles_gw * ctx->tiles_gh; -+ wrapper->tile_w = 
ctx->tiles_gw; -+ wrapper->tile_h = ctx->tiles_gh; ++ ret = EbInitEncoder(svt_enc->svt_handle); ++ if (ret != EB_ErrorNone) ++ goto failed_init; + -+ for(int i = 0; i < ctx->tiles_gh; i++) -+ { -+ for(int j = 0; j < ctx->tiles_gw; j++) -+ { -+ int idx = i * ctx->tiles_gw + j; -+ wrapper->tile_info[idx].left = (j == 0) ? 0 : wrapper->tile_info[idx - 1].left + tiles_col_width[j-1]; -+ wrapper->tile_info[idx].top = (i == 0) ? 0 : wrapper->tile_info[(i-1)*ctx->tiles_gw + j].top + tiles_row_height[i-1]; -+ wrapper->tile_info[idx].tHeight = (i == ctx->tiles_gh - 1) ? wrapper->height - wrapper->tile_info[idx].top : tiles_row_height[i]; -+ wrapper->tile_info[idx].tWidth = (j == ctx->tiles_gw - 1) ? wrapper->width - wrapper->tile_info[idx].left : tiles_col_width[j]; -+ } -+ } -+ -+ if(tiles_col_width) -+ { -+ free(tiles_col_width); -+ tiles_col_width = NULL; -+ } -+ if(tiles_row_height) -+ { -+ free(tiles_row_height); -+ tiles_row_height = NULL; -+ } ++ return ret; + -+ return 0; ++failed_init: ++ return error_mapping(ret); +} -+static int assign_tiles_avg( TileEncoderContext* ctx ) ++ ++static void read_in_data(EB_H265_ENC_CONFIGURATION *config, const AVFrame* frame, EB_BUFFERHEADERTYPE *headerPtr) +{ -+ EncoderWrapper* wrapper = &(ctx->api); -+ -+ wrapper->tile_num = ctx->tiles_gw * ctx->tiles_gh; -+ wrapper->tile_w = ctx->tiles_gw; -+ wrapper->tile_h = ctx->tiles_gh; ++ unsigned int is16bit = config->encoderBitDepth > 8; ++ unsigned long long lumaReadSize = (unsigned long long)config->sourceWidth * config->sourceHeight<< is16bit; ++ EB_H265_ENC_INPUT *in_data = (EB_H265_ENC_INPUT*)headerPtr->pBuffer; + -+#define LCU_SIZE 64 + -+ // Width and Height should be divisible by LCU_SIZE -+ int width_in_lcu = wrapper->width / LCU_SIZE; -+ int height_in_lcu = wrapper->height / LCU_SIZE; ++ // support yuv420p and yuv420p010 ++ in_data->luma = frame->data[0]; ++ in_data->cb = frame->data[1]; ++ in_data->cr = frame->data[2]; + -+ // (6.5.1) in Rec. ITU-T H.265 v5 (02/2018) -+ int *tiles_col_width, *tiles_row_height; -+ tiles_col_width = (int *)malloc(ctx->tiles_gw * sizeof(int)); -+ tiles_row_height = (int *)malloc(ctx->tiles_gh * sizeof(int)); -+ for( int i=0; itiles_gw; i++) -+ { -+ tiles_col_width[i] = (i+1) * width_in_lcu / ctx->tiles_gw - i * width_in_lcu / ctx->tiles_gw; -+ } -+ for( int i=0; itiles_gh; i++) -+ { -+ tiles_row_height[i] = (i+1) * height_in_lcu / ctx->tiles_gh - i * height_in_lcu / ctx->tiles_gh; ++ // stride info ++ in_data->yStride = frame->linesize[0] >> is16bit; ++ in_data->cbStride = frame->linesize[1] >> is16bit; ++ in_data->crStride = frame->linesize[2] >> is16bit; + -+ } ++ headerPtr->nFilledLen += lumaReadSize * 3/2u; + -+ for(int i = 0; i < ctx->tiles_gh; i++) -+ { -+ for(int j = 0; j < ctx->tiles_gw; j++) -+ { -+ int idx = i * ctx->tiles_gw + j; -+ wrapper->tile_info[idx].left = (j == 0) ? 0 : wrapper->tile_info[idx - 1].left + tiles_col_width[j-1] * LCU_SIZE; -+ wrapper->tile_info[idx].top = (i == 0) ? 
0 : wrapper->tile_info[(i-1)*ctx->tiles_gw + j].top + tiles_row_height[i-1] * LCU_SIZE; -+ wrapper->tile_info[idx].tHeight = tiles_row_height[i] * LCU_SIZE; -+ wrapper->tile_info[idx].tWidth = tiles_col_width[j] * LCU_SIZE; -+ } -+ } ++} + -+ if(tiles_col_width) -+ { -+ free(tiles_col_width); -+ tiles_col_width = NULL; -+ } -+ if(tiles_row_height) -+ { -+ free(tiles_row_height); -+ tiles_row_height = NULL; ++static int eb_send_frame(EncoderWrapper* wrapper, int tile_idx, const AVFrame *frame) ++{ ++ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; ++ SvtEncoder *svt_enc = q->svt_enc; ++ EB_BUFFERHEADERTYPE *headerPtr = svt_enc->in_buf; ++ ++ AVFrame* tile_pic = NULL; ++ int ret = 0; ++ ++ if (!frame) { ++ EB_BUFFERHEADERTYPE headerPtrLast; ++ headerPtrLast.nAllocLen = 0; ++ headerPtrLast.nFilledLen = 0; ++ headerPtrLast.nTickCount = 0; ++ headerPtrLast.pAppPrivate = NULL; ++ //headerPtrLast.nOffset = 0; ++ //headerPtrLast.nTimeStamp = 0; ++ headerPtrLast.nFlags = EB_BUFFERFLAG_EOS; ++ headerPtrLast.pBuffer = NULL; ++ EbH265EncSendPicture(svt_enc->svt_handle, &headerPtrLast); ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "========tile id = %d NULL frame!!!\n", tile_idx); ++ q->eos_flag = 1; ++ av_log(wrapper->avctx, AV_LOG_ERROR, "Finish sending frames!!!\n"); ++ return ret; + } ++ get_tile_frame_nocopy(wrapper, tile_idx, frame, &tile_pic); ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "------tile id = %d start frame address: y=%p, u=%p, v=%p!!!\n", ++ tile_idx, tile_pic->data[0], tile_pic->data[1], tile_pic->data[2]); + -+ return 0; ++ read_in_data(&svt_enc->enc_params, tile_pic, headerPtr); ++ ++ //headerPtr->nOffset = 0; ++ headerPtr->nFlags = 0; ++ headerPtr->pAppPrivate = NULL; ++ headerPtr->pts = frame->pts; ++ //headerPtr->nFlags = 0; ++ //headerPtr->nTimeStamp = 0; ++ //headerPtr->pAppPrivate = NULL; ++ headerPtr->sliceType = INVALID_SLICE; ++ q->i += 1; ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d start to send frame, times = %d!!!\n", tile_idx, q->i); ++ ++ EbH265EncSendPicture(svt_enc->svt_handle, headerPtr); ++ ++ if(NULL!= tile_pic) av_frame_free(&tile_pic); ++ return ret; +} + -+/// assign bit rate for each tile. 
-+int get_tile_bitrate(EncoderWrapper* wrapper, int idx) ++static int eb_receive_packet(EncoderWrapper* wrapper, int tile_idx, AVPacket *pkt) +{ -+ int bit_rate = wrapper->avctx->bit_rate; -+ double percent = 0.0; -+ -+ if( 0==bit_rate ) bit_rate = wrapper->avctx->bit_rate_tolerance; -+ -+ ///FIXME if there is more suitable way to calculate bit rate for each tile -+ percent = (double)( wrapper->tile_info[idx].tHeight * wrapper->tile_info[idx].tWidth ) / (double)(wrapper->width * wrapper->height); -+ -+ return (int) (bit_rate * percent); -+ -+ } ++ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; ++ SvtEncoder *svt_enc = q->svt_enc; ++ EB_BUFFERHEADERTYPE *headerPtr = svt_enc->out_buf; ++ EB_ERRORTYPE stream_status = EB_ErrorNone; + -+int get_tile_maxrate(EncoderWrapper* wrapper, int idx) ++ int ret = 0; ++ ++ //if ((ret = ff_alloc_packet2(wrapper->avctx, pkt, svt_enc->raw_size, 0)) < 0){ ++ // av_log(wrapper->avctx, AV_LOG_ERROR, "tile id = %d ff_alloc_packet2 ret = %d!!!\n", tile_idx, ret); ++ // return ret; ++ //} ++ pkt->data = malloc(svt_enc->raw_size); ++ pkt->size = svt_enc->raw_size; ++ ++ headerPtr->pBuffer = pkt->data; ++ stream_status = EbH265GetPacket(svt_enc->svt_handle, headerPtr, q->eos_flag); ++ if ((stream_status == EB_NoErrorEmptyQueue)){ ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d stream_status == EB_NoErrorEmptyQueue!!!\n", tile_idx); ++ return AVERROR(EAGAIN); ++ } ++ pkt->size = headerPtr->nFilledLen; ++ pkt->pts = headerPtr->pts; ++ pkt->dts = headerPtr->dts; ++ ret = (headerPtr->nFlags & EB_BUFFERFLAG_EOS) ? AVERROR_EOF : 0; ++ ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d ret = %d!!!\n", tile_idx, ret); ++ return ret; ++} ++ ++static av_cold int eb_enc_close(EncoderWrapper* wrapper, int tile_idx) +{ -+ int max_rate = wrapper->avctx->rc_max_rate; -+ -+ ///FIXME if there is more suitable way to calculate bit rate for each tile -+ double percent = (double)( wrapper->tile_info[idx].tHeight * wrapper->tile_info[idx].tWidth ) / (double)(wrapper->width * wrapper->height); -+ -+ return (int) (max_rate * percent); ++ SvtContext *q = (SvtContext *)wrapper->tile_info[tile_idx].enc_ctx; ++ SvtEncoder *svt_enc = q->svt_enc; ++ ++ EbDeinitEncoder(svt_enc->svt_handle); ++ EbDeinitHandle(svt_enc->svt_handle); ++ ++ free_buffer(svt_enc); ++ av_freep(&svt_enc); + ++ return 0; +} + -+int bFifoReady( EncoderWrapper* wrapper ) ++///encode each tile with SVT ++int svt_enc_close(void* ctx) +{ -+ int eos = 0; -+ int ready = 0; ++ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; ++ SvtContext* svt_ctx = NULL; ++ + for(int i=0; itile_num; i++){ -+ if( wrapper->tile_info[i].outpkt_fifo ){ -+ if( av_fifo_size(wrapper->tile_info[i].outpkt_fifo) ){ -+ ready++; -+ }else{ -+ if(wrapper->tile_info[i].eos) eos++; -+ } ++ svt_ctx = (SvtContext*)wrapper->tile_info[i].enc_ctx; ++ if( NULL != svt_ctx){ ++ eb_enc_close(wrapper, i); ++ free(svt_ctx); + } ++ wrapper->tile_info[i].enc_ctx = NULL; + } -+ if( ready == wrapper->tile_num ) return 1; -+ if( eos == wrapper->tile_num ) return AVERROR_EOF; -+ ++ + return 0; +} -+int bs_tile_stitching(EncoderWrapper* wrapper, AVPacket* outPkt) ++ ++int svt_enc_init(void* ctx) +{ ++ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; ++ SvtContext* svt_ctx = NULL; + int ret = 0; -+ AVPacket pkt[MAX_TILES]; -+ int bReady = bFifoReady(wrapper); -+ int totalsize=0; -+ uint8_t* dst = NULL; -+ if( AVERROR_EOF == bReady ) return AVERROR_EOF; + -+ if( 1 == bReady ){ -+ for(int i=0; itile_num; i++){ -+ av_fifo_generic_read( 
wrapper->tile_info[i].outpkt_fifo, &pkt[i], sizeof(AVPacket), NULL); -+#ifdef FILE_DEBUG -+ wrapper->tile_info[i].nSpkt += 1; -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "######tile id=%d, getpkt=%d, stitched packet=%d#########\n", i, wrapper->tile_info[i].nGetpkt, wrapper->tile_info[i].nSpkt); -+ av_log(wrapper->avctx, AV_LOG_DEBUG, "**********tile id = %d, packet size = %d, packet addr=%p!!!\n", i,pkt[i].size, pkt[i].data); -+#endif -+ totalsize += pkt[i].size; -+ } -+ -+ // Sometimes the size of output is larger than size of input, -+ // so we alloc 2 times larger size packet. -+ ret = ff_alloc_packet2(wrapper->avctx, outPkt, 2*totalsize, 2*totalsize); -+ if( ret < 0) return -1; -+ -+ dst = outPkt->data; -+ -+ // call stitching library -+ wrapper->paramTiledStream.pOutputTiledBitstream = dst; -+ -+ for (int i = 0; i < wrapper->paramTiledStream.tilesHeightCount; i++) -+ { -+ for (int j = 0; j < wrapper->paramTiledStream.tilesWidthCount; j++) -+ { -+ param_oneStream_info *ptempinput = wrapper->paramTiledStream.pTiledBitstream[i*wrapper->paramTiledStream.tilesWidthCount + j]; -+ ptempinput->pTiledBitstreamBuffer = pkt[i*wrapper->paramTiledStream.tilesWidthCount + j].data; -+ ptempinput->inputBufferLen = pkt[i*wrapper->paramTiledStream.tilesWidthCount + j].size; -+ } -+ } -+ -+ wrapper->paramTiledStream.inputBistreamsLen = totalsize; -+ genTiledStream_process(&(wrapper->paramTiledStream), wrapper->pGen); -+ dst += wrapper->paramTiledStream.outputiledbistreamlen; -+ outPkt->size = wrapper->paramTiledStream.outputiledbistreamlen; -+/* -+#ifdef FILE_DEBUG -+ for(int i=0; itile_num; i++){ -+ memcpy(dst, pkt[i].data, pkt[i].size); -+ dst += pkt[i].size; -+ fwrite(pkt[i].data, 1, pkt[i].size, wrapper->tile_info[i].file); -+ } -+#endif -+*/ -+ // Send vps+sps+pps info -+ AVCodecContext* avctx = wrapper->avctx; -+ if(avctx->extradata_size == 0) -+ { -+ unsigned char *headerAddr; -+ avctx->extradata_size = genTiledStream_getParam(wrapper->pGen, ID_GEN_TILED_BITSTREAMS_HEADER, &headerAddr); -+ avctx->extradata = av_malloc(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE); -+ if (!avctx->extradata) { -+ av_log(avctx, AV_LOG_ERROR, -+ "Cannot allocate HEVC header of size %d.\n", avctx->extradata_size); -+ return AVERROR(ENOMEM); -+ } -+ memcpy(avctx->extradata, headerAddr, avctx->extradata_size); -+ } -+ -+ switch (wrapper->paramTiledStream.sliceType) { -+ case SLICE_IDR: -+ case SLICE_I: -+ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I; -+ break; -+ case SLICE_P: -+ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P; -+ break; -+ case SLICE_B: -+ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B; -+ break; -+ } -+ -+ //outPkt->pts = paramTiledStream.pts; -+ -+ ///unref the packet read from fifo -+ for(int i=0; itile_num; i++){ -+ av_packet_unref(&pkt[i]); -+ free(pkt[i].data); -+ } -+ -+ return 0; ++ for(int i=0; itile_num; i++){ ++ svt_ctx = malloc(sizeof(SvtContext)); ++ svt_ctx->svt_param.hierarchical_level = 3; ++ svt_ctx->svt_param.enc_mode = 9; ++ svt_ctx->svt_param.intra_ref_type = 1; ++ svt_ctx->svt_param.profile = 2; ++ svt_ctx->svt_param.rc_mode = 0;//0-CQP, 1-VBR ++ svt_ctx->svt_param.qp = 32; ++ svt_ctx->svt_param.scd = 0; ++ svt_ctx->svt_param.tune = 1; ++ svt_ctx->svt_param.intra_period = 5; ++ svt_ctx->svt_param.base_layer_switch_mode = 0; ++ svt_ctx->svt_param.vui_info = 0; ++ svt_ctx->svt_param.la_depth = -1; ++ svt_ctx->svt_param.bit_rate = wrapper->tile_info[i].tBitrate; ++ svt_ctx->i = 0; ++ svt_ctx->svt_param.intra_refresh_type = 1;//1-CRA, 2-IDR intra refresh ++ 
wrapper->tile_info[i].enc_ctx = svt_ctx; ++ ret = eb_enc_init(wrapper, i); ++ if( 0 != ret ) return ret; + } -+ return -1; ++ wrapper->initialized = 1; ++ return 0; +} + -+int get_tile_frame_copy(EncoderWrapper* wrapper, int tile_idx, const AVFrame *pic, AVFrame** tile_pic ) ++int svt_enc_frame(void* ctx, AVPacket *pkt, const AVFrame *pic, int *got_packet) +{ -+ int ret = 0; -+ uint8_t* src = NULL; -+ uint8_t* dst = NULL; -+ int factor = 1; -+ AVFrame* frame = NULL; ++ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; ++ SvtContext *q = NULL; + -+ if( NULL == *tile_pic ){ -+ *tile_pic = av_frame_alloc(); -+ if (!*tile_pic) { -+ av_freep(*tile_pic); -+ return AVERROR(ENOMEM); -+ } ++ int ret = 0; ++ ++ for(int i=0; itile_num; i++){ ++ q = (SvtContext *)wrapper->tile_info[i].enc_ctx; ++ if( wrapper->tile_info[i].eos ) continue; ++ ++ if(!q->eos_flag) eb_send_frame( wrapper, i, pic ); + } -+ -+ frame = *tile_pic; -+ frame->height = wrapper->tile_info[tile_idx].tHeight; -+ frame->width = wrapper->tile_info[tile_idx].tWidth; -+ -+ frame->format = pic->format; + -+ if (!frame->data[0]) { -+ ret = av_frame_get_buffer(frame, 32); -+ if (ret < 0){ -+ av_freep(*tile_pic); -+ return ret; -+ } ++ // Wake up all receive tile threads ++ if(!q->eos_flag) ++ { ++ pthread_cond_broadcast(&(wrapper->cond)); + } -+ -+ ///current copy is based on YUV420p format -+ for( int planner=0; planner<3; planner++ ){ -+ if( planner > 0 ){ -+ factor = 2; -+ } -+ src = pic->data[planner] + pic->linesize[planner]*(wrapper->tile_info[tile_idx].top / factor) + wrapper->tile_info[tile_idx].left / factor; -+ dst = frame->data[planner]; -+ for( int i=0; iheight/factor; i++ ){ -+ src += pic->linesize[planner]; -+ dst += frame->linesize[planner]; -+ memcpy( dst, src, frame->width / factor ); ++ else ++ { ++ // Wait until all tiles are ready ++ while(0==bFifoReady(wrapper)) ++ { ++ pthread_cond_broadcast(&(wrapper->cond)); ++ usleep(10000); + } + } -+ -+ return ret; -+} + -+int get_tile_frame_nocopy(EncoderWrapper* wrapper, int tile_idx, const AVFrame *pic, AVFrame** tile_pic ) -+{ -+ AVFrame* frame = NULL; -+ int factor = 1; -+ -+ if( NULL == *tile_pic ){ -+ *tile_pic = av_frame_alloc(); -+ if (!*tile_pic) { -+ av_freep(*tile_pic); -+ return AVERROR(ENOMEM); -+ } ++ //FIXME, suppose all encoder has the rhythm to get packet, so there is no buffer in the first time ++ ++ ret = bs_tile_stitching(wrapper, pkt); ++ ++ if( AVERROR_EOF == ret ){ ++ return AVERROR_EOF; + } ++ *got_packet = 1; ++ ++ if( -1 == ret ) *got_packet = 0; + -+ frame = *tile_pic; -+ frame->height = wrapper->tile_info[tile_idx].tHeight; -+ frame->width = wrapper->tile_info[tile_idx].tWidth; -+ frame->format = pic->format; -+ -+ for( int i=0; i<4; i++ ){ -+ if( i > 0 ){ -+ factor = 2; -+ } -+ frame->data[i] = pic->data[i] + pic->linesize[i]*(wrapper->tile_info[tile_idx].top / factor) + wrapper->tile_info[tile_idx].left / factor; -+ frame->linesize[i] = pic->linesize[i]; -+ } -+ + return 0; +} + -+static av_cold int tile_encode_close(AVCodecContext *avctx) ++int svt_enc_tile(TileEncoderInfo *tile_enc_info) +{ -+ TileEncoderContext *ctx = avctx->priv_data; -+ EncoderWrapper *wrapper = &(ctx->api); -+ AVFifoBuffer* fifo = NULL; ++ int ret = 0; + -+ if(wrapper->pGen) ++ EncoderWrapper *wrapper = (EncoderWrapper*)tile_enc_info->ctx; ++ int tile_idx = tile_enc_info->tile_idx; ++ ++ while(1) + { -+ genTiledStream_unInit(wrapper->pGen); ++ if(wrapper->initialized) ++ break; + } + -+ if (wrapper->paramTiledStream.pTiledBitstream) ++ SvtContext *q = (SvtContext 
*)wrapper->tile_info[tile_idx].enc_ctx; ++ SvtEncoder *svt_enc = q->svt_enc; ++ EB_BUFFERHEADERTYPE *headerPtr = svt_enc->out_buf; ++ ++ while(!wrapper->tile_info[tile_idx].eos) + { -+ for (int i = 0; i < wrapper->paramTiledStream.tilesHeightCount; i++) -+ { -+ for (int j = 0; j < wrapper->paramTiledStream.tilesWidthCount; j++) -+ { -+ free(wrapper->paramTiledStream.pTiledBitstream[i*wrapper->paramTiledStream.tilesWidthCount + j]); -+ wrapper->paramTiledStream.pTiledBitstream[i*wrapper->paramTiledStream.tilesWidthCount + j] = NULL; ++ // Wait until next frame is sent ++ if(!q->eos_flag) ++ pthread_cond_wait(&(wrapper->cond),&(wrapper->mutex)); ++ ++ AVPacket tile_pkts = {0}; ++ ret = eb_receive_packet(wrapper, tile_idx, &tile_pkts); ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d begin to eb_receive_packet!!!\n", tile_idx); ++ if( 0 == ret || AVERROR_EOF == ret ){ ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "**********tile id = %d eb_receive_packet got packet, packet size = %d, packet addr=%p!!!\n", tile_idx, tile_pkts.size, tile_pkts.data); ++ av_fifo_generic_write( wrapper->tile_info[tile_idx].outpkt_fifo, &tile_pkts, sizeof(AVPacket), NULL); ++#ifdef FILE_DEBUG ++ wrapper->tile_info[tile_idx].nGetpkt += 1; ++ //fwrite(tile_pkts.data, 1, tile_pkts.size, wrapper->tile_info[i].file); ++#endif ++ if( AVERROR_EOF == ret ){ ++ av_log(wrapper->avctx, AV_LOG_ERROR, "tile id = %d EOS!!!\n", tile_idx); ++ wrapper->tile_info[tile_idx].eos = 1; + } ++ }else{ ++ av_packet_unref(&tile_pkts); ++ free(tile_pkts.data); + } -+ free(wrapper->paramTiledStream.pTiledBitstream); -+ wrapper->paramTiledStream.pTiledBitstream = NULL; -+ } -+ if(avctx->extradata) -+ { -+ free(avctx->extradata); -+ avctx->extradata = NULL; -+ } + -+ if(wrapper->tid) -+ { -+ free(wrapper->tid); -+ wrapper->tid = NULL; + } -+ if(wrapper->tile_enc_info) ++ ++ // Wait until all tiles are done ++ while(AVERROR_EOF!=bFifoReady(wrapper)) + { -+ free(wrapper->tile_enc_info); -+ wrapper->tile_enc_info = NULL; ++ pthread_cond_wait(&(wrapper->cond),&(wrapper->mutex)); ++ usleep(10000); + } + -+ if( NULL != ctx->api.enc_close ) -+ ctx->api.enc_close(&(ctx->api)); -+ -+ for( int i=0; i < ctx->api.tile_num; i++ ){ -+ -+#ifdef FILE_DEBUG -+ if(ctx->api.tile_info[i].file) fclose(ctx->api.tile_info[i].file); -+#endif -+ -+ fifo = ctx->api.tile_info[i].outpkt_fifo; -+ while ( fifo && av_fifo_size(fifo)) { -+ AVPacket pkt; -+ av_fifo_generic_read(fifo, &pkt, sizeof(pkt), NULL); -+ free(pkt.data); -+ av_packet_unref(&pkt); -+ } -+ av_fifo_free(fifo); -+ fifo = NULL; -+ } -+ return 0; ++ return ret; +} -+ -+static av_cold int tile_encode_init(AVCodecContext *avctx) -+{ -+ TileEncoderContext *ctx = avctx->priv_data; -+ EncoderWrapper* wrapper = &(ctx->api); -+ int ret = 0; -+ char filename[256]; -+ -+ wrapper->width = avctx->coded_width; -+ wrapper->height = avctx->coded_height; -+ -+ wrapper->avctx = avctx; -+ switch(ctx->tile_mode){ -+ case FIX_SIZE: -+ wrapper->uniform_split = false; -+ assign_tiles_fix( ctx ); -+ break; -+ case AVG_SIZE: -+ wrapper->uniform_split = true; -+ assign_tiles_avg( ctx ); -+ break; -+ default: -+ break; -+ } +diff -urN FFmpeg/libavcodec/tile_encode_x265_impl.c FFmpeg-patched/libavcodec/tile_encode_x265_impl.c +--- FFmpeg/libavcodec/tile_encode_x265_impl.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavcodec/tile_encode_x265_impl.c 2020-09-27 13:35:13.371526551 +0800 +@@ -0,0 +1,477 @@ ++/* ++ * Intel tile encoder ++ * ++ * Copyright (c) 2018 Intel Cooperation ++ * ++ * This file is part of FFmpeg. 
++ * ++ * FFmpeg is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation; either ++ * version 2.1 of the License, or (at your option) any later version. ++ * ++ * FFmpeg is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. ++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with FFmpeg; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ */ ++ ++#include ++#include ++ ++#include ++#include ++ ++#include "libavutil/attributes.h" ++#include "libavutil/avassert.h" ++#include "libavutil/dict.h" ++#include "libavutil/error.h" ++#include "libavutil/imgutils.h" ++#include "libavutil/internal.h" ++#include "libavutil/log.h" ++#include "libavutil/mem.h" ++#include "libavutil/pixdesc.h" ++#include "libavutil/opt.h" ++#include "libavutil/common.h" ++#include "libavutil/opt.h" ++#include "libavutil/pixdesc.h" ++ ++#include "tile_encoder.h" ++#include ++#include ++ ++typedef struct x265Context { ++ x265_encoder *encoder; ++ x265_param *params; ++ const x265_api *api; + ++ int bit_rate; ++ int rc_max_rate; + -+ switch(ctx->enc_lib){ -+ case ENC_X265: -+ wrapper->enc_close = libx265_enc_close; -+ wrapper->enc_frame = libx265_enc_frame; -+ wrapper->enc_init = libx265_enc_init; -+ break; -+ case ENC_SVT: -+ wrapper->enc_close = svt_enc_close; -+ wrapper->enc_frame = svt_enc_frame; -+ wrapper->enc_init = svt_enc_init; -+ break; -+ default: -+ break; ++ float crf; ++ int forced_idr; ++ char *preset; ++ char *tune; ++ char *profile; ++ char *x265_opts; ++ ++} x265Context; ++ ++///encode each tile with libx265 ++static int is_keyframe(NalUnitType naltype) ++{ ++ switch (naltype) { ++ case NAL_UNIT_CODED_SLICE_BLA_W_LP: ++ case NAL_UNIT_CODED_SLICE_BLA_W_RADL: ++ case NAL_UNIT_CODED_SLICE_BLA_N_LP: ++ case NAL_UNIT_CODED_SLICE_IDR_W_RADL: ++ case NAL_UNIT_CODED_SLICE_IDR_N_LP: ++ case NAL_UNIT_CODED_SLICE_CRA: ++ return 1; ++ default: ++ return 0; + } ++} + -+ pthread_mutex_init(&(wrapper->mutex), NULL); -+ pthread_cond_init(&(wrapper->cond), NULL); -+ wrapper->tid = malloc(wrapper->tile_num * sizeof(pthread_t)); -+ wrapper->tile_enc_info = malloc(wrapper->tile_num * sizeof(TileEncoderInfo)); -+ for(int i=0; itile_num; i++){ -+ wrapper->tile_info[i].tBitrate = get_tile_bitrate(wrapper, i); -+ wrapper->tile_info[i].tMaxrate = get_tile_maxrate(wrapper, i); -+ wrapper->tile_info[i].eos = 0; -+ wrapper->tile_info[i].outpkt_fifo = av_fifo_alloc( FIFO_SIZE * sizeof(AVPacket)); -+#ifdef FILE_DEBUG -+ wrapper->tile_info[i].nGetpkt = 0; -+ wrapper->tile_info[i].nSpkt = 0; -+ sprintf(filename, "out_%d.265", i); -+ wrapper->tile_info[i].file = fopen(filename, "wb+"); -+#endif -+ wrapper->tile_enc_info[i].ctx = wrapper; -+ wrapper->tile_enc_info[i].tile_idx = i; ++static int x265_single_close( EncoderWrapper* wrapper, int tile_idx ) ++{ ++ x265Context* ctx = (x265Context*)wrapper->tile_info[tile_idx].enc_ctx; ++ ++ ctx->api->param_free(ctx->params); + -+ ret = pthread_create(&wrapper->tid[i], NULL, svt_enc_tile, &(wrapper->tile_enc_info[i])); -+ if(0 != ret) -+ { -+ av_log(avctx, AV_LOG_ERROR, "Cannot create thread!\n"); -+ return ret; ++ if (ctx->encoder) ++ ctx->api->encoder_close(ctx->encoder); ++ ++ return 0; ++} 
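The per-tile bit_rate and rc_max_rate consumed by the x265 wrapper below come from tile_info[].tBitrate / tMaxrate, which the earlier (now removed) get_tile_bitrate() / get_tile_maxrate() helpers derived from each tile's share of the picture area. A minimal standalone sketch of that split, assuming the EncoderWrapper/TileInfo fields used elsewhere in the tile encoder; the function name tile_rate_share() is illustrative only and not part of the patch:

```c
/* Split a stream-level rate (bit_rate or rc_max_rate) across tiles in
 * proportion to tile area, as the removed helpers did. */
static int tile_rate_share(const EncoderWrapper *wrapper, int idx, int64_t stream_rate)
{
    double share = (double)(wrapper->tile_info[idx].tWidth *
                            wrapper->tile_info[idx].tHeight) /
                   (double)(wrapper->width * wrapper->height);
    return (int)(stream_rate * share);
}

/* e.g. tile_info[i].tBitrate = tile_rate_share(wrapper, i, avctx->bit_rate); */
```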
++ ++static int x265_single_init( EncoderWrapper* wrapper, int tile_idx ) ++{ ++ x265Context* ctx = wrapper->tile_info[tile_idx].enc_ctx; ++ AVCodecContext* avctx = wrapper->avctx; ++ ++ ctx->api = x265_api_get(av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth); ++ if (!ctx->api) ++ ctx->api = x265_api_get(0); ++ ++ ctx->params = ctx->api->param_alloc(); ++ if (!ctx->params) { ++ av_log(avctx, AV_LOG_ERROR, "Could not allocate x265 param structure.\n"); ++ return AVERROR(ENOMEM); ++ } ++ ++ if (ctx->api->param_default_preset(ctx->params, ctx->preset, ctx->tune) < 0) { ++ int i; ++ ++ av_log(avctx, AV_LOG_ERROR, "Error setting preset/tune %s/%s.\n", ctx->preset, ctx->tune); ++ av_log(avctx, AV_LOG_INFO, "Possible presets:"); ++ for (i = 0; x265_preset_names[i]; i++) ++ av_log(avctx, AV_LOG_INFO, " %s", x265_preset_names[i]); ++ ++ av_log(avctx, AV_LOG_INFO, "\n"); ++ av_log(avctx, AV_LOG_INFO, "Possible tunes:"); ++ for (i = 0; x265_tune_names[i]; i++) ++ av_log(avctx, AV_LOG_INFO, " %s", x265_tune_names[i]); ++ ++ av_log(avctx, AV_LOG_INFO, "\n"); ++ ++ return AVERROR(EINVAL); ++ } ++ ++ ctx->params->frameNumThreads = avctx->thread_count; ++ ctx->params->fpsNum = avctx->time_base.den; ++ ctx->params->fpsDenom = avctx->time_base.num * avctx->ticks_per_frame; ++ ctx->params->sourceWidth = wrapper->tile_info[tile_idx].tWidth; ++ ctx->params->sourceHeight = wrapper->tile_info[tile_idx].tHeight; ++ ctx->params->bEnablePsnr = !!(avctx->flags & AV_CODEC_FLAG_PSNR); ++ ctx->params->bOpenGOP = !(avctx->flags & AV_CODEC_FLAG_CLOSED_GOP); ++ ++ /* Tune the CTU size based on input resolution. */ ++ if (ctx->params->sourceWidth < 64 || ctx->params->sourceHeight < 64) ++ ctx->params->maxCUSize = 32; ++ if (ctx->params->sourceWidth < 32 || ctx->params->sourceHeight < 32) ++ ctx->params->maxCUSize = 16; ++ if (ctx->params->sourceWidth < 16 || ctx->params->sourceHeight < 16) { ++ av_log(avctx, AV_LOG_ERROR, "Image size is too small (%dx%d).\n", ++ ctx->params->sourceWidth, ctx->params->sourceHeight); ++ return AVERROR(EINVAL); ++ } ++ ++ if ((avctx->color_primaries <= AVCOL_PRI_SMPTE432 && ++ avctx->color_primaries != AVCOL_PRI_UNSPECIFIED) || ++ (avctx->color_trc <= AVCOL_TRC_ARIB_STD_B67 && ++ avctx->color_trc != AVCOL_TRC_UNSPECIFIED) || ++ (avctx->colorspace <= AVCOL_SPC_ICTCP && ++ avctx->colorspace != AVCOL_SPC_UNSPECIFIED)) { ++ ++ ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1; ++ ctx->params->vui.bEnableColorDescriptionPresentFlag = 1; ++ ++ // x265 validates the parameters internally ++ ctx->params->vui.colorPrimaries = avctx->color_primaries; ++ ctx->params->vui.transferCharacteristics = avctx->color_trc; ++ ctx->params->vui.matrixCoeffs = avctx->colorspace; ++ } ++ ++ if (avctx->sample_aspect_ratio.num > 0 && avctx->sample_aspect_ratio.den > 0) { ++ char sar[12]; ++ int sar_num, sar_den; ++ ++ av_reduce(&sar_num, &sar_den, ++ avctx->sample_aspect_ratio.num, ++ avctx->sample_aspect_ratio.den, 65535); ++ snprintf(sar, sizeof(sar), "%d:%d", sar_num, sar_den); ++ if (ctx->api->param_parse(ctx->params, "sar", sar) == X265_PARAM_BAD_VALUE) { ++ av_log(avctx, AV_LOG_ERROR, "Invalid SAR: %d:%d.\n", sar_num, sar_den); ++ return AVERROR_INVALIDDATA; + } + } -+ -+ if( NULL != ctx->api.enc_init ){ -+ ret = wrapper->enc_init(wrapper); -+ if( 0 != ret ) return ret; ++ ++ switch (avctx->pix_fmt) { ++ case AV_PIX_FMT_YUV420P: ++ case AV_PIX_FMT_YUV420P10: ++ case AV_PIX_FMT_YUV420P12: ++ ctx->params->internalCsp = X265_CSP_I420; ++ break; ++ case AV_PIX_FMT_YUV422P: ++ case AV_PIX_FMT_YUV422P10: ++ 
case AV_PIX_FMT_YUV422P12: ++ ctx->params->internalCsp = X265_CSP_I422; ++ break; ++ case AV_PIX_FMT_GBRP: ++ case AV_PIX_FMT_GBRP10: ++ case AV_PIX_FMT_GBRP12: ++ ctx->params->vui.matrixCoeffs = AVCOL_SPC_RGB; ++ ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1; ++ ctx->params->vui.bEnableColorDescriptionPresentFlag = 1; ++ case AV_PIX_FMT_YUV444P: ++ case AV_PIX_FMT_YUV444P10: ++ case AV_PIX_FMT_YUV444P12: ++ ctx->params->internalCsp = X265_CSP_I444; ++ break; ++ case AV_PIX_FMT_GRAY8: ++ case AV_PIX_FMT_GRAY10: ++ case AV_PIX_FMT_GRAY12: ++ if (ctx->api->api_build_number < 85) { ++ av_log(avctx, AV_LOG_ERROR, ++ "libx265 version is %d, must be at least 85 for gray encoding.\n", ++ ctx->api->api_build_number); ++ return AVERROR_INVALIDDATA; ++ } ++ ctx->params->internalCsp = X265_CSP_I400; ++ break; ++ } ++ ++ if (ctx->crf >= 0) { ++ char crf[6]; ++ ++ snprintf(crf, sizeof(crf), "%2.2f", ctx->crf); ++ if (ctx->api->param_parse(ctx->params, "crf", crf) == X265_PARAM_BAD_VALUE) { ++ av_log(avctx, AV_LOG_ERROR, "Invalid crf: %2.2f.\n", ctx->crf); ++ return AVERROR(EINVAL); ++ } ++ } else if (ctx->bit_rate > 0) { ++ ctx->params->rc.bitrate = ctx->bit_rate / 1000; ++ ctx->params->rc.rateControlMode = X265_RC_ABR; ++ } ++ ++ ctx->params->rc.vbvBufferSize = ctx->bit_rate * 2 / 3000; ++ ctx->params->rc.vbvMaxBitrate = ctx->rc_max_rate / 1000; ++ ++ if (!(avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER)) ++ ctx->params->bRepeatHeaders = 1; ++ ++ if (ctx->x265_opts) { ++ AVDictionary *dict = NULL; ++ AVDictionaryEntry *en = NULL; ++ ++ if (!av_dict_parse_string(&dict, ctx->x265_opts, "=", ":", 0)) { ++ while ((en = av_dict_get(dict, "", en, AV_DICT_IGNORE_SUFFIX))) { ++ int parse_ret = ctx->api->param_parse(ctx->params, en->key, en->value); ++ ++ switch (parse_ret) { ++ case X265_PARAM_BAD_NAME: ++ av_log(avctx, AV_LOG_WARNING, ++ "Unknown option: %s.\n", en->key); ++ break; ++ case X265_PARAM_BAD_VALUE: ++ av_log(avctx, AV_LOG_WARNING, ++ "Invalid value for %s: %s.\n", en->key, en->value); ++ break; ++ default: ++ break; ++ } ++ } ++ av_dict_free(&dict); ++ } ++ } ++ ++ if (ctx->params->rc.vbvBufferSize && avctx->rc_initial_buffer_occupancy > 1000 && ++ ctx->params->rc.vbvBufferInit == 0.9) { ++ ctx->params->rc.vbvBufferInit = (float)avctx->rc_initial_buffer_occupancy / 1000; ++ } ++ ++ if (ctx->profile) { ++ if (ctx->api->param_apply_profile(ctx->params, ctx->profile) < 0) { ++ int i; ++ av_log(avctx, AV_LOG_ERROR, "Invalid or incompatible profile set: %s.\n", ctx->profile); ++ av_log(avctx, AV_LOG_INFO, "Possible profiles:"); ++ for (i = 0; x265_profile_names[i]; i++) ++ av_log(avctx, AV_LOG_INFO, " %s", x265_profile_names[i]); ++ av_log(avctx, AV_LOG_INFO, "\n"); ++ return AVERROR(EINVAL); ++ } ++ } ++ ++ ctx->encoder = ctx->api->encoder_open(ctx->params); ++ if (!ctx->encoder) { ++ av_log(avctx, AV_LOG_ERROR, "Cannot open libx265 encoder.\n"); ++ x265_single_close(wrapper, tile_idx); ++ return AVERROR_INVALIDDATA; + } + -+ wrapper->paramTiledStream.tilesHeightCount = wrapper->tile_h; -+ wrapper->paramTiledStream.tilesWidthCount = wrapper->tile_w; -+ wrapper->paramTiledStream.tilesUniformSpacing = wrapper->uniform_split; -+ wrapper->paramTiledStream.frameWidth = wrapper->width; -+ wrapper->paramTiledStream.frameHeight = wrapper->height; -+ wrapper->paramTiledStream.pTiledBitstream = (param_oneStream_info**)malloc(wrapper->tile_h * wrapper->tile_w * sizeof(param_oneStream_info *)); -+ if (!wrapper->paramTiledStream.pTiledBitstream) -+ { -+ printf("memory alloc failed!"); -+ return 1; -+ } ++/* 
if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) { ++ x265_nal *nal; ++ int nnal; + -+ for (int i = 0; i < wrapper->paramTiledStream.tilesHeightCount; i++) -+ { -+ for (int j = 0; j < wrapper->paramTiledStream.tilesWidthCount; j++) -+ { -+ wrapper->paramTiledStream.pTiledBitstream[i*wrapper->paramTiledStream.tilesWidthCount + j] = (param_oneStream_info*)malloc(sizeof(param_oneStream_info)); ++ avctx->extradata_size = ctx->api->encoder_headers(ctx->encoder, &nal, &nnal); ++ if (avctx->extradata_size <= 0) { ++ av_log(avctx, AV_LOG_ERROR, "Cannot encode headers.\n"); ++ libx265_encode_close(avctx); ++ return AVERROR_INVALIDDATA; + } -+ } + -+ wrapper->pGen = genTiledStream_Init(&(wrapper->paramTiledStream)); -+ if (!wrapper->pGen) -+ { -+ printf("the initialize failed\n"); -+ return 1; -+ } ++ avctx->extradata = av_malloc(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE); ++ if (!avctx->extradata) { ++ av_log(avctx, AV_LOG_ERROR, ++ "Cannot allocate HEVC header of size %d.\n", avctx->extradata_size); ++ libx265_encode_close(avctx); ++ return AVERROR(ENOMEM); ++ } + ++ memcpy(avctx->extradata, nal[0].payload, avctx->extradata_size); ++ } ++*/ + return 0; +} + -+static int tile_encode_frame(AVCodecContext *avctx, AVPacket *pkt, -+ const AVFrame *pic, int *got_packet) ++static int x265_single_frame( EncoderWrapper* wrapper, int tile_idx, AVPacket *pkt, const AVFrame *pic, int *got_packet ) +{ -+ TileEncoderContext *ctx = avctx->priv_data; -+ if( NULL != ctx->api.enc_frame ) -+ ctx->api.enc_frame(&(ctx->api), pkt, pic, got_packet); -+ -+ return 0; -+} ++ x265Context* ctx = (x265Context*)wrapper->tile_info[tile_idx].enc_ctx; ++ AVCodecContext* avctx = wrapper->avctx; ++ AVFrame* tile_pic = NULL; ++ x265_picture x265pic; ++ x265_picture x265pic_out = { 0 }; ++ x265_nal *nal; ++ uint8_t *dst; ++ int payload = 0; ++ int nnal; ++ int ret; ++ int i; + -+#define OFFSET(x) offsetof(TileEncoderContext, x) -+#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM -+static const AVOption options[] = { -+ { "enc", "what's the encoder for each tile. so far, x265=1, svt=2.", OFFSET(enc_lib), AV_OPT_TYPE_INT, { .i64 = 2 }, 0, 3, VE }, -+ { "tile_mode", "specify how to divide the tiles of the picture: 1 fixed size tiles; 2. 
grid layout, 3x3, 4x4.", OFFSET(tile_mode), AV_OPT_TYPE_INT, { .i64 = 2 }, 0, 3, VE }, -+ { "tiles_gw", "horizontal grid number of tiles; available when tile is divided via grid layout .", OFFSET(tiles_gw), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, INT_MAX, VE }, -+ { "tiles_gh", "vertical grid number of tiles; available when tile is divided via grid layout .", OFFSET(tiles_gh), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, INT_MAX, VE }, -+ { "tiles_fixw", "horizontal width of tiles; available when tile is divided via fixed size.", OFFSET(fix_tiles_w), AV_OPT_TYPE_INT, { .i64 = 512 }, 0, INT_MAX, VE }, -+ { "tiles_fixh", "vertical height of tiles; available when tile is divided via fixed size.", OFFSET(fix_tiles_h), AV_OPT_TYPE_INT, { .i64 = 512 }, 0, INT_MAX, VE }, -+ { "params", "Set parameters as a comma-separated list of key=value pairs.", OFFSET(params), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, VE }, -+ { NULL }, -+}; ++ ctx->api->picture_init(ctx->params, &x265pic); + -+static const AVClass class = { -+ .class_name = "hevc_tile_encoder", -+ .item_name = av_default_item_name, -+ .option = options, -+ .version = LIBAVUTIL_VERSION_INT, -+}; ++ if (pic) { ++ get_tile_frame_nocopy(wrapper, tile_idx, pic, &tile_pic); + -+static const AVCodecDefault defaults[] = { -+ { "b", "0" }, -+ { NULL }, -+}; ++ for (i = 0; i < 3; i++) { ++ x265pic.planes[i] = tile_pic->data[i]; ++ x265pic.stride[i] = tile_pic->linesize[i]; ++ } + -+AVCodec ff_hevc_tile_encoder = { -+ .name = "hevc_tile_encoder", -+ .long_name = NULL_IF_CONFIG_SMALL("distribute tile H.265 / HEVC"), -+ .type = AVMEDIA_TYPE_VIDEO, -+ .id = AV_CODEC_ID_HEVC, -+ .capabilities = AV_CODEC_CAP_DELAY, -+ .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, -+ AV_PIX_FMT_YUV420P10, -+ AV_PIX_FMT_NONE }, ++ x265pic.pts = pic->pts; ++ x265pic.bitDepth = av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth; + -+ .priv_class = &class, -+ .priv_data_size = sizeof(TileEncoderContext), -+ .defaults = defaults, ++ x265pic.sliceType = pic->pict_type == AV_PICTURE_TYPE_I ? ++ (ctx->forced_idr ? X265_TYPE_IDR : X265_TYPE_I) : ++ pic->pict_type == AV_PICTURE_TYPE_P ? X265_TYPE_P : ++ pic->pict_type == AV_PICTURE_TYPE_B ? X265_TYPE_B : ++ X265_TYPE_AUTO; ++ } + -+ .init = tile_encode_init, -+ .encode2 = tile_encode_frame, -+ .close = tile_encode_close, ++ ret = ctx->api->encoder_encode(ctx->encoder, &nal, &nnal, ++ pic ? &x265pic : NULL, &x265pic_out); + -+ .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP, ++ if(NULL!= tile_pic) av_frame_free(&tile_pic); + -+ .wrapper_name = "hevc_tile_encoder", -+}; -diff --git a/FFmpeg/libavcodec/tile_encoder.h b/FFmpeg/libavcodec/tile_encoder.h -new file mode 100644 -index 0000000..d9afc5c ---- /dev/null -+++ b/FFmpeg/libavcodec/tile_encoder.h -@@ -0,0 +1,125 @@ -+/* -+ * Intel tile encoder -+ * -+ * Copyright (c) 2018 Intel Cooperation -+ * -+ * This file is part of FFmpeg. -+ * -+ * FFmpeg is free software; you can redistribute it and/or -+ * modify it under the terms of the GNU Lesser General Public -+ * License as published by the Free Software Foundation; either -+ * version 2.1 of the License, or (at your option) any later version. -+ * -+ * FFmpeg is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ * Lesser General Public License for more details. 
-+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with FFmpeg; if not, write to the Free Software -+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -+ */ ++ if (ret < 0) ++ return AVERROR_EXTERNAL; + -+#ifndef TILE_ENCODER_H -+#define TILE_ENCODER_H -+#define FILE_DEBUG ++ if (!nnal) ++ return 0; + -+#include "libavutil/fifo.h" ++ ///FIXME, need to assign each NAL to a packet if stitching library can only process one NAL ++ for (i = 0; i < nnal; i++) ++ payload += nal[i].sizeBytes; + -+#include "avcodec.h" -+#include "internal.h" -+#include -+#include -+#include -+#include ++ /*ret = ff_alloc_packet2(avctx, pkt, payload, payload); ++ if (ret < 0) { ++ av_log(avctx, AV_LOG_ERROR, "Error getting output packet.\n"); ++ return ret; ++ }*/ ++ pkt->data = malloc(payload); ++ pkt->size = payload; ++ dst = pkt->data; + -+#include "genTiledstreamAPI.h" ++ for (i = 0; i < nnal; i++) { ++ memcpy(dst, nal[i].payload, nal[i].sizeBytes); ++ dst += nal[i].sizeBytes; + -+#define MAX_TILES 256 -+#define FIFO_SIZE 8024 ++ if (is_keyframe(nal[i].type)) ++ pkt->flags |= AV_PKT_FLAG_KEY; ++ } + -+enum ENC_LIB{ -+ ENC_NULL = 0, -+ ENC_X265 = 1, -+ ENC_SVT = 2 -+}; ++ pkt->pts = x265pic_out.pts; ++ pkt->dts = x265pic_out.dts; + -+enum TILE_MODE{ -+ NULL_MODE = 0, -+ FIX_SIZE = 1, -+ AVG_SIZE = 2 -+}; -+typedef int (*ENC_CLOSE)(void*); -+typedef int (*ENC_INIT)(void*); -+typedef int (*ENC_FRAME)(void*, AVPacket*, const AVFrame*, int*); ++#if FF_API_CODED_FRAME ++FF_DISABLE_DEPRECATION_WARNINGS ++ switch (x265pic_out.sliceType) { ++ case X265_TYPE_IDR: ++ case X265_TYPE_I: ++ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I; ++ break; ++ case X265_TYPE_P: ++ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P; ++ break; ++ case X265_TYPE_B: ++ avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B; ++ break; ++ } ++FF_ENABLE_DEPRECATION_WARNINGS ++#endif + -+typedef struct TileInfo{ -+ int top; -+ int left; -+ int tWidth; -+ int tHeight; -+ int tBitrate; -+ int tMaxrate; -+ AVFifoBuffer* outpkt_fifo; -+ int proc_idx; -+ int eos; -+ void* enc_ctx; -+ AVPacket* internal_pkt; -+#ifdef FILE_DEBUG -+ int nGetpkt; -+ int nSpkt; -+ FILE* file; ++#if X265_BUILD >= 130 ++ if (x265pic_out.sliceType == X265_TYPE_B) ++#else ++ if (x265pic_out.frameData.sliceType == 'b') +#endif -+} TileInfo; ++ pkt->flags |= AV_PKT_FLAG_DISPOSABLE; + -+typedef struct TileEncoderInfo{ -+ void *ctx; -+ int tile_idx; -+}TileEncoderInfo; ++ *got_packet = 1; + -+typedef struct EncoderWrapper{ -+ AVCodecContext* avctx; -+ -+ int width; -+ int height; -+ void* enc_param; ++ return 0; ++} + -+ bool uniform_split; -+ int tile_num; -+ int tile_w; -+ int tile_h; -+ TileInfo tile_info[MAX_TILES]; -+ -+ ENC_CLOSE enc_close; -+ ENC_INIT enc_init; -+ ENC_FRAME enc_frame; ++int libx265_enc_close(void* ctx) ++{ ++ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; ++ x265Context* x265_ctx = NULL; ++ ++ for(int i=0; itile_num; i++){ ++ x265_ctx = (x265Context*)wrapper->tile_info[i].enc_ctx; ++ if( NULL != x265_ctx){ ++ x265_single_close(wrapper, i); ++ free(x265_ctx); ++ } ++ wrapper->tile_info[i].enc_ctx = NULL; ++ } + -+ TileEncoderInfo *tile_enc_info; -+ pthread_t *tid; -+ int initialized; ++ return 0; ++} + -+ void *pGen; -+ param_gen_tiledStream paramTiledStream; -+ pthread_mutex_t mutex; -+ pthread_cond_t cond; -+} EncoderWrapper; ++int libx265_enc_init(void* ctx) ++{ ++ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; ++ x265Context* x265_ctx = NULL; + -+int 
get_tile_frame_copy(EncoderWrapper* wrapper, int tile_idx, const AVFrame *pic, AVFrame** tile_pic ); -+int get_tile_frame_nocopy(EncoderWrapper* wrapper, int tile_idx, const AVFrame *pic, AVFrame** tile_pic ); ++ for(int i=0; itile_num; i++){ ++ x265_ctx = malloc(sizeof(x265Context)); ++ x265_ctx->api = NULL; ++ x265_ctx->encoder = NULL; ++ x265_ctx->x265_opts = wrapper->enc_param; ++ x265_ctx->preset = "fast"; ++ x265_ctx->tune = "psnr"; ++ x265_ctx->crf = -1; ++ x265_ctx->profile = "main"; ++ x265_ctx->forced_idr = 0; ++ x265_ctx->bit_rate = wrapper->tile_info[i].tBitrate; ++ x265_ctx->rc_max_rate = wrapper->tile_info[i].tMaxrate; ++ wrapper->tile_info[i].enc_ctx = x265_ctx; ++ x265_single_init(wrapper, i); ++ } ++ return 0; ++} ++ ++int libx265_enc_frame(void* ctx, AVPacket *pkt, const AVFrame *pic, int *got_packet) ++{ ++ EncoderWrapper* wrapper = (EncoderWrapper*)ctx; ++ int ret = 0; ++ int got_pkt = 0; + -+int bs_tile_stitching(EncoderWrapper* wrapper, AVPacket* outPkt); -+int get_tile_bitrate(EncoderWrapper* wrapper, int idx); -+int get_tile_maxrate(EncoderWrapper* wrapper, int idx); ++ for(int i=0; itile_num; i++){ ++ if( wrapper->tile_info[i].eos ) continue; ++ got_pkt = 0; ++ AVPacket tile_pkts = {0}; ++ ++ ret = x265_single_frame(wrapper, i, &tile_pkts, pic, &got_pkt); ++ ++ if( got_pkt ){ ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "**********tile id = %d receive_packet got packet!!!\n", i); ++ av_fifo_generic_write( wrapper->tile_info[i].outpkt_fifo, &tile_pkts, sizeof(AVPacket), NULL); ++ }else{ ++ av_packet_unref(&tile_pkts); ++ free(tile_pkts.data); ++ } ++ if( NULL==pic && !got_pkt ){ ++ av_log(wrapper->avctx, AV_LOG_DEBUG, "tile id = %d EOS!!!\n", i); ++ wrapper->tile_info[i].eos = 1; ++ } ++ } + -+int libx265_enc_close(void* ctx); -+int libx265_enc_init(void* ctx); -+int libx265_enc_frame(void* ctx, AVPacket *pkt, const AVFrame *pic, int *got_packet); ++ //FIXME, suppose all encoder has the rhythm to get packet, so there is no buffer in the first time ++ ret = bs_tile_stitching(wrapper, pkt); + -+int svt_enc_close(void* ctx); -+int svt_enc_init(void* ctx); -+int svt_enc_frame(void* ctx, AVPacket *pkt, const AVFrame *pic, int *got_packet); -+int svt_enc_tile(TileEncoderInfo *tile_enc_info); -+int bFifoReady( EncoderWrapper* wrapper ); ++ if( AVERROR_EOF == ret ){ ++ return AVERROR_EOF; ++ } ++ *got_packet = 1; + -+#endif /* TILE_ENCODER_H */ ++ if( -1 == ret ) *got_packet = 0; + -diff --git a/FFmpeg/libavfilter/Makefile b/FFmpeg/libavfilter/Makefile -index 7beec31..e3cd0d2 100644 ---- a/FFmpeg/libavfilter/Makefile -+++ b/FFmpeg/libavfilter/Makefile -@@ -382,6 +382,7 @@ OBJS-$(CONFIG_TMIX_FILTER) += vf_mix.o framesync.o - OBJS-$(CONFIG_TONEMAP_FILTER) += vf_tonemap.o colorspace.o - OBJS-$(CONFIG_TONEMAP_OPENCL_FILTER) += vf_tonemap_opencl.o colorspace.o opencl.o \ - opencl/tonemap.o opencl/colorspace_common.o -+OBJS-$(CONFIG_TRANSFORM360_FILTER) += vf_transform360.o - OBJS-$(CONFIG_TRANSPOSE_FILTER) += vf_transpose.o - OBJS-$(CONFIG_TRANSPOSE_NPP_FILTER) += vf_transpose_npp.o - OBJS-$(CONFIG_TRIM_FILTER) += trim.o -diff --git a/FFmpeg/libavfilter/allfilters.c b/FFmpeg/libavfilter/allfilters.c -index 6f1e7cf..563777e 100644 ---- a/FFmpeg/libavfilter/allfilters.c -+++ b/FFmpeg/libavfilter/allfilters.c -@@ -364,6 +364,7 @@ extern AVFilter ff_vf_tlut2; ++ return 0; ++} +\ No newline at end of file +diff -urN FFmpeg/libavfilter/allfilters.c FFmpeg-patched/libavfilter/allfilters.c +--- FFmpeg/libavfilter/allfilters.c 2020-07-11 18:39:30.000000000 +0800 ++++ 
FFmpeg-patched/libavfilter/allfilters.c 2020-09-27 13:35:13.453526545 +0800 +@@ -406,6 +406,7 @@ extern AVFilter ff_vf_tmix; extern AVFilter ff_vf_tonemap; extern AVFilter ff_vf_tonemap_opencl; +extern AVFilter ff_vf_transform360; + extern AVFilter ff_vf_tonemap_vaapi; + extern AVFilter ff_vf_tpad; extern AVFilter ff_vf_transpose; - extern AVFilter ff_vf_transpose_npp; - extern AVFilter ff_vf_trim; -diff --git a/FFmpeg/libavfilter/vf_transform360.c b/FFmpeg/libavfilter/vf_transform360.c -new file mode 100644 -index 0000000..44674bc ---- /dev/null -+++ b/FFmpeg/libavfilter/vf_transform360.c +@@ -444,6 +445,7 @@ + extern AVFilter ff_vf_zmq; + extern AVFilter ff_vf_zoompan; + extern AVFilter ff_vf_zscale; ++extern AVFilter ff_vf_xcam; + + extern AVFilter ff_vsrc_allrgb; + extern AVFilter ff_vsrc_allyuv; +diff -urN FFmpeg/libavfilter/Makefile FFmpeg-patched/libavfilter/Makefile +--- FFmpeg/libavfilter/Makefile 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavfilter/Makefile 2020-09-27 13:35:13.450526545 +0800 +@@ -143,6 +143,7 @@ + OBJS-$(CONFIG_VIBRATO_FILTER) += af_vibrato.o generate_wave_table.o + OBJS-$(CONFIG_VOLUME_FILTER) += af_volume.o + OBJS-$(CONFIG_VOLUMEDETECT_FILTER) += af_volumedetect.o ++OBJS-$(CONFIG_XCAM_FILTER) += vf_xcam.o + + OBJS-$(CONFIG_AEVALSRC_FILTER) += aeval.o + OBJS-$(CONFIG_AFIRSRC_FILTER) += asrc_afirsrc.o +@@ -426,6 +427,7 @@ + OBJS-$(CONFIG_TONEMAP_FILTER) += vf_tonemap.o colorspace.o + OBJS-$(CONFIG_TONEMAP_OPENCL_FILTER) += vf_tonemap_opencl.o colorspace.o opencl.o \ + opencl/tonemap.o opencl/colorspace_common.o ++OBJS-$(CONFIG_TRANSFORM360_FILTER) += vf_transform360.o + OBJS-$(CONFIG_TONEMAP_VAAPI_FILTER) += vf_tonemap_vaapi.o vaapi_vpp.o + OBJS-$(CONFIG_TPAD_FILTER) += vf_tpad.o + OBJS-$(CONFIG_TRANSPOSE_FILTER) += vf_transpose.o +diff -urN FFmpeg/libavfilter/vf_transform360.c FFmpeg-patched/libavfilter/vf_transform360.c +--- FFmpeg/libavfilter/vf_transform360.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavfilter/vf_transform360.c 2020-09-27 13:35:13.495526541 +0800 @@ -0,0 +1,483 @@ +/** + * Copyright (c) 2015-present, Facebook, Inc. 
@@ -3779,95 +4210,412 @@ index 0000000..44674bc + { NULL } +}; + -+static const AVClass transform360_class = { -+ .class_name = "transform360", -+ .item_name = av_default_item_name, -+ .option = transform360_options, -+ .version = LIBAVUTIL_VERSION_INT, -+ .category = AV_CLASS_CATEGORY_FILTER, -+}; ++static const AVClass transform360_class = { ++ .class_name = "transform360", ++ .item_name = av_default_item_name, ++ .option = transform360_options, ++ .version = LIBAVUTIL_VERSION_INT, ++ .category = AV_CLASS_CATEGORY_FILTER, ++}; ++ ++static const AVFilterPad avfilter_vf_transform_inputs[] = { ++ { ++ .name = "default", ++ .type = AVMEDIA_TYPE_VIDEO, ++ .filter_frame = filter_frame, ++ }, ++ { NULL } ++}; ++ ++static const AVFilterPad avfilter_vf_transform_outputs[] = { ++ { ++ .name = "default", ++ .type = AVMEDIA_TYPE_VIDEO, ++ .config_props = config_output, ++ }, ++ { NULL } ++}; ++ ++AVFilter ff_vf_transform360 = { ++ .name = "transform360", ++ .description = NULL_IF_CONFIG_SMALL("Transforms equirectangular input video to the other format."), ++ .init_dict = init_dict, ++ .uninit = uninit, ++ .priv_size = sizeof(TransformContext), ++ .priv_class = &transform360_class, ++ .inputs = avfilter_vf_transform_inputs, ++ .outputs = avfilter_vf_transform_outputs, ++}; +diff -urN FFmpeg/libavfilter/vf_xcam.c FFmpeg-patched/libavfilter/vf_xcam.c +--- FFmpeg/libavfilter/vf_xcam.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavfilter/vf_xcam.c 2020-09-27 13:35:13.516526540 +0800 +@@ -0,0 +1,350 @@ ++/* ++ * Copyright (c) 2020 Intel Corporation, all rights reserved. ++ * ++ * This file is part of FFmpeg. ++ * ++ * FFmpeg is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Lesser General Public ++ * License as published by the Free Software Foundation; either ++ * version 2.1 of the License, or (at your option) any later version. ++ * ++ * FFmpeg is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Lesser General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Lesser General Public ++ * License along with FFmpeg; if not, write to the Free Software ++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ++ */ ++ ++/** ++ * @file ++ * libxcam wrapper functions ++ */ ++ ++#include ++#include "libavutil/avstring.h" ++#include "libavutil/opt.h" ++#include "framesync.h" ++#include "internal.h" ++ ++typedef struct XCamVideoFilterBuf { ++ XCamVideoBuffer buf; ++ AVFrame *frame; ++} XCamVideoFilterBuf; ++ ++typedef struct XCAMContext { ++ const AVClass *class; ++ ++ int nb_inputs; ++ int w; ++ int h; ++ char *fmt; ++ char *name; ++ int allocoutbuf; ++ char *params; ++ ++ XCamHandle *handle; ++ uint32_t v4l2_fmt; ++ ++ XCamVideoFilterBuf *inbufs[XCAM_MAX_INPUTS_NUM + 1]; ++ FFFrameSync fs; ++} XCAMContext; ++ ++static void xcambuf_ref(XCamVideoBuffer *buf) { ++ return; ++} ++ ++static void xcambuf_unref(XCamVideoBuffer *buf) { ++ return; ++} ++ ++static uint8_t *xcambuf_map(XCamVideoBuffer *buf) { ++ XCamVideoFilterBuf *avfilter_buf = (XCamVideoFilterBuf *)(buf); ++ return avfilter_buf->frame->data[0]; ++} ++ ++static void xcambuf_unmap(XCamVideoBuffer *buf) { ++ return; ++} ++ ++static int xcambuf_get_fd(XCamVideoBuffer *buf) { ++ return 1; ++} ++ ++static void fill_xcambuf_from_avframe(XCamVideoFilterBuf *buf, AVFrame *frame) ++{ ++ buf->frame = frame; ++} ++ ++static void fill_avframe_from_xcambuf(AVFrame *frame, XCamVideoBuffer *buf) ++{ ++ XCamVideoBufferPlanarInfo planar; ++ ++ uint8_t *start = xcam_video_buffer_map(buf); ++ if (!start) ++ return; ++ ++ for (uint32_t idx = 0; idx < buf->info.components; idx++) { ++ uint8_t *src = start + buf->info.offsets[idx]; ++ uint8_t *dest = frame->data[idx]; ++ xcam_video_buffer_get_planar_info(&buf->info, &planar, idx); ++ ++ for (uint32_t h = 0; h < planar.height; h++) { ++ memcpy(dest, src, frame->linesize[idx]); ++ src += buf->info.strides[idx]; ++ dest += frame->linesize[idx]; ++ } ++ } ++ ++ xcam_video_buffer_unmap (buf); ++} ++ ++static uint32_t avfmt_to_v4l2fmt(int avfmt) { ++ if (avfmt == AV_PIX_FMT_YUV420P) ++ return V4L2_PIX_FMT_YUV420; ++ return V4L2_PIX_FMT_NV12; ++} ++ ++static int set_parameters(AVFilterContext *ctx, const AVFilterLink *inlink, const AVFilterLink *outlink) ++{ ++ XCAMContext *s = inlink->dst->priv; ++ ++ char params[XCAM_MAX_PARAMS_LENGTH] = { 0 }; ++ snprintf(params, XCAM_MAX_PARAMS_LENGTH - 1, "inw=%d inh=%d outw=%d outh=%d fmt=%d allocoutbuf=%d %s", ++ inlink->w, inlink->h, outlink->w, outlink->h, s->v4l2_fmt, s->allocoutbuf, s->params); ++ ++ if (xcam_handle_set_parameters(s->handle, params) != XCAM_RETURN_NO_ERROR) { ++ av_log(ctx, AV_LOG_ERROR, "xcam handler set parameters failed\n"); ++ return AVERROR(EINVAL); ++ } ++ ++ return 0; ++} ++ ++static int ++init_xcambuf_info(XCAMContext *s, XCamVideoBuffer *buf, AVFrame *frame) ++{ ++ XCamReturn ret = xcam_video_buffer_info_reset( ++ &buf->info, s->v4l2_fmt, frame->width, frame->height, frame->linesize[0], frame->height, 0); ++ if (ret != XCAM_RETURN_NO_ERROR) ++ return AVERROR(EINVAL); ++ ++ for (int i = 0; frame->linesize[i]; i++) { ++ buf->info.offsets[i] = frame->data[i] - frame->data[0]; ++ buf->info.strides[i] = frame->linesize[i]; ++ } ++ buf->mem_type = XCAM_MEM_TYPE_CPU; ++ ++ return 0; ++} ++ ++static int xcam_execute(FFFrameSync *fs) ++{ ++ AVFilterContext *ctx = fs->parent; ++ XCAMContext *s = fs->opaque; ++ AVFilterLink *outlink; ++ AVFrame *outframe, *frame; ++ XCamVideoBuffer *outbuf = NULL; ++ ++ XCamVideoFilterBuf **inbufs = 
s->inbufs; ++ for (int i = 0; i < ctx->nb_inputs; i++) { ++ int error = ff_framesync_get_frame(&s->fs, i, &frame, 0); ++ if (error < 0) ++ return error; ++ if (init_xcambuf_info(s, &inbufs[i]->buf, frame) != 0) ++ return AVERROR(EINVAL); ++ fill_xcambuf_from_avframe(inbufs[i], frame); ++ } ++ ++ if (xcam_handle_execute(s->handle, (XCamVideoBuffer **)inbufs, &outbuf) != XCAM_RETURN_NO_ERROR) { ++ av_log(ctx, AV_LOG_ERROR, "execute xcam handler failed\n"); ++ return AVERROR(EINVAL); ++ } ++ ++ outlink = ctx->outputs[0]; ++ if (!(outframe = ff_get_video_buffer(outlink, outlink->w, outlink->h))) { ++ av_frame_free(&frame); ++ return AVERROR(ENOMEM); ++ } ++ av_frame_copy_props(outframe, frame); ++ ++ fill_avframe_from_xcambuf(outframe, outbuf); ++ xcam_video_buffer_unref(outbuf); ++ ++ return ff_filter_frame(outlink, outframe); ++} ++ ++static int xcam_query_formats(AVFilterContext *ctx) ++{ ++ XCAMContext *s = ctx->priv; ++ AVFilterFormats *formats = NULL; ++ ++ static const enum AVPixelFormat nv12_fmts[] = {AV_PIX_FMT_NV12, AV_PIX_FMT_NONE}; ++ static const enum AVPixelFormat yuv420_fmts[] = {AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE}; ++ static const enum AVPixelFormat auto_fmts[] = {AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE}; ++ ++ const enum AVPixelFormat *pix_fmts = NULL; ++ if (!av_strcasecmp(s->fmt, "nv12")) ++ pix_fmts = nv12_fmts; ++ else if (!av_strcasecmp(s->fmt, "yuv420")) ++ pix_fmts = yuv420_fmts; ++ else ++ pix_fmts = auto_fmts; ++ ++ if (!(formats = ff_make_format_list(pix_fmts))) ++ return AVERROR(ENOMEM); ++ ++ return ff_set_common_formats(ctx, formats); ++} ++ ++static int xcam_config_output(AVFilterLink *outlink) ++{ ++ AVFilterContext *ctx = outlink->src; ++ XCAMContext *s = ctx->priv; ++ AVFilterLink *inlink = ctx->inputs[0]; ++ int ret = 0; ++ ++ s->v4l2_fmt = avfmt_to_v4l2fmt(inlink->format); ++ if (s->w && s->h) { ++ outlink->w = s->w; ++ outlink->h = s->h; ++ } else { ++ outlink->w = inlink->w; ++ outlink->h = inlink->h; ++ } ++ ++ set_parameters(ctx, inlink, outlink); ++ if (xcam_handle_init(s->handle) != XCAM_RETURN_NO_ERROR) { ++ av_log(ctx, AV_LOG_ERROR, "init xcam handler failed\n"); ++ return AVERROR(EINVAL); ++ } ++ ++ if ((ret = ff_framesync_init(&s->fs, ctx, ctx->nb_inputs)) < 0) ++ return ret; ++ s->fs.opaque = s; ++ s->fs.on_event = xcam_execute; ++ for (int i = 0; i < ctx->nb_inputs; i++) { ++ FFFrameSyncIn *in = &s->fs.in[i]; ++ in->time_base = ctx->inputs[i]->time_base; ++ in->sync = 1; ++ in->before = EXT_STOP; ++ in->after = EXT_STOP; ++ } ++ ret = ff_framesync_configure(&s->fs); ++ outlink->time_base = s->fs.time_base; ++ ++ return ret; ++} ++ ++static av_cold int xcam_init(AVFilterContext *ctx) ++{ ++ XCAMContext *s = ctx->priv; ++ int ret = 0; ++ ++ s->handle = xcam_create_handle(s->name); ++ if (!s->handle) { ++ av_log(ctx, AV_LOG_ERROR, "create xcam handler failed\n"); ++ return AVERROR(EINVAL); ++ } ++ ++ for (int i = 0; i < s->nb_inputs; i++) { ++ s->inbufs[i] = av_mallocz_array(1, sizeof(*s->inbufs[i])); ++ if (!s->inbufs[i]) ++ return AVERROR(ENOMEM); ++ s->inbufs[i]->buf.ref = xcambuf_ref; ++ s->inbufs[i]->buf.unref = xcambuf_unref; ++ s->inbufs[i]->buf.map = xcambuf_map; ++ s->inbufs[i]->buf.unmap = xcambuf_unmap; ++ s->inbufs[i]->buf.get_fd = xcambuf_get_fd; ++ } ++ ++ for (int i = 0; i < s->nb_inputs; i++) { ++ AVFilterPad pad = { .type = AVMEDIA_TYPE_VIDEO }; ++ pad.name = av_asprintf("input%d", i); ++ if (!pad.name) ++ return AVERROR(ENOMEM); ++ ++ if ((ret = ff_insert_inpad(ctx, i, &pad)) < 0) { ++ av_freep(&pad.name); ++ return 
ret; ++ } ++ } ++ ++ return 0; ++} ++ ++static av_cold void xcam_uninit(AVFilterContext *ctx) ++{ ++ XCAMContext *s = ctx->priv; ++ ++ ff_framesync_uninit(&s->fs); ++ for (int i = 0; i < s->nb_inputs; i++) { ++ if (s->inbufs[i]) ++ av_freep(&s->inbufs[i]); ++ if (ctx->input_pads) ++ av_freep(&ctx->input_pads[i].name); ++ } ++ ++ xcam_destroy_handle(s->handle); ++ s->handle = NULL; ++} ++ ++static int xcam_activate(AVFilterContext *ctx) ++{ ++ XCAMContext *s = ctx->priv; ++ return ff_framesync_activate(&s->fs); ++} ++ ++#define OFFSET(x) offsetof(XCAMContext, x) ++#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM ++#define CONST_STRING(name, help, unit) \ ++ { name, help, 0, AV_OPT_TYPE_CONST, { .str=name }, 0, 0, FLAGS, unit } ++ ++static const AVOption xcam_options[] = { ++ { "inputs", "number of inputs", OFFSET(nb_inputs), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, XCAM_MAX_INPUTS_NUM, FLAGS }, ++ { "w", "output width", OFFSET(w), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, FLAGS }, ++ { "h", "output height", OFFSET(h), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, FLAGS }, ++ { "fmt", "pixel format", OFFSET(fmt), AV_OPT_TYPE_STRING, { .str = "auto" }, 0, 0, FLAGS, "fmt" }, ++ CONST_STRING("auto", "automatic format negotiation", "fmt"), ++ CONST_STRING("nv12", "NV12 format", "fmt"), ++ CONST_STRING("yuv420", "YUV420 format", "fmt"), ++ { "name", "handler name", OFFSET(name), AV_OPT_TYPE_STRING, { .str = "stitch" }, 0, 0, FLAGS, "name" }, ++ CONST_STRING("3dnr", "3d denoising", "name"), ++ CONST_STRING("waveletnr", "wavelet denoising", "name"), ++ CONST_STRING("fisheye", "fisheye calibration", "name"), ++ CONST_STRING("defog", "fog removal", "name"), ++ CONST_STRING("dvs", "digital video stabilizer", "name"), ++ CONST_STRING("stitch", "soft/GLES/Vulkan stitching", "name"), ++ CONST_STRING("stitchcl", "OpenCL stitching", "name"), ++ { "allocoutbuf", "alloc output buffer", OFFSET(allocoutbuf), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS }, ++ { "params", "private parameters for each handle, usage: params=help=1 field0=value0 field1=value1 ...", ++ OFFSET(params), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS }, ++ { NULL } ++}; ++ ++AVFILTER_DEFINE_CLASS(xcam); + -+static const AVFilterPad avfilter_vf_transform_inputs[] = { ++static const AVFilterPad xcam_outputs[] = { + { + .name = "default", + .type = AVMEDIA_TYPE_VIDEO, -+ .filter_frame = filter_frame, ++ .config_props = xcam_config_output + }, + { NULL } +}; + -+static const AVFilterPad avfilter_vf_transform_outputs[] = { -+ { -+ .name = "default", -+ .type = AVMEDIA_TYPE_VIDEO, -+ .config_props = config_output, -+ }, -+ { NULL } ++AVFilter ff_vf_xcam = { ++ .name = "xcam", ++ .description = NULL_IF_CONFIG_SMALL("Apply image processing using libxcam"), ++ .priv_size = sizeof(XCAMContext), ++ .priv_class = &xcam_class, ++ .init = xcam_init, ++ .query_formats = xcam_query_formats, ++ .outputs = xcam_outputs, ++ .activate = xcam_activate, ++ .uninit = xcam_uninit, ++ .flags = AVFILTER_FLAG_DYNAMIC_INPUTS +}; + -+AVFilter ff_vf_transform360 = { -+ .name = "transform360", -+ .description = NULL_IF_CONFIG_SMALL("Transforms equirectangular input video to the other format."), -+ .init_dict = init_dict, -+ .uninit = uninit, -+ .priv_size = sizeof(TransformContext), -+ .priv_class = &transform360_class, -+ .inputs = avfilter_vf_transform_inputs, -+ .outputs = avfilter_vf_transform_outputs, -+}; -diff --git a/FFmpeg/libavformat/Makefile b/FFmpeg/libavformat/Makefile -index e99e915..4225abe 100644 ---- a/FFmpeg/libavformat/Makefile -+++ 
b/FFmpeg/libavformat/Makefile -@@ -145,6 +145,8 @@ OBJS-$(CONFIG_DATA_DEMUXER) += rawdec.o - OBJS-$(CONFIG_DATA_MUXER) += rawenc.o - OBJS-$(CONFIG_DASH_MUXER) += dash.o dashenc.o hlsplaylist.o - OBJS-$(CONFIG_DASH_DEMUXER) += dash.o dashdec.o -+OBJS-$(CONFIG_TILE_DASH_DEMUXER) += tiled_dash_dec.o -+OBJS-$(CONFIG_LIBVROMAFPACKING) += omaf_packing_enc.o - OBJS-$(CONFIG_DAUD_DEMUXER) += dauddec.o - OBJS-$(CONFIG_DAUD_MUXER) += daudenc.o - OBJS-$(CONFIG_DCSTR_DEMUXER) += dcstr.o -diff --git a/FFmpeg/libavformat/allformats.c b/FFmpeg/libavformat/allformats.c -index 9e41718..e9acbd1 100644 ---- a/FFmpeg/libavformat/allformats.c -+++ b/FFmpeg/libavformat/allformats.c -@@ -164,6 +164,8 @@ extern AVInputFormat ff_gdv_demuxer; - extern AVInputFormat ff_genh_demuxer; - extern AVInputFormat ff_gif_demuxer; - extern AVOutputFormat ff_gif_muxer; -+extern AVOutputFormat ff_tile_dash_muxer; +diff -urN FFmpeg/libavformat/allformats.c FFmpeg-patched/libavformat/allformats.c +--- FFmpeg/libavformat/allformats.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavformat/allformats.c 2020-09-27 13:35:13.518526539 +0800 +@@ -187,6 +187,8 @@ + extern AVOutputFormat ff_hds_muxer; + extern AVInputFormat ff_hevc_demuxer; + extern AVOutputFormat ff_hevc_muxer; +extern AVOutputFormat ff_omaf_packing_muxer; - extern AVInputFormat ff_gsm_demuxer; - extern AVOutputFormat ff_gsm_muxer; - extern AVInputFormat ff_gxf_demuxer; -@@ -402,6 +404,9 @@ extern AVOutputFormat ff_tgp_muxer; - extern AVInputFormat ff_thp_demuxer; - extern AVInputFormat ff_threedostr_demuxer; - extern AVInputFormat ff_tiertexseq_demuxer; +extern AVInputFormat ff_tile_dash_demuxer; -+extern AVOutputFormat ff_tile_dash_muxer; -+extern AVOutputFormat ff_omaf_packing_muxer; - extern AVOutputFormat ff_mkvtimestamp_v2_muxer; - extern AVInputFormat ff_tmv_demuxer; - extern AVInputFormat ff_truehd_demuxer; -diff --git a/FFmpeg/libavformat/flv.h b/FFmpeg/libavformat/flv.h -index df5ce3d..97324cc 100644 ---- a/FFmpeg/libavformat/flv.h -+++ b/FFmpeg/libavformat/flv.h -@@ -109,6 +109,7 @@ enum { - FLV_CODECID_H264 = 7, - FLV_CODECID_REALH263= 8, - FLV_CODECID_MPEG4 = 9, -+ FLV_CODECID_HEVC = 12, - }; - - enum { -diff --git a/FFmpeg/libavformat/flvdec.c b/FFmpeg/libavformat/flvdec.c -index a2dea46..e28d62b 100644 ---- a/FFmpeg/libavformat/flvdec.c -+++ b/FFmpeg/libavformat/flvdec.c -@@ -293,6 +293,8 @@ static int flv_same_video_codec(AVCodecParameters *vpar, int flags) + extern AVInputFormat ff_hls_demuxer; + extern AVOutputFormat ff_hls_muxer; + extern AVInputFormat ff_hnm_demuxer; +diff -urN FFmpeg/libavformat/flvdec.c FFmpeg-patched/libavformat/flvdec.c +--- FFmpeg/libavformat/flvdec.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavformat/flvdec.c 2020-09-27 13:35:13.527526539 +0800 +@@ -318,6 +318,8 @@ return vpar->codec_id == AV_CODEC_ID_VP6A; case FLV_CODECID_H264: return vpar->codec_id == AV_CODEC_ID_H264; @@ -3876,7 +4624,7 @@ index a2dea46..e28d62b 100644 default: return vpar->codec_tag == flv_codecid; } -@@ -342,6 +344,11 @@ static int flv_set_video_codec(AVFormatContext *s, AVStream *vstream, +@@ -367,6 +369,11 @@ par->codec_id = AV_CODEC_ID_MPEG4; ret = 3; break; @@ -3888,7 +4636,7 @@ index a2dea46..e28d62b 100644 default: avpriv_request_sample(s, "Video codec (%x)", flv_codecid); par->codec_tag = flv_codecid; -@@ -1157,6 +1164,7 @@ retry_duration: +@@ -1222,6 +1229,7 @@ if (st->codecpar->codec_id == AV_CODEC_ID_AAC || st->codecpar->codec_id == AV_CODEC_ID_H264 || @@ -3896,7 +4644,7 @@ index a2dea46..e28d62b 100644 
st->codecpar->codec_id == AV_CODEC_ID_MPEG4) { int type = avio_r8(s->pb); size--; -@@ -1166,8 +1174,8 @@ retry_duration: +@@ -1231,8 +1239,8 @@ goto leave; } @@ -3907,7 +4655,7 @@ index a2dea46..e28d62b 100644 int32_t cts = (avio_rb24(s->pb) + 0xff800000) ^ 0xff800000; pts = dts + cts; if (cts < 0) { // dts might be wrong -@@ -1182,7 +1190,7 @@ retry_duration: +@@ -1247,7 +1255,7 @@ } } if (type == 0 && (!st->codecpar->extradata || st->codecpar->codec_id == AV_CODEC_ID_AAC || @@ -3916,20 +4664,18 @@ index a2dea46..e28d62b 100644 AVDictionaryEntry *t; if (st->codecpar->extradata) { -diff --git a/FFmpeg/libavformat/flvenc.c b/FFmpeg/libavformat/flvenc.c -index e4863f1..aa0d52f 100644 ---- a/FFmpeg/libavformat/flvenc.c -+++ b/FFmpeg/libavformat/flvenc.c -@@ -34,7 +34,7 @@ - #include "libavutil/opt.h" - #include "libavcodec/put_bits.h" - #include "libavcodec/aacenctab.h" -- +diff -urN FFmpeg/libavformat/flvenc.c FFmpeg-patched/libavformat/flvenc.c +--- FFmpeg/libavformat/flvenc.c 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavformat/flvenc.c 2020-09-27 13:35:13.527526539 +0800 +@@ -29,6 +29,7 @@ + #include "avc.h" + #include "avformat.h" + #include "flv.h" +#include "hevc.h" - - static const AVCodecTag flv_video_codec_ids[] = { - { AV_CODEC_ID_FLV1, FLV_CODECID_H263 }, -@@ -46,6 +46,7 @@ static const AVCodecTag flv_video_codec_ids[] = { + #include "internal.h" + #include "metadata.h" + #include "libavutil/opt.h" +@@ -46,6 +47,7 @@ { AV_CODEC_ID_VP6, FLV_CODECID_VP6 }, { AV_CODEC_ID_VP6A, FLV_CODECID_VP6A }, { AV_CODEC_ID_H264, FLV_CODECID_H264 }, @@ -3937,7 +4683,7 @@ index e4863f1..aa0d52f 100644 { AV_CODEC_ID_NONE, 0 } }; -@@ -491,7 +492,7 @@ static void flv_write_codec_header(AVFormatContext* s, AVCodecParameters* par, i +@@ -491,7 +493,7 @@ FLVContext *flv = s->priv_data; if (par->codec_id == AV_CODEC_ID_AAC || par->codec_id == AV_CODEC_ID_H264 @@ -3946,7 +4692,7 @@ index e4863f1..aa0d52f 100644 int64_t pos; avio_w8(pb, par->codec_type == AVMEDIA_TYPE_VIDEO ? 
-@@ -537,7 +538,11 @@ static void flv_write_codec_header(AVFormatContext* s, AVCodecParameters* par, i +@@ -537,7 +539,11 @@ avio_w8(pb, par->codec_tag | FLV_FRAME_KEY); // flags avio_w8(pb, 0); // AVC sequence header avio_wb24(pb, 0); // composition time @@ -3959,7 +4705,7 @@ index e4863f1..aa0d52f 100644 } data_size = avio_tell(pb) - pos; avio_seek(pb, -data_size - 10, SEEK_CUR); -@@ -840,7 +845,7 @@ end: +@@ -844,7 +850,7 @@ AVCodecParameters *par = s->streams[i]->codecpar; FLVStreamContext *sc = s->streams[i]->priv_data; if (par->codec_type == AVMEDIA_TYPE_VIDEO && @@ -3968,7 +4714,7 @@ index e4863f1..aa0d52f 100644 put_avc_eos_tag(pb, sc->last_ts); } } -@@ -891,13 +896,12 @@ static int flv_write_packet(AVFormatContext *s, AVPacket *pkt) +@@ -895,7 +901,7 @@ if (par->codec_id == AV_CODEC_ID_VP6F || par->codec_id == AV_CODEC_ID_VP6A || par->codec_id == AV_CODEC_ID_VP6 || par->codec_id == AV_CODEC_ID_AAC) flags_size = 2; @@ -3977,14 +4723,7 @@ index e4863f1..aa0d52f 100644 flags_size = 5; else flags_size = 1; - -- if (par->codec_id == AV_CODEC_ID_AAC || par->codec_id == AV_CODEC_ID_H264 -- || par->codec_id == AV_CODEC_ID_MPEG4) { -+ if (par->codec_id == AV_CODEC_ID_AAC || par->codec_id == AV_CODEC_ID_H264 || par->codec_id == AV_CODEC_ID_MPEG4) { - int side_size = 0; - uint8_t *side = av_packet_get_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, &side_size); - if (side && side_size > 0 && (side_size != par->extradata_size || memcmp(side, par->extradata, side_size))) { -@@ -913,6 +917,24 @@ static int flv_write_packet(AVFormatContext *s, AVPacket *pkt) +@@ -913,6 +919,24 @@ } } @@ -4009,7 +4748,7 @@ index e4863f1..aa0d52f 100644 if (flv->delay == AV_NOPTS_VALUE) flv->delay = -pkt->dts; -@@ -960,6 +982,10 @@ static int flv_write_packet(AVFormatContext *s, AVPacket *pkt) +@@ -966,6 +990,10 @@ if (par->extradata_size > 0 && *(uint8_t*)par->extradata != 1) if ((ret = ff_avc_parse_nal_units_buf(pkt->data, &data, &size)) < 0) return ret; @@ -4020,7 +4759,7 @@ index e4863f1..aa0d52f 100644 } else if (par->codec_id == AV_CODEC_ID_AAC && pkt->size > 2 && (AV_RB16(pkt->data) & 0xfff0) == 0xfff0) { if (!s->streams[pkt->stream_index]->nb_frames) { -@@ -1029,9 +1055,9 @@ static int flv_write_packet(AVFormatContext *s, AVPacket *pkt) +@@ -1036,9 +1064,9 @@ else avio_w8(pb, ((FFALIGN(par->width, 16) - par->width) << 4) | (FFALIGN(par->height, 16) - par->height)); @@ -4032,12 +4771,33 @@ index e4863f1..aa0d52f 100644 avio_w8(pb, 1); // AVC NALU avio_wb24(pb, pkt->pts - pkt->dts); } -diff --git a/FFmpeg/libavformat/omaf_packing_enc.c b/FFmpeg/libavformat/omaf_packing_enc.c -new file mode 100644 -index 0000000..0a66efa ---- /dev/null -+++ b/FFmpeg/libavformat/omaf_packing_enc.c -@@ -0,0 +1,414 @@ +diff -urN FFmpeg/libavformat/flv.h FFmpeg-patched/libavformat/flv.h +--- FFmpeg/libavformat/flv.h 2020-07-09 17:17:46.000000000 +0800 ++++ FFmpeg-patched/libavformat/flv.h 2020-09-27 13:35:13.527526539 +0800 +@@ -110,6 +110,7 @@ + FLV_CODECID_H264 = 7, + FLV_CODECID_REALH263= 8, + FLV_CODECID_MPEG4 = 9, ++ FLV_CODECID_HEVC = 12, + }; + + enum { +diff -urN FFmpeg/libavformat/Makefile FFmpeg-patched/libavformat/Makefile +--- FFmpeg/libavformat/Makefile 2020-07-11 18:39:30.000000000 +0800 ++++ FFmpeg-patched/libavformat/Makefile 2020-09-27 13:35:13.519526539 +0800 +@@ -148,6 +148,8 @@ + OBJS-$(CONFIG_DATA_MUXER) += rawenc.o + OBJS-$(CONFIG_DASH_MUXER) += dash.o dashenc.o hlsplaylist.o + OBJS-$(CONFIG_DASH_DEMUXER) += dash.o dashdec.o ++OBJS-$(CONFIG_TILE_DASH_DEMUXER) += tiled_dash_dec.o ++OBJS-$(CONFIG_LIBVROMAFPACKING) 
+= omaf_packing_enc.o + OBJS-$(CONFIG_DAUD_DEMUXER) += dauddec.o + OBJS-$(CONFIG_DAUD_MUXER) += daudenc.o + OBJS-$(CONFIG_DCSTR_DEMUXER) += dcstr.o +diff -urN FFmpeg/libavformat/omaf_packing_enc.c FFmpeg-patched/libavformat/omaf_packing_enc.c +--- FFmpeg/libavformat/omaf_packing_enc.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavformat/omaf_packing_enc.c 2020-09-27 13:35:13.543526537 +0800 +@@ -0,0 +1,730 @@ +/* + * Intel tile Dash muxer + * @@ -4077,6 +4837,9 @@ index 0000000..0a66efa + +#include "360SCVPAPI.h" +#include "VROmafPackingAPI.h" ++#include "common_data.h" ++ ++static uint32_t min_loglevel = 2; + +typedef struct { + int streamIdx; @@ -4089,8 +4852,24 @@ index 0000000..0a66efa + InitialInfo *initInfo; + int inStreamsNum; + ++ const char *proj_type; ++ const char *face_file; ++ int viewport_w; ++ int viewport_h; ++ float viewport_yaw; ++ float viewport_pitch; ++ float viewport_fov_hor; ++ float viewport_fov_ver; + int window_size; + int extra_window_size; ++ int has_extractor; ++ const char *packingPluginPath; ++ const char *packingPluginName; ++ bool fixedPackedPicRes; ++ const char *videoPluginPath; ++ const char *videoPluginName; ++ const char *audioPluginPath; ++ const char *audioPluginName; + int need_buffered_frames; + uint16_t extractors_per_thread; + int64_t seg_duration; @@ -4106,8 +4885,99 @@ index 0000000..0a66efa + int64_t frameNum; + BufferedFrame bufferedFrames[1024]; + int bufferedFramesNum; ++ bool need_external_log; ++ int min_log_level; ++ bool first_audio_input; +} OMAFContext; + ++static uint8_t convert_face_index(char *face_name) ++{ ++ if (0 == strncmp(face_name, "PY", 2)) ++ return 0; ++ else if (0 == strncmp(face_name, "PX", 2)) ++ return 1; ++ else if (0 == strncmp(face_name, "NY", 2)) ++ return 2; ++ else if (0 == strncmp(face_name, "NZ", 2)) ++ return 3; ++ else if (0 == strncmp(face_name, "NX", 2)) ++ return 4; ++ else if (0 == strncmp(face_name, "PZ", 2)) ++ return 5; ++ else ++ return 255; ++} ++ ++static E_TransformType convert_transform_type(char *transform_name) ++{ ++ if (0 == strncmp(transform_name, "NO_TRANSFORM", 12)) ++ return NO_TRANSFORM; ++ else if (0 == strncmp(transform_name, "MIRRORING_HORIZONTALLY", 22)) ++ return MIRRORING_HORIZONTALLY; ++ else if (0 == strncmp(transform_name, "ROTATION_180_ANTICLOCKWISE", 26)) ++ return ROTATION_180_ANTICLOCKWISE; ++ else if (0 == strncmp(transform_name, "ROTATION_180_ANTICLOCKWISE_AFTER_MIRRORING_HOR", 46)) ++ return ROTATION_180_ANTICLOCKWISE_AFTER_MIRRORING_HOR; ++ else if (0 == strncmp(transform_name, "ROTATION_90_ANTICLOCKWISE_BEFORE_MIRRORING_HOR", 46)) ++ return ROTATION_90_ANTICLOCKWISE_BEFORE_MIRRORING_HOR; ++ else if (0 == strncmp(transform_name, "ROTATION_90_ANTICLOCKWISE", 25)) ++ return ROTATION_90_ANTICLOCKWISE; ++ else if (0 == strncmp(transform_name, "ROTATION_270_ANTICLOCKWISE_BEFORE_MIRRORING_HOR", 47)) ++ return ROTATION_270_ANTICLOCKWISE_BEFORE_MIRRORING_HOR; ++ else if (0 == strncmp(transform_name, "ROTATION_270_ANTICLOCKWISE", 26)) ++ return ROTATION_270_ANTICLOCKWISE; ++ else ++ return NO_TRANSFORM; ++} ++ ++static void ffmpeg_log_callback(LogLevel log_level, const char* file_name, uint64_t line_num, const char* fmt, ...) 
++{ ++ va_list vl; ++ va_start(vl, fmt); ++ ++ switch (log_level) ++ { ++ case LOG_INFO: ++ { ++ if(min_loglevel == 0) ++ { ++ av_vlog(NULL, AV_LOG_INFO, fmt, vl); ++ } ++ break; ++ } ++ case LOG_WARNING: ++ { ++ if(min_loglevel <= 1) ++ { ++ av_vlog(NULL, AV_LOG_WARNING, fmt, vl); ++ } ++ break; ++ } ++ case LOG_ERROR: ++ { ++ if(min_loglevel <= 2) ++ { ++ av_vlog(NULL, AV_LOG_ERROR, fmt, vl); ++ } ++ break; ++ } ++ case LOG_FATAL: ++ { ++ if(min_loglevel <= 3) ++ { ++ av_vlog(NULL, AV_LOG_FATAL, fmt, vl); ++ } ++ break; ++ } ++ default: ++ { ++ av_log(NULL, AV_LOG_ERROR, "Invalid log level !"); ++ break; ++ } ++ } ++ va_end(vl); ++} ++ +static int omaf_init(AVFormatContext *s) +{ + OMAFContext *c = s->priv_data; @@ -4142,21 +5012,63 @@ index 0000000..0a66efa + } + } + -+ if (s->nb_streams == 1) ++ if (initInfo->bsNumVideo > 2) ++ { ++ c->has_extractor = 0; ++ } ++ ++ initInfo->videoProcessPluginPath = c->videoPluginPath; ++ initInfo->videoProcessPluginName = c->videoPluginName; ++ ++ if (initInfo->bsNumAudio) ++ { ++ if ((0 == strncmp(c->audioPluginPath, "NULL", 4)) || ++ (0 == strncmp(c->audioPluginName, "NULL", 4))) ++ { ++ av_log(s, AV_LOG_ERROR, "No audio stream process plugin is set but there is indeed audio stream input !\n"); ++ return AVERROR_INVALIDDATA; ++ } ++ ++ initInfo->audioProcessPluginPath = c->audioPluginPath; ++ initInfo->audioProcessPluginName = c->audioPluginName; ++ } ++ ++ if (c->has_extractor) ++ { ++ initInfo->packingPluginPath = c->packingPluginPath; ++ if (initInfo->bsNumVideo == 1) ++ { ++ initInfo->packingPluginName = "SingleVideoPacking"; ++ } ++ else if (initInfo->bsNumVideo == 2) ++ { ++ initInfo->packingPluginName = c->packingPluginName; ++ } ++ else ++ { ++ av_log(s, AV_LOG_ERROR, "Not correct video streams number for VR OMAF Packing \n"); ++ return AVERROR(EINVAL); ++ } ++ initInfo->fixedPackedPicRes = c->fixedPackedPicRes; ++ } ++ else + { -+ initInfo->tilesMergingType = OnlyOneVideo; ++ initInfo->packingPluginPath = NULL; ++ initInfo->packingPluginName = NULL; ++ initInfo->fixedPackedPicRes = false; + } -+ else if (s->nb_streams == 2) ++ ++ min_loglevel = c->min_log_level; ++ if (c->need_external_log) + { -+ initInfo->tilesMergingType = TwoResTilesMerging; ++ initInfo->logFunction = (void*)(ffmpeg_log_callback); + } + else + { -+ av_log(s, AV_LOG_ERROR, "Not correct video streams number for VR OMAF Packing \n"); -+ return AVERROR(EINVAL); ++ initInfo->logFunction = NULL; + } + -+ initInfo->bsBuffers = (BSBuffer*)malloc(sizeof(BSBuffer) * initInfo->bsNumVideo); ++ initInfo->bsBuffers = (BSBuffer*)malloc(sizeof(BSBuffer) * (initInfo->bsNumVideo + initInfo->bsNumAudio)); + if (!(initInfo->bsBuffers)) + { + av_log(s, AV_LOG_ERROR, "Failed to malloc memory for video bitstream buffer \n"); @@ -4170,14 +5082,25 @@ index 0000000..0a66efa + return AVERROR(ENOMEM); + } + memset(initInfo->viewportInfo, 0, sizeof(ViewportInformation)); -+ initInfo->viewportInfo->viewportWidth = 1024; -+ initInfo->viewportInfo->viewportHeight = 1024; -+ initInfo->viewportInfo->viewportPitch = 0; -+ initInfo->viewportInfo->viewportYaw = 90; -+ initInfo->viewportInfo->horizontalFOVAngle = 80; -+ initInfo->viewportInfo->verticalFOVAngle = 90; ++ initInfo->viewportInfo->viewportWidth = c->viewport_w; ++ initInfo->viewportInfo->viewportHeight = c->viewport_h; ++ initInfo->viewportInfo->viewportPitch = c->viewport_pitch; ++ initInfo->viewportInfo->viewportYaw = c->viewport_yaw; ++ initInfo->viewportInfo->horizontalFOVAngle = c->viewport_fov_hor; ++ 
initInfo->viewportInfo->verticalFOVAngle = c->viewport_fov_ver; + initInfo->viewportInfo->outGeoType = E_SVIDEO_VIEWPORT; -+ initInfo->viewportInfo->inGeoType = E_SVIDEO_EQUIRECT; ++ if (0 == strncmp(c->proj_type, "ERP", 3)) ++ { ++ initInfo->viewportInfo->inGeoType = E_SVIDEO_EQUIRECT; ++ } ++ else if (0 == strncmp(c->proj_type, "Cube", 4)) ++ { ++ initInfo->viewportInfo->inGeoType = E_SVIDEO_CUBEMAP; ++ } ++ else if (0 == strncmp(c->proj_type, "Planar", 6)) ++ { ++ initInfo->viewportInfo->inGeoType = E_SVIDEO_PLANAR; ++ } + + initInfo->segmentationInfo = (SegmentationInfo*)malloc(sizeof(SegmentationInfo)); + if (!(initInfo->segmentationInfo)) @@ -4204,6 +5127,77 @@ index 0000000..0a66efa + initInfo->segmentationInfo->splitTile = c->split_tile; + initInfo->segmentationInfo->hasMainAS = true; + ++ if (0 == strncmp(c->proj_type, "ERP", 3)) ++ { ++ initInfo->projType = E_SVIDEO_EQUIRECT; ++ initInfo->cubeMapInfo = NULL; ++ } ++ else if (0 == strncmp(c->proj_type, "Cube", 4)) ++ { ++ initInfo->projType = E_SVIDEO_CUBEMAP; ++ } ++ else if (0 == strncmp(c->proj_type, "Planar", 6)) ++ { ++ initInfo->projType = E_SVIDEO_PLANAR; ++ } ++ ++ if (initInfo->projType == E_SVIDEO_CUBEMAP) ++ { ++ if (!(c->face_file)) ++ { ++ av_log(s, AV_LOG_ERROR, ++ "face_file should not be null when input source is from Cubemap projection! \n"); ++ return AVERROR(EINVAL); ++ } ++ ++ initInfo->cubeMapInfo = (InputCubeMapInfo*)malloc(sizeof(InputCubeMapInfo)); ++ memset(initInfo->cubeMapInfo, 0, sizeof(InputCubeMapInfo)); ++ FILE *fp = fopen(c->face_file, "r"); ++ if (!fp) ++ { ++ av_log(s, AV_LOG_ERROR, ++ "Failed to open cubemap face file !\n"); ++ return AVERROR(ENOMEM); ++ } ++ char face_name[128] = { 0 }; ++ char transform_name[128] = { 0 }; ++ fscanf(fp, "%s %s", face_name, transform_name); ++ initInfo->cubeMapInfo->face0MapInfo.mappedStandardFaceId = convert_face_index(face_name); ++ initInfo->cubeMapInfo->face0MapInfo.transformType = convert_transform_type(transform_name); ++ ++ memset(face_name, 0, 128); ++ memset(transform_name, 0, 128); ++ fscanf(fp, "%s %s", face_name, transform_name); ++ initInfo->cubeMapInfo->face1MapInfo.mappedStandardFaceId = convert_face_index(face_name); ++ initInfo->cubeMapInfo->face1MapInfo.transformType = convert_transform_type(transform_name); ++ ++ memset(face_name, 0, 128); ++ memset(transform_name, 0, 128); ++ fscanf(fp, "%s %s", face_name, transform_name); ++ initInfo->cubeMapInfo->face2MapInfo.mappedStandardFaceId = convert_face_index(face_name); ++ initInfo->cubeMapInfo->face2MapInfo.transformType = convert_transform_type(transform_name); ++ ++ memset(face_name, 0, 128); ++ memset(transform_name, 0, 128); ++ fscanf(fp, "%s %s", face_name, transform_name); ++ initInfo->cubeMapInfo->face3MapInfo.mappedStandardFaceId = convert_face_index(face_name); ++ initInfo->cubeMapInfo->face3MapInfo.transformType = convert_transform_type(transform_name); ++ ++ memset(face_name, 0, 128); ++ memset(transform_name, 0, 128); ++ fscanf(fp, "%s %s", face_name, transform_name); ++ initInfo->cubeMapInfo->face4MapInfo.mappedStandardFaceId = convert_face_index(face_name); ++ initInfo->cubeMapInfo->face4MapInfo.transformType = convert_transform_type(transform_name); ++ ++ memset(face_name, 0, 128); ++ memset(transform_name, 0, 128); ++ fscanf(fp, "%s %s", face_name, transform_name); ++ initInfo->cubeMapInfo->face5MapInfo.mappedStandardFaceId = convert_face_index(face_name); ++ initInfo->cubeMapInfo->face5MapInfo.transformType = convert_transform_type(transform_name); ++ ++ fclose(fp); ++ fp = NULL; 
++ } + memset(c->bufferedFrames, 0, 1024 * sizeof(BufferedFrame)); + c->bufferedFramesNum = 0; + c->inStreamsNum = 0; @@ -4220,7 +5214,7 @@ index 0000000..0a66efa + + if (c->initInfo->bsBuffers) + { -+ for (int i = 0; i < c->initInfo->bsNumVideo; i++) ++ for (int i = 0; i < (c->initInfo->bsNumVideo + c->initInfo->bsNumAudio); i++) + { + if (c->initInfo->bsBuffers[i].data) + { @@ -4250,6 +5244,12 @@ index 0000000..0a66efa + c->initInfo->segmentationInfo = NULL; + } + ++ if (c->initInfo->cubeMapInfo) ++ { ++ free(c->initInfo->cubeMapInfo); ++ c->initInfo->cubeMapInfo = NULL; ++ } ++ + if(c->initInfo) + { + free(c->initInfo); @@ -4274,14 +5274,31 @@ index 0000000..0a66efa + int i = pkt->stream_index; + AVStream *st = s->streams[i]; + -+ c->initInfo->bsBuffers[i].dataSize = pkt->side_data->size; -+ c->initInfo->bsBuffers[i].data = (uint8_t*)malloc(c->initInfo->bsBuffers[i].dataSize * sizeof(uint8_t)); -+ if (!(c->initInfo->bsBuffers[i].data)) ++ if (((st->codecpar->codec_id == AV_CODEC_ID_HEVC) || (st->codecpar->codec_id == AV_CODEC_ID_H264)) && (pkt->pts == 0)) + { -+ av_log(s, AV_LOG_ERROR, "Failed to malloc memory for holding bitstream header data \n"); -+ return AVERROR(ENOMEM); ++ c->initInfo->bsBuffers[i].dataSize = pkt->side_data->size; ++ ++ c->initInfo->bsBuffers[i].data = (uint8_t*)malloc(c->initInfo->bsBuffers[i].dataSize * sizeof(uint8_t)); ++ if (!(c->initInfo->bsBuffers[i].data)) ++ { ++ av_log(s, AV_LOG_ERROR, "Failed to malloc memory for holding bitstream header data \n"); ++ return -1; ++ } ++ memcpy(c->initInfo->bsBuffers[i].data, pkt->side_data->data, c->initInfo->bsBuffers[i].dataSize); ++ ++ } ++ else if ((st->codecpar->codec_id == AV_CODEC_ID_AAC) && !c->first_audio_input) ++ { ++ c->initInfo->bsBuffers[i].dataSize = pkt->size; ++ ++ c->initInfo->bsBuffers[i].data = (uint8_t*)malloc(c->initInfo->bsBuffers[i].dataSize * sizeof(uint8_t)); ++ if (!(c->initInfo->bsBuffers[i].data)) ++ { ++ av_log(s, AV_LOG_ERROR, "Failed to malloc memory for holding bitstream header data \n"); ++ return -1; ++ } ++ memcpy(c->initInfo->bsBuffers[i].data, pkt->data, c->initInfo->bsBuffers[i].dataSize); + } -+ memcpy(c->initInfo->bsBuffers[i].data, pkt->side_data->data, c->initInfo->bsBuffers[i].dataSize); + + if (st->codecpar->codec_id == AV_CODEC_ID_H264) + { @@ -4291,6 +5308,10 @@ index 0000000..0a66efa + { + c->initInfo->bsBuffers[i].codecId = CODEC_ID_H265; + } ++ else if (st->codecpar->codec_id == AV_CODEC_ID_AAC) ++ { ++ c->initInfo->bsBuffers[i].codecId = CODEC_ID_AAC; ++ } + + c->initInfo->bsBuffers[i].bitRate = st->codecpar->bit_rate; + c->initInfo->bsBuffers[i].frameRate.num = st->avg_frame_rate.num; @@ -4303,6 +5324,9 @@ index 0000000..0a66efa + break; + case AVMEDIA_TYPE_AUDIO: + c->initInfo->bsBuffers[i].mediaType = AUDIOTYPE; ++ c->initInfo->bsBuffers[i].audioObjType = st->codecpar->profile ; ++ c->initInfo->bsBuffers[i].sampleRate = st->codecpar->sample_rate; ++ c->initInfo->bsBuffers[i].channelNum = st->codecpar->channels; + break; + case AVMEDIA_TYPE_SUBTITLE: + c->initInfo->bsBuffers[i].mediaType = SUBTITLETYPE; @@ -4311,40 +5335,76 @@ index 0000000..0a66efa + break; + } + -+ c->inStreamsNum++; ++ if (((st->codecpar->codec_id == AV_CODEC_ID_HEVC) || (st->codecpar->codec_id == AV_CODEC_ID_H264)) && (pkt->pts == 0)) ++ { ++ c->inStreamsNum++; ++ } ++ else if ((st->codecpar->codec_id == AV_CODEC_ID_AAC) && !c->first_audio_input) ++ { ++ c->inStreamsNum++; ++ c->first_audio_input = true; ++ } + + FrameBSInfo* frameInfo = (FrameBSInfo*)malloc(sizeof(FrameBSInfo)); -+ 
frameInfo->dataSize = pkt->size - pkt->side_data->size; ++ memset(frameInfo, 0, sizeof(FrameBSInfo)); ++ if (((st->codecpar->codec_id == AV_CODEC_ID_HEVC) || (st->codecpar->codec_id == AV_CODEC_ID_H264)) && (pkt->pts == 0)) ++ { ++ frameInfo->dataSize = pkt->size - pkt->side_data->size; ++ } ++ else if (((st->codecpar->codec_id == AV_CODEC_ID_HEVC) || (st->codecpar->codec_id == AV_CODEC_ID_H264)) && (pkt->pts != 0)) ++ { ++ frameInfo->dataSize = pkt->size; ++ } ++ else if (st->codecpar->codec_id == AV_CODEC_ID_AAC) ++ { ++ frameInfo->dataSize = pkt->size; ++ } ++ + frameInfo->data = (uint8_t*)malloc(frameInfo->dataSize * sizeof(uint8_t)); + if (!(frameInfo->data)) + { + av_log(s, AV_LOG_ERROR, "Failed to malloc memory for buffered frame data \n"); -+ return AVERROR(ENOMEM); ++ return -1; ++ } ++ if (((st->codecpar->codec_id == AV_CODEC_ID_HEVC) || (st->codecpar->codec_id == AV_CODEC_ID_H264)) && (pkt->pts == 0)) ++ { ++ memcpy(frameInfo->data, pkt->data + pkt->side_data->size, frameInfo->dataSize); ++ frameInfo->isKeyFrame = (pkt->flags & AV_PKT_FLAG_KEY); ++ } ++ else if (((st->codecpar->codec_id == AV_CODEC_ID_HEVC) || (st->codecpar->codec_id == AV_CODEC_ID_H264)) && (pkt->pts != 0)) ++ { ++ memcpy(frameInfo->data, pkt->data, frameInfo->dataSize); ++ frameInfo->isKeyFrame = (pkt->flags & AV_PKT_FLAG_KEY); ++ } ++ else if (st->codecpar->codec_id == AV_CODEC_ID_AAC) ++ { ++ memcpy(frameInfo->data, pkt->data, frameInfo->dataSize); ++ frameInfo->isKeyFrame = true; + } -+ memcpy(frameInfo->data, pkt->data + pkt->side_data->size, frameInfo->dataSize); -+ frameInfo->isKeyFrame = (pkt->flags & AV_PKT_FLAG_KEY); + frameInfo->pts = pkt->pts; + + c->bufferedFrames[c->bufferedFramesNum].streamIdx = pkt->stream_index; + c->bufferedFrames[c->bufferedFramesNum].frameBSInfo = frameInfo; + c->bufferedFramesNum++; + -+ free(pkt->side_data->data); -+ pkt->side_data->data = NULL; -+ pkt->side_data->size = 0; -+ free(pkt->side_data); -+ pkt->side_data = NULL; -+ pkt->side_data_elems = 0; ++ if (((st->codecpar->codec_id == AV_CODEC_ID_HEVC) || (st->codecpar->codec_id == AV_CODEC_ID_H264)) && (pkt->pts == 0)) ++ { ++ free(pkt->side_data->data); ++ pkt->side_data->data = NULL; ++ pkt->side_data->size = 0; ++ free(pkt->side_data); ++ pkt->side_data = NULL; ++ pkt->side_data_elems = 0; ++ } + -+ if (c->inStreamsNum == c->initInfo->bsNumVideo) ++ if (c->inStreamsNum == (c->initInfo->bsNumVideo + c->initInfo->bsNumAudio)) + { + c->handler = VROmafPackingInit(c->initInfo); + if (!(c->handler)) + { + av_log(s, AV_LOG_ERROR, "Failed to create VR Omaf Packing handler \n"); -+ return AVERROR(EINVAL); ++ return -1; + } -+ + c->frameNum++; + } + } @@ -4412,6 +5472,14 @@ index 0000000..0a66efa +#define OFFSET(x) offsetof(OMAFContext, x) +#define E AV_OPT_FLAG_ENCODING_PARAM +static const AVOption options[] = { ++ { "packing_proj_type", "input source projection type, ERP or Cubemap", OFFSET(proj_type), AV_OPT_TYPE_STRING, { .str = "ERP" }, 0, 0, E }, ++ { "cubemap_face_file", "configure input cubemap face relation to face layout defined in OMAF for cube-3x2", OFFSET(face_file), AV_OPT_TYPE_STRING, { 0 }, 0, 0, E }, ++ { "viewport_w", "set viewport width", OFFSET(viewport_w), AV_OPT_TYPE_INT, { .i64 = 1024 }, 0, INT_MAX, E }, ++ { "viewport_h", "set viewport height", OFFSET(viewport_h), AV_OPT_TYPE_INT, { .i64 = 1024 }, 0, INT_MAX, E }, ++ { "viewport_yaw", "set viewport yaw angle, which is the angle around y axis", OFFSET(viewport_yaw), AV_OPT_TYPE_FLOAT, { .dbl = 90 }, 0, 180, E }, ++ { "viewport_pitch", "set viewport 
pitch angle, which is the angle around x axis", OFFSET(viewport_pitch), AV_OPT_TYPE_FLOAT, { .dbl = 0 }, 0, 100, E }, ++ { "viewport_fov_hor", "set horizontal angle of field of view (FOV)", OFFSET(viewport_fov_hor), AV_OPT_TYPE_FLOAT, { .dbl = 80 }, 0, 180, E }, ++ { "viewport_fov_ver", "set vertical angle of field of view (FOV)", OFFSET(viewport_fov_ver), AV_OPT_TYPE_FLOAT, { .dbl = 80 }, 0, 100, E }, + { "window_size", "number of segments kept in the manifest", OFFSET(window_size), AV_OPT_TYPE_INT, { .i64 = 5 }, 0, INT_MAX, E }, + { "extra_window_size", "number of segments kept outside of the manifest before removing from disk", OFFSET(extra_window_size), AV_OPT_TYPE_INT, { .i64 = 15 }, 0, INT_MAX, E }, + { "split_tile", "need split the stream to tiles if input is tile-based hevc stream", OFFSET(split_tile), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 2, E }, @@ -4425,6 +5493,16 @@ index 0000000..0a66efa + { "out_name", "name prefix for all dash output files", OFFSET(out_name), AV_OPT_TYPE_STRING, {.str = "dash-stream"}, 0, 0, E }, + { "need_buffered_frames", "needed buffered frames number before packing starts", OFFSET(need_buffered_frames), AV_OPT_TYPE_INT, { .i64 = 15 }, 0, INT_MAX, E }, + { "extractors_per_thread", "extractor tracks per segmentation thread", OFFSET(extractors_per_thread), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, E }, ++ { "has_extractor", "Enable/Disable OMAF extractor tracks", OFFSET(has_extractor), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, INT_MAX, E }, ++ { "packing_plugin_path", "OMAF Packing plugin path", OFFSET(packingPluginPath), AV_OPT_TYPE_STRING, {.str = "/usr/local/lib"}, 0, 0, E }, ++ { "packing_plugin_name", "OMAF Packing plugin name", OFFSET(packingPluginName), AV_OPT_TYPE_STRING, {.str = "HighResPlusFullLowResPacking"}, 0, 0, E }, ++ { "video_plugin_path", "Video stream process plugin path", OFFSET(videoPluginPath), AV_OPT_TYPE_STRING, {.str = "/usr/local/lib"}, 0, 0, E }, ++ { "video_plugin_name", "Video stream process plugin name", OFFSET(videoPluginName), AV_OPT_TYPE_STRING, {.str = "HevcVideoStreamProcess"}, 0, 0, E }, ++ { "audio_plugin_path", "Audio stream process plugin path", OFFSET(audioPluginPath), AV_OPT_TYPE_STRING, {.str = "NULL"}, 0, 0, E }, ++ { "audio_plugin_name", "Audio stream process plugin name", OFFSET(audioPluginName), AV_OPT_TYPE_STRING, {.str = "NULL"}, 0, 0, E }, ++ { "fixed_extractors_res", "whether extractor track needs the fixed resolution", OFFSET(fixedPackedPicRes), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, E }, ++ { "need_external_log", "whether external log callback is needed", OFFSET(need_external_log), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, E }, ++ { "min_log_level", "Minimal log level of output [0: INFO, 1: WARNING, 2: ERROR, 3: FATAL]", OFFSET(min_log_level), AV_OPT_TYPE_INT, { .i64 = 2 }, 0, 3, E }, + { NULL }, +}; + @@ -4450,14 +5528,10 @@ index 0000000..0a66efa + .deinit = omaf_free, + .priv_class = &omaf_class, +}; -+ -+ -diff --git a/FFmpeg/libavformat/tiled_dash_dec.c b/FFmpeg/libavformat/tiled_dash_dec.c -new file mode 100644 -index 0000000..8e469af ---- /dev/null -+++ b/FFmpeg/libavformat/tiled_dash_dec.c -@@ -0,0 +1,309 @@ +diff -urN FFmpeg/libavformat/tiled_dash_dec.c FFmpeg-patched/libavformat/tiled_dash_dec.c +--- FFmpeg/libavformat/tiled_dash_dec.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavformat/tiled_dash_dec.c 2020-09-27 13:35:13.559526536 +0800 +@@ -0,0 +1,408 @@ +/* + * Intel tile Dash Demuxer + * @@ -4496,6 +5570,8 @@ index 0000000..8e469af +#include "avio_internal.h" +#include "tiled_dash_dec.h" + 
++uint64_t frameCnt = 0; ++ +int tiled_dash_ViewPort_update(AVFormatContext *s, bool isVertical, double move) +{ + int ret = 0; @@ -4539,47 +5615,115 @@ index 0000000..8e469af +{ + int ret = 0; + TiledDASHDecContext *c = s->priv_data; ++ frameCnt++; + // TODO: read packet for one stream once?? -+ int streamId = 0; ++ for (int streamId = 0; streamId < c->mInfo.stream_count; streamId++) ++ { ++ //int streamId = 0; ++ //frameCnt++; + -+ DashStreamInfo stInfo = c->mInfo.stream_info[streamId]; ++ DashStreamInfo stInfo = c->mInfo.stream_info[streamId]; + -+ DashPacket dashPkt[5]; -+ memset(dashPkt, 0, 5 * sizeof(DashPacket)); -+ int dashPktNum = 0; ++ DashPacket dashPkt[5]; ++ memset(dashPkt, 0, 5 * sizeof(DashPacket)); ++ int dashPktNum = 0; + -+ ret = OmafAccess_GetPacket(c->hdl, streamId, &(dashPkt[0]), &dashPktNum, &(pkt->pts), c->needHeaders, c->mClearBuf); -+ if(ret != ERROR_NONE){ -+ //av_log(s, AV_LOG_ERROR, "OmafAccess_GetPacket get null packet\ n" ); -+ //av_packet_unref(pkt); -+ } ++ if (stInfo.stream_type == MediaType_Video) ++ { ++ ret = OmafAccess_GetPacket(c->hdl, streamId, &(dashPkt[0]), &dashPktNum, &(pkt->pts), c->needHeaders, c->mClearBuf); ++ if(ret != ERROR_NONE){ ++ //av_log(s, AV_LOG_ERROR, "OmafAccess_GetPacket get null packet\ n" ); ++ //av_packet_unref(pkt); ++ } + -+ if(dashPktNum && dashPkt[0].buf && dashPkt[0].size) -+ { -+ int size = dashPkt[0].size; -+ if (av_new_packet(pkt, size) < 0) -+ return AVERROR(ENOMEM); ++ if ((frameCnt % 50) == 0) ++ { ++ HeadPose newPose; ++ newPose.yaw = 45; ++ newPose.pitch = 90; ++ OmafAccess_ChangeViewport(c->hdl, &newPose); ++ } ++ else if ((frameCnt % 75) == 0) ++ { ++ OmafAccess_ChangeViewport(c->hdl, &(c->pose)); ++ } ++ ++ if(dashPktNum && dashPkt[0].buf && dashPkt[0].size) ++ { ++ int size = dashPkt[0].size; ++ if (av_new_packet(pkt, size) < 0) ++ return AVERROR(ENOMEM); ++ ++ memcpy(pkt->data, dashPkt[0].buf, size); ++ pkt->size = size; + -+ memcpy(pkt->data, dashPkt[0].buf, size); -+ pkt->size = size; ++ free(dashPkt[0].buf); ++ dashPkt[0].buf = NULL; ++ if (dashPkt[0].rwpk != NULL) ++ { ++ if (dashPkt[0].rwpk->rectRegionPacking != NULL) ++ { ++ free(dashPkt[0].rwpk->rectRegionPacking); ++ dashPkt[0].rwpk->rectRegionPacking = NULL; ++ } ++ free(dashPkt[0].rwpk); ++ dashPkt[0].rwpk = NULL; ++ } ++ if (dashPkt[0].qtyResolution) ++ { ++ free(dashPkt[0].qtyResolution); ++ dashPkt[0].qtyResolution = NULL; ++ } ++ if(c->needHeaders){c->needHeaders = false;} ++ } + -+ free(dashPkt[0].buf); -+ dashPkt[0].buf = NULL; -+ if (dashPkt[0].rwpk != NULL) ++ for (int pktIdx = 1; pktIdx < dashPktNum; pktIdx++) ++ { ++ if (dashPkt[pktIdx].buf && dashPkt[pktIdx].size) ++ { ++ free(dashPkt[pktIdx].buf); ++ dashPkt[pktIdx].buf = NULL; ++ ++ if (dashPkt[pktIdx].rwpk != NULL) ++ { ++ if (dashPkt[pktIdx].rwpk->rectRegionPacking != NULL) ++ { ++ free(dashPkt[pktIdx].rwpk->rectRegionPacking); ++ dashPkt[pktIdx].rwpk->rectRegionPacking = NULL; ++ } ++ free(dashPkt[pktIdx].rwpk); ++ dashPkt[pktIdx].rwpk = NULL; ++ } ++ if (dashPkt[pktIdx].qtyResolution) ++ { ++ free(dashPkt[pktIdx].qtyResolution); ++ dashPkt[pktIdx].qtyResolution = NULL; ++ } ++ } ++ } ++ } ++ else if (stInfo.stream_type == MediaType_Audio) + { -+ if (dashPkt[0].rwpk->rectRegionPacking != NULL) ++ uint64_t audio_pts = 0; ++ ret = OmafAccess_GetPacket(c->hdl, streamId, &(dashPkt[0]), &dashPktNum, &(audio_pts), c->needHeaders, c->mClearBuf); ++ if(ret == ERROR_NULL_PACKET) ++ { ++ av_log(s, AV_LOG_INFO, "OmafAccess_GetPacket get null packet\n" ); ++ } ++ ++ if(dashPktNum && dashPkt[0].buf 
&& dashPkt[0].size) + { -+ free(dashPkt[0].rwpk->rectRegionPacking); -+ dashPkt[0].rwpk->rectRegionPacking = NULL; ++ FILE *audioFP = fopen("dumpedAAC.aac", "ab+"); ++ if (!audioFP) ++ { ++ av_log(s, AV_LOG_ERROR, "Failed to open dumpedAAC.m4a !\n" ); ++ } ++ fwrite(dashPkt[0].buf, 1, dashPkt[0].size, audioFP); ++ fclose(audioFP); ++ audioFP = NULL; + } -+ free(dashPkt[0].rwpk); -+ dashPkt[0].rwpk = NULL; + } -+ if(c->needHeaders){c->needHeaders = false;} + } -+ -+ if(c->mClearBuf){c->mClearBuf = false;} -+ + return ret; +} + @@ -4590,8 +5734,6 @@ index 0000000..8e469af + return -1; + } + -+ clientInfo->input_geoType = E_SVIDEO_EQUIRECT; -+ clientInfo->output_geoType = E_SVIDEO_VIEWPORT; + clientInfo->pose = (HeadPose*)malloc(sizeof(HeadPose)); + clientInfo->pose->yaw = -90; + clientInfo->pose->pitch = 0; @@ -4612,8 +5754,14 @@ index 0000000..8e469af + c->mClearBuf = false; + c->needHeaders = true; + c->client = (DashStreamingClient *)malloc(sizeof(DashStreamingClient)); ++ memset(c->client, 0, sizeof(DashStreamingClient)); ++ memset(&c->client->omaf_params.proxy, 0, sizeof(OmafHttpProxy)); ++ memset(&c->client->omaf_params.predictor_params, 0, sizeof(OmafPredictorParams)); ++ c->client->omaf_params.max_decode_width = 2560; ++ c->client->omaf_params.max_decode_height = 2560; + c->client->source_type = MultiResSource;//DefaultSource; + c->client->media_url = s->filename; ++ c->client->enable_extractor = c->enable_extractor; + if(!c->cache_path) + { + c->client->cache_path = "./cache"; @@ -4631,12 +5779,24 @@ index 0000000..8e469af + ret = SetupHeadSetInfo(&(c->HSInfo)); + ret = OmafAccess_SetupHeadSetInfo(c->hdl, &(c->HSInfo)); + -+ ret = OmafAccess_OpenMedia(c->hdl, c->client, false); ++ ret = OmafAccess_OpenMedia(c->hdl, c->client, false, "", ""); ++ ret = OmafAccess_StartStreaming(c->hdl); + + c->lastPose.yaw = c->HSInfo.pose->yaw; + c->lastPose.pitch = c->HSInfo.pose->pitch; + + ret = OmafAccess_GetMediaInfo(c->hdl, &(c->mInfo)); ++ printf("Media streams cnt is %d\n", c->mInfo.stream_count); ++ bool hasAudio = false; ++ for(int i = 0 ; i < c->mInfo.stream_count ; i++) ++ { ++ DashStreamInfo stInfo = c->mInfo.stream_info[i]; ++ if (stInfo.stream_type == MediaType_Audio) ++ { ++ hasAudio = true; ++ break; ++ } ++ } + + for(int i = 0 ; i < c->mInfo.stream_count ; i++) + { @@ -4652,15 +5812,24 @@ index 0000000..8e469af + if(stInfo.stream_type == MediaType_Video) + { + st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO; -+ c->n_videos = c->mInfo.stream_count; ++ if (hasAudio) ++ { ++ c->n_videos = c->mInfo.stream_count - 1; ++ } ++ else ++ { ++ c->n_videos = c->mInfo.stream_count; ++ } ++ st->codecpar->codec_id = AV_CODEC_ID_HEVC; + } + else if(stInfo.stream_type == MediaType_Audio) + { + st->codecpar->codec_type = AVMEDIA_TYPE_AUDIO; -+ c->n_audios = c->mInfo.stream_count; ++ c->n_audios = 1; ++ st->codecpar->codec_id = AV_CODEC_ID_AAC; + } + -+ st->codecpar->codec_id = AV_CODEC_ID_HEVC; ++ // st->codecpar->codec_id = AV_CODEC_ID_HEVC; + + st->codecpar->width = stInfo.width; + st->codecpar->height = stInfo.height; @@ -4745,6 +5914,10 @@ index 0000000..8e469af + OFFSET(cache_path), AV_OPT_TYPE_STRING, + {.str = NULL}, + INT_MIN, INT_MAX, FLAGS}, ++ {"enable_extractor", "whether to enable extractor track in OMAF Dash Access engine", ++ OFFSET(enable_extractor), AV_OPT_TYPE_INT, ++ {.i64 = 1}, ++ INT_MIN, INT_MAX, FLAGS}, + {NULL} +}; + @@ -4767,12 +5940,10 @@ index 0000000..8e469af + .read_seek = tiled_dash_read_seek, + .flags = AVFMT_NO_BYTE_SEEK, +}; -diff --git 
a/FFmpeg/libavformat/tiled_dash_dec.h b/FFmpeg/libavformat/tiled_dash_dec.h -new file mode 100644 -index 0000000..20e4f53 ---- /dev/null -+++ b/FFmpeg/libavformat/tiled_dash_dec.h -@@ -0,0 +1,64 @@ +diff -urN FFmpeg/libavformat/tiled_dash_dec.h FFmpeg-patched/libavformat/tiled_dash_dec.h +--- FFmpeg/libavformat/tiled_dash_dec.h 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavformat/tiled_dash_dec.h 2020-09-27 13:35:13.557526536 +0800 +@@ -0,0 +1,65 @@ +/* + * Intel tile Dash muxer + * @@ -4831,17 +6002,16 @@ index 0000000..20e4f53 + HeadPose lastPose; + bool mClearBuf; + bool needHeaders; ++ int enable_extractor; +}TiledDASHDecContext; + +int tiled_dash_ViewPort_update(AVFormatContext *s, bool isVertical, double move); + +#endif + -diff --git a/FFmpeg/libavformat/tiled_dash_enc.c b/FFmpeg/libavformat/tiled_dash_enc.c -new file mode 100644 -index 0000000..9ece06d ---- /dev/null -+++ b/FFmpeg/libavformat/tiled_dash_enc.c +diff -urN FFmpeg/libavformat/tiled_dash_enc.c FFmpeg-patched/libavformat/tiled_dash_enc.c +--- FFmpeg/libavformat/tiled_dash_enc.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavformat/tiled_dash_enc.c 2020-09-27 13:35:13.557526536 +0800 @@ -0,0 +1,359 @@ +/* + * Intel tile Dash muxer @@ -5202,11 +6372,9 @@ index 0000000..9ece06d +}; + + -diff --git a/FFmpeg/libavformat/tiled_dash_parse.c b/FFmpeg/libavformat/tiled_dash_parse.c -new file mode 100644 -index 0000000..a762508 ---- /dev/null -+++ b/FFmpeg/libavformat/tiled_dash_parse.c +diff -urN FFmpeg/libavformat/tiled_dash_parse.c FFmpeg-patched/libavformat/tiled_dash_parse.c +--- FFmpeg/libavformat/tiled_dash_parse.c 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavformat/tiled_dash_parse.c 2020-09-27 13:35:13.558526536 +0800 @@ -0,0 +1,1624 @@ +/* + * Intel tile Dash muxer @@ -6832,11 +8000,9 @@ index 0000000..a762508 + + return 0; +} -diff --git a/FFmpeg/libavformat/tiled_dash_parse.h b/FFmpeg/libavformat/tiled_dash_parse.h -new file mode 100644 -index 0000000..38857fe ---- /dev/null -+++ b/FFmpeg/libavformat/tiled_dash_parse.h +diff -urN FFmpeg/libavformat/tiled_dash_parse.h FFmpeg-patched/libavformat/tiled_dash_parse.h +--- FFmpeg/libavformat/tiled_dash_parse.h 1970-01-01 08:00:00.000000000 +0800 ++++ FFmpeg-patched/libavformat/tiled_dash_parse.h 2020-09-27 13:35:13.558526536 +0800 @@ -0,0 +1,168 @@ +/* + * Intel tile Dash muxer diff --git a/src/isolib/CMakeLists.txt b/src/isolib/CMakeLists.txt new file mode 100644 index 00000000..c478a180 --- /dev/null +++ b/src/isolib/CMakeLists.txt @@ -0,0 +1,6 @@ +cmake_minimum_required(VERSION 2.8) + +project(isolib) + +add_subdirectory(dash_writer) +add_subdirectory(dash_parser) diff --git a/src/isolib/atoms/Atom.cpp b/src/isolib/atoms/Atom.cpp new file mode 100644 index 00000000..bb437317 --- /dev/null +++ b/src/isolib/atoms/Atom.cpp @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Atom.cpp +//! \brief: Atom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! + +#include "Atom.h" +#include +#include "Stream.h" + +VCD_MP4_BEGIN + +Atom::Atom(FourCCInt AtomType) + : m_size(8) + , m_type(AtomType) + , m_userType() + , m_startLocation(0) + , m_largeSize(false) +{ +} + +void Atom::SetLargeSize() +{ + m_largeSize = true; +} + +bool Atom::GetLargeSize() const +{ + return m_largeSize; +} + +void Atom::WriteAtomHeader(Stream& str) const +{ + m_startLocation = str.GetSize(); + + if (m_largeSize) + { + str.Write32(1); + } + else + { + str.Write32(static_cast(m_size)); + } + + str.Write32(m_type.GetUInt32()); + + // Note that serialized size values will be dummy values until UpdateSize() is called. + if (m_largeSize) + { + str.Write64(m_size); + } + + if (m_type == "uuid") + { + str.WriteArray(m_userType, 16); + } +} + +void Atom::UpdateSize(Stream& str) const +{ + m_size = str.GetSize() - m_startLocation; + + if ((m_size > std::numeric_limits::max()) && (m_largeSize == false)) + { + ISO_LOG(LOG_ERROR, "Atom::UpdateSize(): Atom size exceeds 4GB but large size for 64-bit size field was not set.\n"); + throw Exception(); + } + + // Write updated size to the bitstream. + if (m_largeSize) + { + str.SetByte(m_startLocation + 8, (m_size >> 56) & 0xff); + str.SetByte(m_startLocation + 9, (m_size >> 48) & 0xff); + str.SetByte(m_startLocation + 10, (m_size >> 40) & 0xff); + str.SetByte(m_startLocation + 11, (m_size >> 32) & 0xff); + str.SetByte(m_startLocation + 12, (m_size >> 24) & 0xff); + str.SetByte(m_startLocation + 13, (m_size >> 16) & 0xff); + str.SetByte(m_startLocation + 14, (m_size >> 8) & 0xff); + str.SetByte(m_startLocation + 15, m_size & 0xff); + } + else + { + str.SetByte(m_startLocation + 0, (m_size >> 24) & 0xff); + str.SetByte(m_startLocation + 1, (m_size >> 16) & 0xff); + str.SetByte(m_startLocation + 2, (m_size >> 8) & 0xff); + str.SetByte(m_startLocation + 3, m_size & 0xff); + } +} + +void Atom::ParseAtomHeader(Stream& str) +{ + m_size = str.Read32(); + m_type = str.Read32(); + + if (m_size == 1) + { + m_size = str.Read64(); + m_largeSize = true; + } + + if (m_type == "uuid") + { + m_userType.clear(); + for (uint8_t i = 0; i < 16; ++i) + { + m_userType.push_back(str.Read8()); + } + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/Atom.h b/src/isolib/atoms/Atom.h new file mode 100644 index 00000000..a6655080 --- /dev/null +++ b/src/isolib/atoms/Atom.h @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Atom.h +//! \brief: Basic Atom class. +//! \detail: Basic ISOBMFF Atom definition. +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef _ATOM_H_ +#define _ATOM_H_ + +#include "../include/Common.h" +#include +#include "FormAllocator.h" +#include "FourCCInt.h" + +#define ABMAX_SAMP_CNT (1 << 22) + +VCD_MP4_BEGIN + +class Stream; + +class Atom +{ +public: + + //! + //! \brief Set and Get function for m_type member + //! + //! \param [in] FourCCInt + //! value to set + //! \param [in] m_type + //! m_type member in class + //! \param [in] Type + //! m_type name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(FourCCInt, m_type, Type, const); + + //! + //! \brief Set 64-bit size + //! + //! \return void + //! + void SetLargeSize(); + + //! + //! \brief Get 64-bit size + //! + //! \return bool + //! + bool GetLargeSize() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str) = 0; + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str) = 0; + +protected: + + //! + //! \brief Constructor + //! + Atom(FourCCInt boxType); + + //! + //! \brief Destructor + //! + virtual ~Atom() = default; + + //! + //! \brief Set and Get function for m_size member + //! + //! \param [in] uint64_t + //! value to set + //! \param [in] m_size + //! m_size member in class + //! \param [in] Size + //! m_size name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint64_t, m_size, Size, const); + + //! + //! \brief Set and Get function for m_size member + //! + //! \param [in] std::vector& + //! value to set + //! \param [in] m_userType + //! m_userType member in class + //! \param [in] UserType + //! m_userType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! 
+ MEMBER_SETANDGET_FUNC_WITH_OPTION(std::vector&, m_userType, UserType, ); + + //! + //! \brief Write atom header information + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void WriteAtomHeader(Stream& str) const; + + //! + //! \brief Parse atom header information + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ParseAtomHeader(Stream& str); + + //! + //! \brief Update total atom size. + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void UpdateSize(Stream& str) const; + +private: + + mutable std::uint64_t m_size; //!< atom size + FourCCInt m_type; //!< atom type + std::vector m_userType; //!< atom user type + mutable std::uint64_t m_startLocation; //!< start position of bitstream + bool m_largeSize; //!< Use large size(64 bit) or not +}; + +VCD_MP4_END; +#endif /* ATOM_H */ diff --git a/src/isolib/atoms/AudSampEntryAtom.cpp b/src/isolib/atoms/AudSampEntryAtom.cpp new file mode 100644 index 00000000..9e7f2f56 --- /dev/null +++ b/src/isolib/atoms/AudSampEntryAtom.cpp @@ -0,0 +1,274 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AudSampEntryAtom.cpp +//! \brief: AudSampEntryAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
+ +#include "AudSampEntryAtom.h" +#include +#include + + +VCD_MP4_BEGIN + +AudioSampleEntryAtom::AudioSampleEntryAtom(FourCCInt codingname) + : SampleEntryAtom(codingname) + , m_version(0) + , m_channelNumber(0) + , m_sampleSize(0) + , m_sampleRate(0) + , m_hasChannelLayoutAtom(false) + , m_hasSamplingRateAtom(false) + , m_channelLayoutAtom() + , m_samplingRateAtom() +{ +} + +AudioSampleEntryAtom::AudioSampleEntryAtom(const AudioSampleEntryAtom& Atom) + : SampleEntryAtom(Atom) + , m_version(Atom.m_version) + , m_channelNumber(Atom.m_channelNumber) + , m_sampleSize(Atom.m_sampleSize) + , m_sampleRate(Atom.m_sampleRate) + , m_hasChannelLayoutAtom(Atom.m_hasChannelLayoutAtom) + , m_hasSamplingRateAtom(Atom.m_hasSamplingRateAtom) + , m_channelLayoutAtom(Atom.m_channelLayoutAtom) + , m_samplingRateAtom(Atom.m_samplingRateAtom) +{ +} + + +void AudioSampleEntryAtom::SetVersion(std::uint16_t version) +{ + if (version == 0 || version == 1) + { + m_version = version; + } + else + { + ISO_LOG(LOG_ERROR, "AudioSampleEntryAtom::SetVersion Error: trying to set value other than 0 or 1\n"); + throw Exception(); + } +} + +std::uint16_t AudioSampleEntryAtom::GetVersion() const +{ + return m_version; +} + +void AudioSampleEntryAtom::SetChannelCount(std::uint16_t channelCnt) +{ + m_channelNumber = channelCnt; +} + +std::uint16_t AudioSampleEntryAtom::GetChannelCount() const +{ + return m_channelNumber; +} + +void AudioSampleEntryAtom::SetSampleSize(std::uint16_t sampSize) +{ + m_sampleSize = sampSize; +} + +std::uint16_t AudioSampleEntryAtom::GetSampleSize() const +{ + return m_sampleSize; +} + +std::uint32_t AudioSampleEntryAtom::GetSampleRate() const +{ + if (m_version == 1 && m_hasSamplingRateAtom) + { + return m_samplingRateAtom.GetSamplingRate(); + } + else + { + return m_sampleRate; + } +} + +void AudioSampleEntryAtom::SetSampleRate(std::uint32_t samplerate) +{ + m_sampleRate = samplerate; +} + +bool AudioSampleEntryAtom::HasChannelLayoutAtom() +{ + return m_hasChannelLayoutAtom; +} + +ChannelLayoutAtom& AudioSampleEntryAtom::GetChannelLayoutAtom() +{ + return m_channelLayoutAtom; +} + +void AudioSampleEntryAtom::SetChannelLayoutAtom(ChannelLayoutAtom& channelLayoutAtom) +{ + m_channelLayoutAtom = channelLayoutAtom; + m_hasChannelLayoutAtom = true; +} + +bool AudioSampleEntryAtom::HasSamplingRateAtom() +{ + if (m_version == 1) + { + return m_hasSamplingRateAtom; + } + else + { + return false; + } +} + +SamplingRateAtom& AudioSampleEntryAtom::GetSamplingRateAtom() +{ + if (m_version == 1) + { + return m_samplingRateAtom; + } + else + { + ISO_LOG(LOG_ERROR, "AudioSampleEntryAtom::GetSamplingRateAtom Error: trying to GetSamplingRateAtom from version other than 1\n"); + throw Exception(); + } +} + +void AudioSampleEntryAtom::SetSamplingRateAtom(SamplingRateAtom& samplingRateAtom) +{ + this->SetVersion(1); + m_samplingRateAtom = samplingRateAtom; + m_hasSamplingRateAtom = true; +} + +void AudioSampleEntryAtom::ToStream(Stream& str) +{ + SampleEntryAtom::ToStream(str); + + if (m_version == 1) + { + str.Write16(m_version); + str.Write16(0); // reserved = 0 + } + else + { + str.Write32(0); // reserved = 0 + } + str.Write32(0); // reserved = 0 + str.Write16(m_channelNumber); // number of channels 1 (mono) or 2 (stereo) + str.Write16(m_sampleSize); // in bits and takes default value of 16 + str.Write16(0); // pre_defined = 0 + str.Write16(0); // reserved = 0 + str.Write32(m_sampleRate << 16); // 32bit field expressed as 16.16 fixed-point number (hi.lo) + + if (m_version == 1) + { + if (m_hasSamplingRateAtom) + { 
+ m_samplingRateAtom.ToStream(str); + } + } + + if (m_hasChannelLayoutAtom) + { + m_channelLayoutAtom.ToStream(str); + } + + UpdateSize(str); +} + +void AudioSampleEntryAtom::FromStream(Stream& str) +{ + SampleEntryAtom::FromStream(str); + + m_version = str.Read16(); // in case of v0 Atom this is first half of 32bit reserved = 0 + if (m_version != 1 && m_version != 0) + { + ISO_LOG(LOG_ERROR, "AudioSampleEntryV1Atom::FromStreamAtom Error: trying to read version other than 0 or 1\n"); + throw Exception(); + } + + str.Read16(); + str.Read32(); + m_channelNumber = str.Read16(); + m_sampleSize = str.Read16(); + str.Read16(); + str.Read16(); + m_sampleRate = (str.Read32() >> 16); + + std::uint64_t revertOffset = ~0u; + + while (str.BytesRemain() > 0) + { + const std::uint64_t startOffset = str.GetPos(); + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (m_version == 1 && AtomType == "srat") + { + m_hasSamplingRateAtom = true; + m_samplingRateAtom.FromStream(subBitstr); + } + else if (AtomType == "chnl") + { + m_hasChannelLayoutAtom = true; + m_channelLayoutAtom.SetChannelNumber(m_channelNumber); + m_channelLayoutAtom.FromStream(subBitstr); + } + else if (AtomType == "esds") + { + revertOffset = startOffset; + } + } + + if (revertOffset != ~0u) + { + str.SetPos(revertOffset); + } +} + +AudioSampleEntryAtom* AudioSampleEntryAtom::Clone() const +{ + return nullptr; +} + +const Atom* AudioSampleEntryAtom::GetConfigurationAtom() const +{ + ISO_LOG(LOG_ERROR, "AudioSampleEntryAtom::GetConfigurationAtom() not impelmented \n"); + return nullptr; +} + +const DecoderConfigurationRecord* AudioSampleEntryAtom::GetConfigurationRecord() const +{ + ISO_LOG(LOG_ERROR, "AudioSampleEntryAtom::GetConfigurationRecord() not impelmented \n"); + return nullptr; +} + +VCD_MP4_END diff --git a/src/isolib/atoms/AudSampEntryAtom.h b/src/isolib/atoms/AudSampEntryAtom.h new file mode 100644 index 00000000..d020dccd --- /dev/null +++ b/src/isolib/atoms/AudSampEntryAtom.h @@ -0,0 +1,260 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AudSampEntryAtom.h +//! \brief: AudSampEntryAtom class. +//! \detail: atom sample contains audio information. +//! +//! 
Created on October 15, 2019, 13:39 PM +//! + +#ifndef _AUDIOSAMPLEENTRYATOM_H_ +#define _AUDIOSAMPLEENTRYATOM_H_ + +#include "Stream.h" +#include "ChannelLayoutAtom.h" +#include "FormAllocator.h" +#include "SampEntryAtom.h" +#include "SampRateAtom.h" + +VCD_MP4_BEGIN + +class AudioSampleEntryAtom : public SampleEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + AudioSampleEntryAtom(FourCCInt codingname); + AudioSampleEntryAtom(const AudioSampleEntryAtom& Atom); + + AudioSampleEntryAtom& operator=(const AudioSampleEntryAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~AudioSampleEntryAtom() = default; + + //! + //! \brief Set version + //! + //! \param [in] std::uint16_t + //! version value + //! + //! \return void + //! + void SetVersion(std::uint16_t version); + + //! + //! \brief Get version + //! + //! \return std::uint16_t + //! version + //! + std::uint16_t GetVersion() const; + + //! + //! \brief Set and Get function for m_channelNumber member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_channelNumber + //! m_channelNumber member in class + //! \param [in] ChannelNumber + //! m_channelNumber name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + //MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_channelNumber, ChannelNumber, const); + + //! + //! \brief Set and Get function for m_sampleSize member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_sampleSize + //! m_sampleSize member in class + //! \param [in] SampleSize + //! m_sampleSize name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + //MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_sampleSize, SampleSize, const); + + //! + //! \brief Get ChannelCount + //! + //! \return std::uint16_t + //! ChannelCount + //! + std::uint16_t GetChannelCount() const; + + //! + //! \brief Set ChannelCount + //! + //! \param [in] std::uint16_t + //! ChannelCount value + //! + //! \return void + //! + void SetChannelCount(std::uint16_t channelCnt); + + void SetSampleSize(std::uint16_t sampSize); + + std::uint16_t GetSampleSize() const; + //! + //! \brief Get SampleRate + //! + //! \return std::uint32_t + //! SampleRate + //! + std::uint32_t GetSampleRate() const; + + //! + //! \brief Set SampleRate + //! + //! \param [in] std::uint32_t + //! SampleRate value + //! + //! \return void + //! + void SetSampleRate(std::uint32_t height); + + //! + //! \brief Has ChannelLayout Atom or not + //! + //! \return bool + //! has or not + //! + bool HasChannelLayoutAtom(); + + //! + //! \brief Get ChannelLayout Atom + //! + //! \return ChannelLayoutAtom& + //! ChannelLayout Atom + //! + ChannelLayoutAtom& GetChannelLayoutAtom(); + + //! + //! \brief Set ChannelLayout Atom + //! + //! \param [in] ChannelLayoutAtom& + //! ChannelLayout Atom + //! + //! \return void + //! + void SetChannelLayoutAtom(ChannelLayoutAtom& channelLayoutAtom); + + //! + //! \brief Has SamplingRate Atom or not + //! + //! \return bool + //! has or not + //! + bool HasSamplingRateAtom(); + + //! + //! \brief Get SamplingRate Atom + //! + //! \return SamplingRateAtom& + //! SamplingRate Atom + //! + SamplingRateAtom& GetSamplingRateAtom(); + + //! + //! \brief Set SamplingRate Atom + //! + //! \param [in] SamplingRateAtom& + //! SamplingRate Atom + //! + //! \return void + //! + void SetSamplingRateAtom(SamplingRateAtom& samplingRateAtom); + + //! + //! 
\brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& bitstr) override; + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& bitstr) override; + + //! + //! \brief Get Copy of AudioSampleEntryAtom + //! + //! \return AudioSampleEntryAtom* + //! AudioSampleEntry Atom + //! + virtual AudioSampleEntryAtom* Clone() const override; + + //! + //! \brief Get ConfigurationRecord + //! + //! \return const DecoderConfigurationRecord* + //! DecoderConfigurationRecord value + //! + virtual const DecoderConfigurationRecord* GetConfigurationRecord() const override; + + //! + //! \brief Get Configuration Atom + //! + //! \return const Atom* + //! Configuration Atom + //! + virtual const Atom* GetConfigurationAtom() const override; + +private: + std::uint16_t m_version; //!< atom version + std::uint16_t m_channelNumber; //!< number of channel + std::uint16_t m_sampleSize; //!< sampile size + std::uint32_t m_sampleRate; //!< sample rate + bool m_hasChannelLayoutAtom; //!< has ChannelLayoutAtom or not + bool m_hasSamplingRateAtom; //!< has SamplingRateAtom or not + ChannelLayoutAtom m_channelLayoutAtom; //!< ChannelLayoutAtom + SamplingRateAtom m_samplingRateAtom; //!< SamplingRateAtom +}; + +VCD_MP4_END; +#endif /* _AUDIOSAMPLEENTRYATOM_H_ */ diff --git a/src/isolib/atoms/AvcConfigAtom.cpp b/src/isolib/atoms/AvcConfigAtom.cpp new file mode 100644 index 00000000..ef7607e4 --- /dev/null +++ b/src/isolib/atoms/AvcConfigAtom.cpp @@ -0,0 +1,336 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AvcConfigAtom.cpp +//! \brief: AvcConfigAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
+ +#include "AvcConfigAtom.h" +#include "Stream.h" +#include "AvcParser.h" +#include "NalUtil.h" + +#include + +VCD_MP4_BEGIN + +AvcConfigurationAtom::AvcConfigurationAtom() + : Atom("avcC") + , m_avcConfig() +{ +} + +AvcConfigurationAtom::AvcConfigurationAtom(const AvcConfigurationAtom& atom) + : Atom(atom.GetType()) + , m_avcConfig(atom.m_avcConfig) +{ +} + +void AvcConfigurationAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + m_avcConfig.WriteDecConfigRec(str); + UpdateSize(str); +} + +void AvcConfigurationAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + m_avcConfig.ParseConfig(str); +} + +AvcDecoderConfigurationRecord::AvcDecoderConfigurationRecord() + : m_configVersion(1) + , m_avcProfileIdc(66) + , m_profileCompat(128) + , m_avcLevelIdc(30) + , m_lengthSizeMinus1(3) + , m_chromaFormat(0) + , m_bitDepthLuma(0) + , m_bitDepthChroma(0) + , m_picWidth(0) + , m_picHeight(0) + , m_nalArray() +{ +} + +bool AvcDecoderConfigurationRecord::ConfigSPS(const std::vector& sps) +{ + const std::vector rbsp = TransferStreamToRBSP(sps); + Stream str(rbsp); + SPSCfgs pConfig; + + str.Read8(); + if (!parseSPS(str, pConfig)) + { + return false; + } + + m_avcProfileIdc = pConfig.profileIdc; + m_profileCompat = pConfig.profileCompatibility; + m_avcLevelIdc = pConfig.levelIdc; + m_chromaFormat = static_cast(pConfig.chromaFormatIdc); + m_bitDepthLuma = static_cast(pConfig.bitDepthLumaMinus8); + m_bitDepthChroma = static_cast(pConfig.bitDepthChromaMinus8); + + m_picWidth = (uint16_t)(pConfig.picWidthInMbsMinus1 + 1) * 16 - + (pConfig.frameCropLeftOffset + pConfig.frameCropRightOffset) * 2; + m_picHeight = (uint16_t)(2 - static_cast(pConfig.frameMbsOnlyFlag)) * + (pConfig.picHeightInMapUnitsMinus1 + 1) * 16 - + (pConfig.frameCropTopOffset + pConfig.frameCropBottomOffset) * 2; + + return true; +} + +void AvcDecoderConfigurationRecord::AddNalUnit(const std::vector& nalUnit, + const AvcNalDefs nalUnitType, + const uint8_t arrayCompleteness) +{ + NALs* nals = nullptr; + std::vector tmpNalUnit; + unsigned int pLen; + + for (auto& i : m_nalArray) + { + if (static_cast(nalUnitType) == static_cast(i.nalUnitType)) + { + nals = &i; + ISO_LOG(LOG_INFO, "find nal array existed!\n"); + break; + } + } + + if (nals == nullptr) + { + NALs pNalarray; + pNalarray.arrayCompleteness = arrayCompleteness; + pNalarray.nalUnitType = nalUnitType; + m_nalArray.push_back(pNalarray); + nals = &m_nalArray.back(); + } + + pLen = FindStartCodeLen(nalUnit); + tmpNalUnit.insert(tmpNalUnit.begin(), nalUnit.cbegin() + static_cast(pLen), + nalUnit.cend()); + + nals->nalList.push_back(tmpNalUnit); +} + +void AvcDecoderConfigurationRecord::WriteDecConfigRec(Stream& str) const +{ + str.Write1(m_configVersion, 8); + str.Write1(m_avcProfileIdc, 8); + str.Write1(m_profileCompat, 8); + str.Write1(m_avcLevelIdc, 8); + + str.Write1(0xff, 6); + str.Write1(m_lengthSizeMinus1, 2); + + str.Write1(0xff, 3); + const NALs* nalArray = GetNALs(AvcNalDefs::SPS); + unsigned int cnt = static_cast(nalArray ? nalArray->nalList.size() : 0); + + if (!(cnt < (1 << 6))) + { + ISO_LOG(LOG_ERROR, "count invalid\n"); + return; + } + str.Write1(cnt, 5); + + if (cnt) + { + for (const auto& nal : nalArray->nalList) + { + str.Write1(static_cast(nal.size()), 16); + str.WriteArray(nal, static_cast(nal.size())); + } + } + + // PPS NALS + nalArray = GetNALs(AvcNalDefs::PPS); + cnt = static_cast(nalArray ? 
nalArray->nalList.size() : 0); + + if (!(cnt < (1 << 9))) + { + ISO_LOG(LOG_ERROR, "count invalid\n"); + return; + } + str.Write1(cnt, 8); + + if (cnt) + { + for (const auto& nal : nalArray->nalList) + { + str.Write1(static_cast(nal.size()), 16); + str.WriteArray(nal, static_cast(nal.size())); + } + } + + if (m_avcProfileIdc == 100 || m_avcProfileIdc == 110 || m_avcProfileIdc == 122 || + m_avcProfileIdc == 144) + { + str.Write1(0xff, 6); + str.Write1(m_chromaFormat, 2); + str.Write1(0xff, 5); + str.Write1(m_bitDepthLuma, 3); + str.Write1(0xff, 5); + str.Write1(m_bitDepthChroma, 3); + + nalArray = GetNALs(AvcNalDefs::SPS_EXT); + cnt = static_cast(nalArray ? nalArray->nalList.size() : 0); + + if (!(cnt < (1 << 9))) + { + ISO_LOG(LOG_ERROR, "count invalid\n"); + return; + } + str.Write1(cnt, 8); + + if (cnt) + { + for (const auto& nal : nalArray->nalList) + { + str.Write1(static_cast(nal.size()), 16); + str.WriteArray(nal, static_cast(nal.size())); + } + } + } +} + +void AvcDecoderConfigurationRecord::ParseConfig(Stream& str) +{ + m_configVersion = static_cast(str.Read1(8)); + m_avcProfileIdc = static_cast(str.Read1(8)); + m_profileCompat = static_cast(str.Read1(8)); + m_avcLevelIdc = static_cast(str.Read1(8)); + + str.Read1(6); // reserved = '111111'b + m_lengthSizeMinus1 = static_cast(str.Read1(2)); + + // SPS NALS + str.Read1(3); // reserved = '111'b + unsigned int cnt = static_cast(str.Read1(5)); + + for (unsigned int nal = 0; nal < cnt; ++nal) + { + unsigned int nalSize = str.Read1(16); + + std::vector nalData; + nalData.clear(); + str.ReadArray(nalData, nalSize); // read parameter set NAL unit + AddNalUnit(nalData, AvcNalDefs::SPS); + } + + // PPS NALS + cnt = static_cast(str.Read1(8)); + + for (unsigned int nal = 0; nal < cnt; ++nal) + { + unsigned int nalSize = str.Read1(16); + + std::vector nalData; + nalData.clear(); + str.ReadArray(nalData, nalSize); // read parameter set NAL unit + AddNalUnit(nalData, AvcNalDefs::PPS); + } + + if (str.GetSize() == str.GetPos()) + { + ISO_LOG(LOG_INFO, "Stop reading if there is no more data\n"); + return; + } + + if (m_avcProfileIdc == 100 || m_avcProfileIdc == 110 || m_avcProfileIdc == 122 || + m_avcProfileIdc == 144) + { + str.Read1(6); // reserved = '111111'b + m_chromaFormat = static_cast(str.Read1(2)); + str.Read1(5); // reserved = '11111'b + m_bitDepthLuma = static_cast(str.Read1(3)); + str.Read1(5); // reserved = '11111'b + m_bitDepthChroma = static_cast(str.Read1(3)); + + // SPS EXT NALS + cnt = static_cast(str.Read1(8)); + + for (unsigned int nal = 0; nal < cnt; ++nal) + { + unsigned int nalSize = str.Read1(16); + + std::vector nalData; + nalData.clear(); + str.ReadArray(nalData, nalSize); // Read parameter set NAL unit. + AddNalUnit(nalData, AvcNalDefs::SPS_EXT); + } + } +} + +const AvcDecoderConfigurationRecord::NALs* +AvcDecoderConfigurationRecord::GetNALs(AvcNalDefs nalUnitType) const +{ + for (const auto& array : m_nalArray) + { + if (array.nalUnitType == nalUnitType) + { + return &array; // Found + } + } + + return nullptr; // Not found +} + +void AvcDecoderConfigurationRecord::GetOneParameterSet(std::vector& byteStream, + const AvcNalDefs nalUnitType) const +{ + const NALs* nalArray = GetNALs(nalUnitType); + + if (nalArray && nalArray->nalList.size() > 0) + { + // Add start code (0x00000001) before the NAL unit. 
+ byteStream.push_back(0); + byteStream.push_back(0); + byteStream.push_back(0); + byteStream.push_back(1); + byteStream.insert(byteStream.end(), nalArray->nalList.at(0).cbegin(), nalArray->nalList.at(0).cend()); + } +} + +void AvcDecoderConfigurationRecord::GetConfigurationMap(ConfigurationMap& aMap) const +{ + std::vector sps; + std::vector pps; + GetOneParameterSet(sps, AvcNalDefs::SPS); + GetOneParameterSet(pps, AvcNalDefs::PPS); + + aMap.clear(); + aMap.insert({DecParam::AVC_SPS, move(sps)}); + aMap.insert({DecParam::AVC_PPS, move(pps)}); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/AvcConfigAtom.h b/src/isolib/atoms/AvcConfigAtom.h new file mode 100644 index 00000000..06e34dfe --- /dev/null +++ b/src/isolib/atoms/AvcConfigAtom.h @@ -0,0 +1,420 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AvcConfigAtom.h +//! \brief: AVC Configuration Atom class +//! \detail: 'avcC' Atom implementation. +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _AVCCONFIGURATIONATOM_H_ +#define _AVCCONFIGURATIONATOM_H_ + +#include "Atom.h" +#include "FormAllocator.h" +#include "DecConfigRecord.h" +#include "Stream.h" + +VCD_MP4_BEGIN + +class Stream; + +/** @brief Common enumeration definitions for decoder configuration record */ +enum class AvcNalDefs : std::uint8_t +{ + UNSPECIFIED_0 = 0, + CODED_SLICE_NON_IDR, // 1 + CODED_SLICE_DPAR_A, + CODED_SLICE_DPAR_B, + CODED_SLICE_DPAR_C, + CODED_SLICE_IDR, // 5 + SEI, // 6 + SPS, // 7 + PPS, // 8 + ACCESS_UNIT_DELIMITER, + EOS, // 10 + EOB, // 11 + FILLER_DATA, + SPS_EXT, + PREFIX_NALU, + SUB_SPS, + DPS, + RESERVED_17, + RESERVED_18, + SLICE_AUX_NOPAR, + SLICE_EXT, + SLICE_EXT_3D, + RESERVED_22, + RESERVED_23, + UNSPECIFIED_24, + UNSPECIFIED_25, + UNSPECIFIED_26, + UNSPECIFIED_27, + UNSPECIFIED_28, + UNSPECIFIED_29, + UNSPECIFIED_30, + UNSPECIFIED_31, + INVALID +}; + +class AvcDecoderConfigurationRecord : public DecoderConfigurationRecord +{ +public: + + //! + //! \brief Constructor + //! + AvcDecoderConfigurationRecord(); + + //! + //! \brief Destructor + //! + ~AvcDecoderConfigurationRecord() = default; + + //! + //! 
\brief Parse configuration information from a SPS NAL unit + //! + //! \param [in] const std::vector& + //! sps value + //! + //! \return bool + //! read success or not + //! + bool ConfigSPS(const std::vector& sps); + + //! + //! \brief Add NAL unit to the NAL unit array + //! + //! \param [in] const std::vector& + //! sps value + //! \param [in] AvcNalDefs + //! nal unit type defs + //! \param [in] std::uint8_t + //! arrayCompleteness + //! + //! \return void + //! + void AddNalUnit(const std::vector& sps, AvcNalDefs nalUnitType, std::uint8_t arrayCompleteness = 0); + + //! + //! \brief Write Decoder Configuration Record + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + void WriteDecConfigRec(Stream& str) const; + + //! + //! \brief Parse Decoder Configuration Record + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + void ParseConfig(Stream& str); + + //! + //! \brief get one parameters set + //! + //! \param [in] std::vector& + //! byte stream + //! \param [in] AvcNalDefs + //! nal unit type defs + //! + //! \return void + //! + void GetOneParameterSet(std::vector& byteStream, AvcNalDefs nalUnitType) const; + + //! + //! \brief Set and Get function for m_picWidth member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_picWidth + //! m_picWidth member in class + //! \param [in] PicWidth + //! m_picWidth name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_picWidth, PicWidth, const); + + //! + //! \brief Set and Get function for m_picHeight member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_picHeight + //! m_picHeight member in class + //! \param [in] PicHeight + //! m_picHeight name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_picHeight, PicHeight, const); + + //! + //! \brief Set and Get function for m_configVersion member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_configVersion + //! m_configVersion member in class + //! \param [in] ConfigurationVersion + //! m_configVersion name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_configVersion, ConfigurationVersion, const); + + //! + //! \brief Set and Get function for m_avcProfileIdc member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_avcProfileIdc + //! m_avcProfileIdc member in class + //! \param [in] AvcProfileIndication + //! m_avcProfileIdc name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_avcProfileIdc, AvcProfileIndication, const); + + //! + //! \brief Set and Get function for m_profileCompat member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_profileCompat + //! m_profileCompat member in class + //! \param [in] ProfileCompatibility + //! m_profileCompat name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_profileCompat, ProfileCompatibility, const); + + //! + //! \brief Set and Get function for m_avcLevelIdc member + //! + //! \param [in] std::uint8_t + //! value to set + //! 
\param [in] m_avcLevelIdc + //! m_avcLevelIdc member in class + //! \param [in] AvcLevelIndication + //! m_avcLevelIdc name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_avcLevelIdc, AvcLevelIndication, const); + + //! + //! \brief Set and Get function for m_lengthSizeMinus1 member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_lengthSizeMinus1 + //! m_lengthSizeMinus1 member in class + //! \param [in] LengthSizeMinus1 + //! m_lengthSizeMinus1 name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_lengthSizeMinus1, LengthSizeMinus1, const); + + //! + //! \brief Set and Get function for m_chromaFormat member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_chromaFormat + //! m_chromaFormat member in class + //! \param [in] ChromaFormat + //! m_chromaFormat name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_chromaFormat, ChromaFormat, const); + + //! + //! \brief Set and Get function for m_bitDepthLuma member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_bitDepthLuma + //! m_bitDepthLuma member in class + //! \param [in] BitDepthLumaMinus8 + //! m_bitDepthLuma name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_bitDepthLuma, BitDepthLumaMinus8, const); + + //! + //! \brief Set and Get function for m_bitDepthChroma member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_bitDepthChroma + //! m_bitDepthChroma member in class + //! \param [in] BitDepthChromaMinus8 + //! m_bitDepthChroma name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_bitDepthChroma, BitDepthChromaMinus8, const); + + //! + //! \brief Parse Decoder Configuration Record + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + virtual void GetConfigurationMap(ConfigurationMap& aMap) const override; + +private: + + struct NALs + { + std::uint8_t arrayCompleteness = 0; + AvcNalDefs nalUnitType = AvcNalDefs::INVALID; + std::vector> nalList; + }; + + std::uint8_t m_configVersion; //!< configuration version + std::uint8_t m_avcProfileIdc; //!< avc profile indication + std::uint8_t m_profileCompat; //!< profile compatibility + std::uint8_t m_avcLevelIdc; //!< avc level indication + std::uint8_t m_lengthSizeMinus1; //!< length size - 1 + + std::uint8_t m_chromaFormat; //!< chroma format + std::uint8_t m_bitDepthLuma; //!< bit depth luma + std::uint8_t m_bitDepthChroma; //!< bit depth chroma + + std::uint16_t m_picWidth; //!< picture width + std::uint16_t m_picHeight; //!< picture height + + std::vector m_nalArray; //!< nal unit array + + //! + //! \brief Get Nal array according to nal unit type + //! + //! \param [in] AvcNalDefs + //! nalUnitType + //! + //! \return const NALs* + //! nal array + //! + const NALs* GetNALs(AvcNalDefs nalUnitType) const; +}; + +class AvcConfigurationAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! 
+ AvcConfigurationAtom(); + AvcConfigurationAtom(const AvcConfigurationAtom& Atom); + + AvcConfigurationAtom& operator=(const AvcConfigurationAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~AvcConfigurationAtom() = default; + + //! + //! \brief Set and Get function for m_avcConfig member + //! + //! \param [in] const AvcDecoderConfigurationRecord& + //! value to set + //! \param [in] m_avcConfig + //! m_avcConfig member in class + //! \param [in] Configuration + //! m_avcConfig name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const AvcDecoderConfigurationRecord&, m_avcConfig, Configuration, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + AvcDecoderConfigurationRecord m_avcConfig; //!< avc configuration +}; + +VCD_MP4_END; +#endif /* _AVCCONFIGURATIONATOM_H_ */ diff --git a/src/isolib/atoms/AvcParser.cpp b/src/isolib/atoms/AvcParser.cpp new file mode 100644 index 00000000..b064788d --- /dev/null +++ b/src/isolib/atoms/AvcParser.cpp @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AvcParser.cpp +//! \brief: AvcParser class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
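For orientation before the parser implementation below, here is a minimal usage sketch of the `AvcDecoderConfigurationRecord` declared above. It is illustrative only and not part of the patch: `BuildAvcConfig` and the `spsNal`/`ppsNal` buffers are hypothetical names, and namespace qualifiers (`VCD_MP4_BEGIN`/`VCD_MP4_END`) are omitted for brevity.

```cpp
#include <cstdint>
#include <vector>
#include "AvcConfigAtom.h"

// Hypothetical helper: fill an 'avcC' record from raw SPS/PPS NAL units and serialize it.
bool BuildAvcConfig(const std::vector<std::uint8_t>& spsNal,
                    const std::vector<std::uint8_t>& ppsNal,
                    Stream& out)
{
    AvcDecoderConfigurationRecord rec;
    if (!rec.ConfigSPS(spsNal))               // derives profile/level and cropped picture size from the SPS
    {
        return false;
    }
    rec.AddNalUnit(spsNal, AvcNalDefs::SPS);  // AddNalUnit strips the Annex-B start code internally
    rec.AddNalUnit(ppsNal, AvcNalDefs::PPS);
    rec.WriteDecConfigRec(out);               // emits the AVCDecoderConfigurationRecord payload
    return true;
}
```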
+ +#include "AvcParser.h" +#include "Stream.h" + +VCD_MP4_BEGIN + +bool parseHRD(Stream& str, HRDParams& retHdr) +{ + HRDParams params{}; + + params.cpbCntMinus1 = str.ReadExpGolombCode(); // 0 | 5 ue(v) + params.bitRateScale = static_cast(str.Read1(4)); // 0 | 5 u(4) + params.cpbSizeScale = static_cast(str.Read1(4)); // 0 | 5 u(4) + params.bitRateValueMinus1.resize(params.cpbCntMinus1 + 1); + params.cpbSizeValueMinus1.resize(params.cpbCntMinus1 + 1); + params.cbrFlag.resize(params.cpbCntMinus1 + 1); + for (size_t idx = 0; idx <= params.cpbCntMinus1; idx++) + { + params.bitRateValueMinus1[idx] = str.ReadExpGolombCode(); // 0 | 5 ue(v) + params.cpbSizeValueMinus1[idx] = str.ReadExpGolombCode(); // 0 | 5 ue(v) + params.cbrFlag[idx] = static_cast(str.Read1(1)); // 0 | 5 u(1) + } + params.initialCpbRemovalDelayLengthMinus1 = static_cast(str.Read1(5)); // 0 | 5 u(5) + params.cpbRemovalDelayLengthMinus1 = static_cast(str.Read1(5)); // 0 | 5 u(5) + params.dpbOutputDelayLengthMinus1 = static_cast(str.Read1(5)); // 0 | 5 u(5) + params.timeOffsetLength = static_cast(str.Read1(5)); // 0 | 5 u(5) + + retHdr = params; + return true; +} + +bool parseVUI(Stream& str, VUIParams& retVui) +{ + VUIParams params{}; + params.aspectRatioInfoPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.aspectRatioInfoPresentFlag) + { + params.aspectRatioIdc = static_cast(str.Read1(8)); // 0 u(8) + if (params.aspectRatioIdc == 255 /* Extended_SAR */) + { + params.sarWidth = static_cast(str.Read1(16)); // 0 u(16) + params.sarHeight = static_cast(str.Read1(16)); // 0 u(16) + }; + }; + params.overscanInfoPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.overscanInfoPresentFlag) + params.overscanAppropriateFlag = static_cast(str.Read1(1)); // 0 u(1) + params.videoSignalTypePresentFlag = static_cast(str.Read1(1)); + ; // 0 u(1) + if (params.videoSignalTypePresentFlag) + { + params.videoFormat = static_cast(str.Read1(3)); // 0 u(3) + params.videoFullRangeFlag = static_cast(str.Read1(1)); // 0 u(1) + params.colourDescriptionPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.colourDescriptionPresentFlag) + { + params.colourPrimaries = static_cast(str.Read1(8)); // 0 u(8) + params.transferCharacteristics = static_cast(str.Read1(8)); // 0 u(8) + params.matrixCoefficients = static_cast(str.Read1(8)); // 0 u(8) + }; + }; + params.chromaLocInfoPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.chromaLocInfoPresentFlag) + { + params.chromaSampleLocTypeTopField = static_cast(str.ReadExpGolombCode()); // 0 ue(v) + params.chromaSampleLocTypeBottomField = static_cast(str.ReadExpGolombCode()); // 0 ue(v) + }; + params.timingInfoPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.timingInfoPresentFlag) + { + params.numUnitsInTick = str.Read32(); // 0 u(32) + params.timeScale = str.Read32(); // 0 u(32) + params.fixedFrameRateFlag = static_cast(str.Read1(1)); // 0 u(1) + }; + params.nalHrdParametersPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.nalHrdParametersPresentFlag) + { + if (!parseHRD(str, params.nalHrdParameters)) + { + return false; + } + } + params.vclHrdParametersPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.vclHrdParametersPresentFlag) + { + if (!parseHRD(str, params.vclHrdParameters)) + { + return false; + } + } + if (params.vclHrdParametersPresentFlag || params.vclHrdParametersPresentFlag) + { + params.lowDelayHrdFlag = static_cast(str.Read1(1)); // 0 u(1); + } + params.picStructPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + 
params.bitstreamRestrictionFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.bitstreamRestrictionFlag) + { + params.motionVectorsOverPicBoundariesFlag = static_cast(str.Read1(1)); // 0 u(1) + params.maxBytesPerPicDenom = str.ReadExpGolombCode(); // 0 ue(v) + params.maxBitsPerMbDenom = str.ReadExpGolombCode(); // 0 ue(v) + params.log2MaxMvLengthHorizontal = str.ReadExpGolombCode(); // 0 ue(v) + params.log2MaxMvLengthVertical = str.ReadExpGolombCode(); // 0 ue(v) + params.maxNumReorderFrames = str.ReadExpGolombCode(); // 0 ue(v) + params.maxDecFrameBuffering = str.ReadExpGolombCode(); // 0 ue(v) + } + + retVui = params; + return true; +} + +bool parseSPS(Stream& str, SPSCfgs& retSps) +{ + SPSCfgs params{}; + params.profileIdc = static_cast(str.Read1(8)); // 0 u(8) + params.profileCompatibility = static_cast(str.Read1(8)); // contains a bunch of flags + params.levelIdc = static_cast(str.Read1(8)); // 0 u(8) + params.seqParameterSetId = str.ReadExpGolombCode(); // 0 ue(v) + if (params.profileIdc == 100 || params.profileIdc == 110 || params.profileIdc == 122 || params.profileIdc == 244 || + params.profileIdc == 44 || params.profileIdc == 83 || params.profileIdc == 86 || params.profileIdc == 118 || + params.profileIdc == 128 || params.profileIdc == 138 || params.profileIdc == 139 || params.profileIdc == 134 || + params.profileIdc == 135) + { + params.chromaFormatIdc = str.ReadExpGolombCode(); // 0 ue(v) + if (params.chromaFormatIdc == 3) + { + params.separateColourPlaneFlag = static_cast(str.Read1(1)); // 0 u(1) + } + params.bitDepthChromaMinus8 = str.ReadExpGolombCode(); // 0 ue(v) + params.qpprimeYZeroTransformBypassFlag = static_cast(str.Read1(1)); // 0 u(1) + params.seqScalingMatrixPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.seqScalingMatrixPresentFlag) + { + ISO_LOG(LOG_ERROR, "seq scale matrix is not supported!!\n"); + return false; + } + } + params.log2MaxFrameNumMinus4 = str.ReadExpGolombCode(); // 0 ue(v) + params.picOrderCntType = str.ReadExpGolombCode(); // 0 ue(v) + if (params.picOrderCntType == 0) + { + params.log2MaxPicOrderCntLsbMinus4 = str.ReadExpGolombCode(); // 0 ue(v) + } + else + { + if (params.picOrderCntType == 1) + { + params.deltaPicOrderAlwaysZeroFlag = static_cast(str.Read1(1)); // 0 u(1) + params.offsetForNonRefPic = str.ReadSignedExpGolombCode(); // 0 se(v) + params.offsetForTopToBottomField = str.ReadSignedExpGolombCode(); // 0 se(v) + params.numRefFramesInPicOrderCntCycle = str.ReadExpGolombCode(); // 0 ue(v) + params.offsetForRefFrame.resize(params.numRefFramesInPicOrderCntCycle); + for (size_t i = 0; i < params.numRefFramesInPicOrderCntCycle; i++) + { + params.offsetForRefFrame[i] = str.ReadSignedExpGolombCode(); // 0 se(v) + } + } + } + params.maxNumRefFrames = str.ReadExpGolombCode(); // 0 ue(v) + params.gapsInFrameNumValueAllowedFlag = static_cast(str.Read1(1)); // 0 u(1) + params.picWidthInMbsMinus1 = str.ReadExpGolombCode(); // 0 ue(v) + params.picHeightInMapUnitsMinus1 = str.ReadExpGolombCode(); // 0 ue(v) + params.frameMbsOnlyFlag = static_cast(str.Read1(1)); // 0 u(1) + if (!params.frameMbsOnlyFlag) + { + params.mbAdaptiveFrameFieldFlag = static_cast(str.Read1(1)); // 0 u(1) + } + params.direct8x8InferenceFlag = static_cast(str.Read1(1)); // 0 u(1) + params.frameCroppingFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.frameCroppingFlag) + { + params.frameCropLeftOffset = str.ReadExpGolombCode(); // 0 ue(v) + params.frameCropRightOffset = str.ReadExpGolombCode(); // 0 ue(v) + params.frameCropTopOffset = 
str.ReadExpGolombCode(); // 0 ue(v) + params.frameCropBottomOffset = str.ReadExpGolombCode(); // 0 ue(v) + } + params.vuiParametersPresentFlag = static_cast(str.Read1(1)); // 0 u(1) + if (params.vuiParametersPresentFlag) + { + ISO_LOG(LOG_ERROR, "the feather doesn't work!!\n"); + } + retSps = params; + return true; +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/AvcParser.h b/src/isolib/atoms/AvcParser.h new file mode 100644 index 00000000..fcea9ccf --- /dev/null +++ b/src/isolib/atoms/AvcParser.h @@ -0,0 +1,174 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AvcParser.h +//! \brief: AvcParser class +//! \detail: AVC parser difinition +//! +//! Created on October 15, 2019, 13:39 PM +//! 
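As a worked note on how the SPS fields parsed above turn into pixel dimensions (the same arithmetic `ConfigSPS` applies in AvcConfigAtom.cpp), here is a hedged sketch; `GetSpsResolution` is a hypothetical helper, and it assumes the buffer is the SPS payload after start-code and emulation-prevention removal (what `TransferStreamToRBSP` from NalUtil.h produces).

```cpp
#include <cstdint>
#include <vector>
#include "AvcParser.h"
#include "Stream.h"

// Hypothetical driver for parseSPS(): derive the cropped luma picture size from an SPS RBSP.
bool GetSpsResolution(const std::vector<std::uint8_t>& rbsp, uint32_t& width, uint32_t& height)
{
    Stream str(rbsp);
    str.Read8();              // skip the one-byte NAL unit header, as ConfigSPS does
    SPSCfgs cfg;
    if (!parseSPS(str, cfg))
    {
        return false;
    }
    // Cropping offsets counted in 2-sample units (4:2:0 assumed, as in ConfigSPS).
    width  = (cfg.picWidthInMbsMinus1 + 1) * 16
             - 2 * (cfg.frameCropLeftOffset + cfg.frameCropRightOffset);
    height = (2 - cfg.frameMbsOnlyFlag) * (cfg.picHeightInMapUnitsMinus1 + 1) * 16
             - 2 * (cfg.frameCropTopOffset + cfg.frameCropBottomOffset);
    return true;
}
```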
+ +#ifndef _AVCPARSER_H_ +#define _AVCPARSER_H_ + +#include + +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +class Stream; + +struct HRDParams //!< HRD Parameters +{ + uint32_t cpbCntMinus1; + uint8_t bitRateScale; + uint8_t cpbSizeScale; + std::vector bitRateValueMinus1; + std::vector cpbSizeValueMinus1; + std::vector cbrFlag; + uint8_t initialCpbRemovalDelayLengthMinus1; + uint8_t cpbRemovalDelayLengthMinus1; + uint8_t dpbOutputDelayLengthMinus1; + uint8_t timeOffsetLength; +}; + +struct VUIParams //!< VUI Parameters +{ + uint8_t aspectRatioInfoPresentFlag; + uint8_t aspectRatioIdc; + uint16_t sarWidth; + uint16_t sarHeight; + uint8_t overscanInfoPresentFlag; + uint8_t overscanAppropriateFlag; + uint8_t videoSignalTypePresentFlag; + uint8_t videoFormat; + uint8_t videoFullRangeFlag; + uint8_t colourDescriptionPresentFlag; + uint8_t colourPrimaries; + uint8_t transferCharacteristics; + uint8_t matrixCoefficients; + uint8_t chromaLocInfoPresentFlag; + uint32_t chromaSampleLocTypeTopField; + uint32_t chromaSampleLocTypeBottomField; + uint8_t timingInfoPresentFlag; + uint32_t numUnitsInTick; + uint32_t timeScale; + uint8_t fixedFrameRateFlag; + uint8_t nalHrdParametersPresentFlag; + HRDParams nalHrdParameters; + uint8_t vclHrdParametersPresentFlag; + HRDParams vclHrdParameters; + uint8_t lowDelayHrdFlag; + uint8_t picStructPresentFlag; + uint8_t bitstreamRestrictionFlag; + uint8_t motionVectorsOverPicBoundariesFlag; + uint32_t maxBytesPerPicDenom; + uint32_t maxBitsPerMbDenom; + uint32_t log2MaxMvLengthHorizontal; + uint32_t log2MaxMvLengthVertical; + uint32_t maxNumReorderFrames; + uint32_t maxDecFrameBuffering; +}; + +struct SPSCfgs //!< SPS configuration +{ + uint8_t profileIdc; + uint8_t profileCompatibility; + uint8_t levelIdc; + uint32_t seqParameterSetId; + uint32_t chromaFormatIdc; + uint8_t separateColourPlaneFlag; + uint32_t bitDepthLumaMinus8; + uint32_t bitDepthChromaMinus8; + uint8_t qpprimeYZeroTransformBypassFlag; + uint8_t seqScalingMatrixPresentFlag; + uint32_t log2MaxFrameNumMinus4; + uint32_t picOrderCntType; + uint32_t log2MaxPicOrderCntLsbMinus4; + uint8_t deltaPicOrderAlwaysZeroFlag; + int32_t offsetForNonRefPic; + int32_t offsetForTopToBottomField; + uint32_t numRefFramesInPicOrderCntCycle; + std::vector offsetForRefFrame; + uint32_t maxNumRefFrames; + uint8_t gapsInFrameNumValueAllowedFlag; + uint32_t picWidthInMbsMinus1; + uint32_t picHeightInMapUnitsMinus1; + uint8_t frameMbsOnlyFlag; + uint8_t mbAdaptiveFrameFieldFlag; + uint8_t direct8x8InferenceFlag; + uint8_t frameCroppingFlag; + uint32_t frameCropLeftOffset; + uint32_t frameCropRightOffset; + uint32_t frameCropTopOffset; + uint32_t frameCropBottomOffset; + uint8_t vuiParametersPresentFlag; + VUIParams vuiParameters; +}; + +//! +//! \brief Parse HRD Parameters +//! +//! \param [in] Stream& +//! bitstream +//! \param [out] HRDParams& +//! parsed HRD parameters +//! +//! \return bool +//! parse success or not +//! +bool parseHRD(Stream& str, HRDParams& retHdr); + +//! +//! \brief Parse VUI Parameters +//! +//! \param [in] Stream& +//! bitstream +//! \param [out] VUIParams& +//! parsed VUI parameters +//! +//! \return bool +//! parse success or not +//! +bool parseVUI(Stream& str, VUIParams& retVui); + +//! +//! \brief Parse SPS Parameters +//! +//! \param [in] Stream& +//! bitstream +//! \param [out] SPSCfgs& +//! parsed SPS parameters +//! +//! \return bool +//! parse success or not +//! 
+bool parseSPS(Stream& str, SPSCfgs& retSps); + +VCD_MP4_END; +#endif /* _AVCPARSER_H_ */ diff --git a/src/isolib/atoms/AvcSampEntry.cpp b/src/isolib/atoms/AvcSampEntry.cpp new file mode 100644 index 00000000..b51a0a61 --- /dev/null +++ b/src/isolib/atoms/AvcSampEntry.cpp @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AvcSampEntry.cpp +//! \brief: AvcSampEntry class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "AvcSampEntry.h" + +VCD_MP4_BEGIN + +AvcSampleEntry::AvcSampleEntry() + : VisualSampleEntryAtom("avc1", "AVC Coding") + , m_avcConfigurationAtom() + , m_isStereoscopic3DPresent(false) + , m_stereoscopic3DAtom() + , m_isSphericalVideoV2AtomPresent(false) + , m_sphericalVideoV2Atom() +{ +} + +AvcSampleEntry::AvcSampleEntry(const AvcSampleEntry& Atom) + : VisualSampleEntryAtom(Atom) + , m_avcConfigurationAtom(Atom.m_avcConfigurationAtom) + , m_isStereoscopic3DPresent(Atom.m_isStereoscopic3DPresent) + , m_stereoscopic3DAtom(Atom.m_stereoscopic3DAtom) + , m_isSphericalVideoV2AtomPresent(Atom.m_isSphericalVideoV2AtomPresent) + , m_sphericalVideoV2Atom(Atom.m_sphericalVideoV2Atom) +{ +} + +AvcConfigurationAtom& AvcSampleEntry::GetAvcConfigurationAtom() +{ + return m_avcConfigurationAtom; +} + +void AvcSampleEntry::CreateStereoscopic3DAtom() +{ + m_isStereoscopic3DPresent = true; +} + +void AvcSampleEntry::CreateSphericalVideoV2Atom() +{ + m_isSphericalVideoV2AtomPresent = true; +} + +const Stereoscopic3D* AvcSampleEntry::GetStereoscopic3DAtom() const +{ + return (m_isStereoscopic3DPresent ? &m_stereoscopic3DAtom : nullptr); +} + +const SphericalVideoV2Atom* AvcSampleEntry::GetSphericalVideoV2Atom() const +{ + return (m_isSphericalVideoV2AtomPresent ? 
&m_sphericalVideoV2Atom : nullptr); +} + +void AvcSampleEntry::ToStream(Stream& str) +{ + VisualSampleEntryAtom::ToStream(str); + + m_avcConfigurationAtom.ToStream(str); + + if (m_isStereoscopic3DPresent) + { + m_stereoscopic3DAtom.ToStream(str); + } + + if (m_isSphericalVideoV2AtomPresent) + { + m_sphericalVideoV2Atom.ToStream(str); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void AvcSampleEntry::FromStream(Stream& str) +{ + VisualSampleEntryAtom::FromStream(str); + + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + Stream subStream = str.ReadSubAtomStream(AtomType); + + // Handle this Atom based on the type + if (AtomType == "avcC") + { + m_avcConfigurationAtom.FromStream(subStream); + } + else if (AtomType == "st3d") + { + m_stereoscopic3DAtom.FromStream(subStream); + m_isStereoscopic3DPresent = true; + } + else if (AtomType == "sv3d") + { + m_sphericalVideoV2Atom.FromStream(subStream); + m_isSphericalVideoV2AtomPresent = true; + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping unknown Atom of type '%s' inside AvcSampleEntry\n", type); + } + } + + // @todo should have also CleanApertureAtom / PixelAspectRatioAtom reading here +} + +AvcSampleEntry* AvcSampleEntry::Clone() const +{ + return (new AvcSampleEntry(*this)); +} + +const DecoderConfigurationRecord* AvcSampleEntry::GetConfigurationRecord() const +{ + return &m_avcConfigurationAtom.GetConfiguration(); +} + +const Atom* AvcSampleEntry::GetConfigurationAtom() const +{ + return &m_avcConfigurationAtom; +} + +VCD_MP4_END diff --git a/src/isolib/atoms/AvcSampEntry.h b/src/isolib/atoms/AvcSampEntry.h new file mode 100644 index 00000000..58f12bd4 --- /dev/null +++ b/src/isolib/atoms/AvcSampEntry.h @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AvcSampEntry.h +//! \brief: AVC Sample Entry class. +//! \detail: 'avc1' Atom implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
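Before the class declaration below, a short usage sketch of `AvcSampleEntry` for the writing path (illustrative only): `WriteAvc1SampleEntry` is a hypothetical helper, and `SetConfiguration()` is the setter name the `MEMBER_SETANDGET_FUNC_WITH_OPTION` macro is assumed to generate, mirroring the `GetConfiguration()` call used by `GetConfigurationRecord()` above.

```cpp
// Hypothetical helper: wrap a populated avcC record in an 'avc1' sample entry and serialize it.
void WriteAvc1SampleEntry(const AvcDecoderConfigurationRecord& rec, Stream& str)
{
    AvcSampleEntry entry;                                    // 'avc1', "AVC Coding"
    entry.GetAvcConfigurationAtom().SetConfiguration(rec);   // attach the 'avcC' child
    entry.CreateStereoscopic3DAtom();                        // optional: mark the 'st3d' child as present
    entry.ToStream(str);                                     // writes avc1 with avcC (and st3d) children
}
```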
+ +#ifndef _AVCSAMPLEENTRY_H_ +#define _AVCSAMPLEENTRY_H_ + +#include "AvcConfigAtom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "BasicVideoAtom.h" +#include "VisualSampEntryAtom.h" + +VCD_MP4_BEGIN + +class AvcSampleEntry : public VisualSampleEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + AvcSampleEntry(); + AvcSampleEntry(const AvcSampleEntry& Atom); + + AvcSampleEntry& operator=(const AvcSampleEntry&) = default; + + //! + //! \brief Destructor + //! + virtual ~AvcSampleEntry() = default; + + //! + //! \brief Get AvcConfiguration Atom + //! + //! \return AvcConfigurationAtom& + //! AvcConfiguration Atom + //! + AvcConfigurationAtom& GetAvcConfigurationAtom(); + + //! + //! \brief Create Stereoscopic3D Atom + //! + //! \return void + //! + void CreateStereoscopic3DAtom(); + + //! + //! \brief Create SphericalVideo Atom + //! + //! \return void + //! + void CreateSphericalVideoV2Atom(); + + //! + //! \brief Get Stereoscopic3D Atom + //! + //! \return const Stereoscopic3D* + //! Stereoscopic3D Atom + //! + virtual const Stereoscopic3D* GetStereoscopic3DAtom() const override; + + //! + //! \brief Get SphericalVideo Atom + //! + //! \return const SphericalVideoV2Atom* + //! Stereoscopic3D Atom + //! + virtual const SphericalVideoV2Atom* GetSphericalVideoV2Atom() const override; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str) override; + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str) override; + + //! + //! \brief Get Copy of AvcSampleEntry + //! + //! \return AvcSampleEntry* + //! AvcSampleEntry Atom + //! + virtual AvcSampleEntry* Clone() const override; + + //! + //! \brief Get ConfigurationRecord + //! + //! \return const DecoderConfigurationRecord* + //! DecoderConfigurationRecord value + //! + virtual const DecoderConfigurationRecord* GetConfigurationRecord() const override; + + //! + //! \brief Get Configuration Atom + //! + //! \return const Atom* + //! Configuration Atom + //! + virtual const Atom* GetConfigurationAtom() const override; + +private: + AvcConfigurationAtom m_avcConfigurationAtom; //!< avc configuration atom + bool m_isStereoscopic3DPresent; //!< is Stereoscopic3D Present + Stereoscopic3D m_stereoscopic3DAtom; //!< stereoscopic3D Atom + bool m_isSphericalVideoV2AtomPresent; //!< is SphericalVideoV2Atom Present + SphericalVideoV2Atom m_sphericalVideoV2Atom; //!< spherical Video V2 Atom +}; + +VCD_MP4_END; +#endif /* _AVCSAMPLEENTRY_H_ */ diff --git a/src/isolib/atoms/BasicAudAtom.cpp b/src/isolib/atoms/BasicAudAtom.cpp new file mode 100644 index 00000000..e69b927e --- /dev/null +++ b/src/isolib/atoms/BasicAudAtom.cpp @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BasicAudAtom.cpp +//! \brief: BasicAudAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "BasicAudAtom.h" + +VCD_MP4_BEGIN + + +SpatialAudioAtom::SpatialAudioAtom() + : Atom("SA3D") + , m_version(0) + , m_type(0) + , m_order(1) + , m_channelOrder(0) + , m_norm(0) + , m_channelMap() +{ +} + +void SpatialAudioAtom::SetChannelMap(const std::vector& channelMap) +{ + m_channelMap.clear(); + m_channelMap = channelMap; +} + +std::vector SpatialAudioAtom::GetChannelMap() const +{ + return m_channelMap; +} + +void SpatialAudioAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + + str.Write8(m_version); + str.Write8(m_type); + str.Write32(m_order); + str.Write8(m_channelOrder); + str.Write8(m_norm); + str.Write32(static_cast(m_channelMap.size())); + for (uint32_t i = 0; i < m_channelMap.size(); i++) + { + str.Write32(m_channelMap.at(i)); + } + + UpdateSize(str); +} + +void SpatialAudioAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + m_version = str.Read8(); + m_type = str.Read8(); + m_order = str.Read32(); + m_channelOrder = str.Read8(); + m_norm = str.Read8(); + uint32_t numberOfChannels = str.Read32(); + m_channelMap.clear(); + for (uint32_t i = 0; i < numberOfChannels; i++) + { + uint32_t value = str.Read32(); + m_channelMap.push_back(value); + } +} + +NonDiegeticAudioAtom::NonDiegeticAudioAtom() + : Atom("SAND") + , m_version(0) +{ +} + +void NonDiegeticAudioAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + + str.Write8(m_version); + + UpdateSize(str); +} + +void NonDiegeticAudioAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + m_version = str.Read8(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/BasicAudAtom.h b/src/isolib/atoms/BasicAudAtom.h new file mode 100644 index 00000000..b0286d7c --- /dev/null +++ b/src/isolib/atoms/BasicAudAtom.h @@ -0,0 +1,242 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BasicAudAtom.h +//! \brief: Basic Audio Atom class. +//! \detail: Definitions for audio atoms. +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _SPATIALAUDIOATOM_H_ +#define _SPATIALAUDIOATOM_H_ + +#include +#include +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +class SpatialAudioAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + SpatialAudioAtom(); + + //! + //! \brief Destructor + //! + virtual ~SpatialAudioAtom() = default; + + //! + //! \brief Set and Get function for m_version member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_version + //! m_version member in class + //! \param [in] Version + //! m_version name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_version, Version, const); + + //! + //! \brief Set and Get function for m_type member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_type + //! m_type member in class + //! \param [in] AmbisonicType + //! m_type name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_type, AmbisonicType, const); + + //! + //! \brief Set and Get function for m_order member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_order + //! m_order member in class + //! \param [in] AmbisonicOrder + //! m_order name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_order, AmbisonicOrder, const); + + //! + //! \brief Set and Get function for m_channelOrder member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_channelOrder + //! m_channelOrder member in class + //! \param [in] AmbisonicChannelOrdering + //! m_channelOrder name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_channelOrder, AmbisonicChannelOrdering, const); + + //! + //! \brief Set and Get function for m_norm member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_norm + //! m_norm member in class + //! \param [in] AmbisonicNormalization + //! m_norm name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_norm, AmbisonicNormalization, const); + + //! + //! \brief Set Channel Map + //! + //! \param [in] const std::vector& + //! 
channel Map value + //! + //! \return void + //! + void SetChannelMap(const std::vector& channelMap); + + //! + //! \brief Get Channel Map + //! + //! \return std::vector + //! Channel Map + //! + std::vector GetChannelMap() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + uint8_t m_version; //!< version + uint8_t m_type; //!< type + uint32_t m_order; //!< order + uint8_t m_channelOrder; //!< channel order + uint8_t m_norm; //!< norm + std::vector m_channelMap; //!< size = number of channels +}; + +class NonDiegeticAudioAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + NonDiegeticAudioAtom(); + + //! + //! \brief Destructor + //! + virtual ~NonDiegeticAudioAtom() = default; + + //! + //! \brief Set and Get function for m_version member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_version + //! m_version member in class + //! \param [in] Version + //! m_version name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_version, Version, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + uint8_t m_version; //!< version +}; + +VCD_MP4_END; +#endif /* _SPATIALAUDIOATOM_H_ */ diff --git a/src/isolib/atoms/BasicProjAtom.cpp b/src/isolib/atoms/BasicProjAtom.cpp new file mode 100644 index 00000000..133583ba --- /dev/null +++ b/src/isolib/atoms/BasicProjAtom.cpp @@ -0,0 +1,215 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
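A brief usage sketch for the SA3D atom declared above (illustrative only, not part of the patch): the constructor defaults shown in BasicAudAtom.cpp (version 0, ambisonic order 1) are kept, `WriteSpatialAudio` is a hypothetical helper, and the channel map type is assumed to be `std::vector<uint32_t>` to match `m_channelMap`.

```cpp
// Hypothetical helper: emit an 'SA3D' atom for first-order ambisonic audio.
void WriteSpatialAudio(Stream& str)
{
    SpatialAudioAtom sa3d;
    sa3d.SetChannelMap({0, 1, 2, 3});  // four channels with an identity mapping, as an example
    sa3d.ToStream(str);                // writes header + version/type/order/channelOrder/norm + channel map
}
```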
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BasicProjAtom.cpp +//! \brief: BasicProjAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! + +#include "BasicProjAtom.h" +#include + +VCD_MP4_BEGIN + +ProjectionHeader::ProjectionHeader() + : FullAtom("prhd", 0, 0) + , m_pose{} +{ +} + +void ProjectionHeader::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write32(static_cast(m_pose.yaw)); + str.Write32(static_cast(m_pose.pitch)); + str.Write32(static_cast(m_pose.roll)); + UpdateSize(str); +} + +void ProjectionHeader::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_pose.yaw = static_cast(str.Read32()); + m_pose.pitch = static_cast(str.Read32()); + m_pose.roll = static_cast(str.Read32()); +} + +ProjectionDataAtom::ProjectionDataAtom(FourCCInt proj_type, uint8_t version, uint32_t flags) + : FullAtom(proj_type, version, flags) +{ +} + +void ProjectionDataAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + UpdateSize(str); +} + +void ProjectionDataAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); +} + +CubemapProjection::CubemapProjection() + : ProjectionDataAtom("cbmp", 0, 0) + , m_layout(0) + , m_padding(0) +{ +} + +void CubemapProjection::ToStream(Stream& str) +{ + ProjectionDataAtom::ToStream(str); + str.Write32(m_layout); + str.Write32(m_padding); + UpdateSize(str); +} + +void CubemapProjection::FromStream(Stream& str) +{ + ProjectionDataAtom::FromStream(str); + m_layout = str.Read32(); + m_padding = str.Read32(); +} + +EquirectangularProjection::EquirectangularProjection() + : ProjectionDataAtom("equi", 0, 0) + , m_projectionBounds{} +{ +} + +void EquirectangularProjection::ToStream(Stream& str) +{ + ProjectionDataAtom::ToStream(str); + str.Write32(m_projectionBounds.top_32FP); + str.Write32(m_projectionBounds.bottom_32FP); + str.Write32(m_projectionBounds.left_32FP); + str.Write32(m_projectionBounds.right_32FP); + UpdateSize(str); +} + +void EquirectangularProjection::FromStream(Stream& str) +{ + ProjectionDataAtom::FromStream(str); + m_projectionBounds.top_32FP = str.Read32(); + m_projectionBounds.bottom_32FP = str.Read32(); + m_projectionBounds.left_32FP = str.Read32(); + m_projectionBounds.right_32FP = str.Read32(); +} + +Projection::Projection() + : Atom("proj") + , m_projectionHeaderAtom() + , m_projectionFormat(ProjectFormat::UNKNOWN) + , m_cubemapProjectionAtom() + , m_equirectangularProjectionAtom() +{ +} + +Projection::ProjectFormat Projection::GetProjectFormat() const +{ + return m_projectionFormat; +} + +const CubemapProjection& Projection::GetCubemapProjectionAtom() const +{ + return m_cubemapProjectionAtom; +} + +void Projection::SetCubemapProjectionAtom(const CubemapProjection& projection) +{ + m_projectionFormat = ProjectFormat::CUBEMAP; + m_cubemapProjectionAtom = projection; +} + +const EquirectangularProjection& Projection::GetEquirectangularProjectionAtom() const +{ + return m_equirectangularProjectionAtom; +} + +void Projection::SetEquirectangularProjectionAtom(const EquirectangularProjection& 
projection) +{ + m_projectionFormat = ProjectFormat::ERP; + m_equirectangularProjectionAtom = projection; +} + +void Projection::ToStream(Stream& str) +{ + // Write Atom headers + WriteAtomHeader(str); + + m_projectionHeaderAtom.ToStream(str); + if (m_projectionFormat == ProjectFormat::CUBEMAP) + { + m_cubemapProjectionAtom.ToStream(str); + } + else if (m_projectionFormat == ProjectFormat::ERP) + { + m_equirectangularProjectionAtom.ToStream(str); + } + else + { + // MESH PROJECTION NOT SUPPORTED + assert(false); + } + + UpdateSize(str); +} + +void Projection::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + while (str.BytesRemain() >= 8) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + if (AtomType == "prhd") + { + m_projectionHeaderAtom.FromStream(subBitstr); + } + else if (AtomType == "cbmp") + { + m_projectionFormat = ProjectFormat::CUBEMAP; + m_cubemapProjectionAtom.FromStream(subBitstr); + } + else if (AtomType == "equi") + { + m_projectionFormat = ProjectFormat::ERP; + m_equirectangularProjectionAtom.FromStream(subBitstr); + } + else if (AtomType == "mshp") + { + m_projectionFormat = ProjectFormat::MESH; + // this projection type Atom parsing not supported so skip. + } + // unknown Atoms are skipped. + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/BasicProjAtom.h b/src/isolib/atoms/BasicProjAtom.h new file mode 100644 index 00000000..f4c41ca0 --- /dev/null +++ b/src/isolib/atoms/BasicProjAtom.h @@ -0,0 +1,384 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BasicProjAtom.h +//! \brief: Basic Projection Atom class +//! \detail: Definition for Projection Atoms. +//! +//! Created on October 14, 2019, 13:39 PM +//! + +#ifndef GOOGLEPROJECTIONATOM_H +#define GOOGLEPROJECTIONATOM_H + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class ProjectionHeader : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ProjectionHeader(); + + //! + //! \brief Destructor + //! 
+ virtual ~ProjectionHeader() = default; + + struct PoseInDegrees_16_16_FP //!< head pose + { + int32_t yaw; + int32_t pitch; + int32_t roll; + }; + + //! + //! \brief Set and Get function for m_pose member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_pose + //! m_pose member in class + //! \param [in] Pose + //! m_pose name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const PoseInDegrees_16_16_FP&, m_pose, Pose, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + PoseInDegrees_16_16_FP m_pose; //!< head Pose values +}; + +class ProjectionDataAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ProjectionDataAtom(FourCCInt proj_type, uint8_t version, uint32_t flags); + + //! + //! \brief Destructor + //! + virtual ~ProjectionDataAtom() = default; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); +}; + +class CubemapProjection : public ProjectionDataAtom +{ +public: + + //! + //! \brief Constructor + //! + CubemapProjection(); + + //! + //! \brief Destructor + //! + virtual ~CubemapProjection() = default; + + //! + //! \brief Set and Get function for m_layout member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_layout + //! m_layout member in class + //! \param [in] Layout + //! m_layout name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_layout, Layout, const); + + //! + //! \brief Set and Get function for m_padding member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_padding + //! m_padding member in class + //! \param [in] m_padding + //! m_padding name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_padding, Padding, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + uint32_t m_layout; //!< layout + uint32_t m_padding; //!< padding +}; + +class EquirectangularProjection : public ProjectionDataAtom +{ +public: + + //! + //! \brief Constructor + //! + EquirectangularProjection(); + + //! + //! \brief Destructor + //! 
+ virtual ~EquirectangularProjection() = default; + + struct ProjectionBounds //!< projection bounds + { + uint32_t top_32FP; + uint32_t bottom_32FP; + uint32_t left_32FP; + uint32_t right_32FP; + }; + + //! + //! \brief Set and Get function for m_projectionBounds member + //! + //! \param [in] const ProjectionBounds& + //! value to set + //! \param [in] m_projectionBounds + //! m_projectionBounds member in class + //! \param [in] ProjectionBounds + //! m_projectionBounds name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const ProjectionBounds&, m_projectionBounds, ProjectionBounds, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + ProjectionBounds m_projectionBounds; //!< projection bounds +}; + +class Projection : public Atom +{ +public: + + //! + //! \brief Constructor + //! + Projection(); + + //! + //! \brief Destructor + //! + virtual ~Projection() = default; + + enum class ProjectFormat : uint8_t //!< projection format + { + UNKNOWN = 0, + CUBEMAP = 1, + ERP = 2, + MESH = 3 + }; + + //! + //! \brief Set and Get function for m_projectionHeaderAtom member + //! + //! \param [in] const ProjectionHeader& + //! value to set + //! \param [in] m_projectionHeaderAtom + //! m_projectionHeaderAtom member in class + //! \param [in] ProjectionHeaderAtom + //! m_projectionHeaderAtom name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const ProjectionHeader&, m_projectionHeaderAtom, ProjectionHeaderAtom, const); + + //! + //! \brief Get ProjectFormat + //! + //! \return ProjectFormat + //! ProjectFormat + //! + ProjectFormat GetProjectFormat() const; + + //! + //! \brief Get CubemapProjection + //! + //! \return const CubemapProjection& + //! CubemapProjection + //! + const CubemapProjection& GetCubemapProjectionAtom() const; + + //! + //! \brief Set Cubemap Projection Atom + //! + //! \param [in] const CubemapProjection& + //! projection value + //! + //! \return void + //! + void SetCubemapProjectionAtom(const CubemapProjection& projection); + + //! + //! \brief Get Equirectangular Projection atom + //! + //! \return const EquirectangularProjection& + //! Equirectangular Projection atom + //! + const EquirectangularProjection& GetEquirectangularProjectionAtom() const; + + //! + //! \brief Set Equirectangular Projection atom + //! + //! \param [in] const EquirectangularProjection& + //! Equirectangular Projection atom value + //! + //! \return void + //! + void SetEquirectangularProjectionAtom(const EquirectangularProjection& projection); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void FromStream(Stream& str); + +private: + ProjectionHeader m_projectionHeaderAtom; //!< projection Header Atom + ProjectFormat m_projectionFormat; //!< projection Format + CubemapProjection m_cubemapProjectionAtom; //!< cubemap Projection Atom + EquirectangularProjection m_equirectangularProjectionAtom; //!< equirectangular Projection Atom +}; + +VCD_MP4_END; +#endif /* GOOGLEPROJECTIONATOM_H */ diff --git a/src/isolib/atoms/BasicVideoAtom.cpp b/src/isolib/atoms/BasicVideoAtom.cpp new file mode 100644 index 00000000..8ce5861d --- /dev/null +++ b/src/isolib/atoms/BasicVideoAtom.cpp @@ -0,0 +1,399 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BasicVideoAtom.cpp +//! \brief: BasicVideoAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! 
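
The Google spherical-video projection atoms declared above are plain containers: a `Projection` atom carries a `ProjectionHeader` plus exactly one projection-data child (cubemap or equirectangular), and is serialized as the `proj` atom consumed by `SphericalVideoV2Atom`. A minimal sketch of how a writer might populate an equirectangular projection follows; the namespace behind `VCD_MP4_BEGIN`/`VCD_MP4_END`, the macro-generated setter name `SetProjectionBounds`, and the 0.32 fixed-point bounds convention are assumptions, not confirmed by this diff.

```cpp
#include "BasicProjAtom.h"
#include "Stream.h"

using namespace VCD::MP4;   // assumption: VCD_MP4_BEGIN/END expand to VCD::MP4

void WriteErpProjection(Stream& out)
{
    EquirectangularProjection erp;
    EquirectangularProjection::ProjectionBounds bounds{};
    bounds.top_32FP    = 0;            // full panorama, no cropping
    bounds.bottom_32FP = 0xFFFFFFFF;   // assumed 0.32 fixed-point convention
    bounds.left_32FP   = 0;
    bounds.right_32FP  = 0xFFFFFFFF;
    erp.SetProjectionBounds(bounds);   // assumed MEMBER_SETANDGET-generated setter

    Projection proj;
    proj.SetEquirectangularProjectionAtom(erp);  // declared in BasicProjAtom.h
    proj.ToStream(out);                          // serializes the 'proj' atom
}
```
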
+ +#include "BasicVideoAtom.h" +#include +#include + +VCD_MP4_BEGIN + +SphericalVideoHeader::SphericalVideoHeader() + : FullAtom("svhd", 0, 0) + , m_metadataSource("MP4 Tool " + std::string(MP4_BUILD_VERSION) + "\0") +{ +} + +void SphericalVideoHeader::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.WriteZeroEndString(m_metadataSource); + UpdateSize(str); +} + +void SphericalVideoHeader::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + str.ReadZeroEndString(m_metadataSource); +} + +SphericalVideoV1Atom::SphericalVideoV1Atom() + : Atom("uuid") + , m_xmlMetadata() + , m_globalMetadata{} +{ + std::vector uuid = SPHERICAL_VIDEOV1_GENERAL_UUID; + SetUserType(uuid); +} + +template +void SphericalVideoV1Atom::ReadTag(const std::string& tag, T& data) +{ + std::string xmltag = "<" + tag + ">"; + size_t offset = m_xmlMetadata.find(xmltag); + if (offset != std::string::npos) + { + size_t end = m_xmlMetadata.find(""); + if (end != std::string::npos) + { + if (end > offset && (end - offset - xmltag.length()) > 0) + { + std::string valueStr = + m_xmlMetadata.substr(offset + xmltag.length(), end - offset - xmltag.length()); + remove_if(valueStr.begin(), valueStr.end(), isspace); + data = static_cast(std::stoi(valueStr.c_str(), nullptr)); + } + } + } + return; +} + +template <> +void SphericalVideoV1Atom::ReadTag(const std::string& tag, bool& data) +{ + std::string xmltag = "<" + tag + ">"; + size_t offset = m_xmlMetadata.find(xmltag); + if (offset != std::string::npos) + { + size_t end = m_xmlMetadata.find(""); + if (end != std::string::npos) + { + if (end > offset && (end - offset - xmltag.length()) > 0) + { + std::string valueStr = + m_xmlMetadata.substr(offset + xmltag.length(), end - offset - xmltag.length()); + if (valueStr.find("true") != std::string::npos || valueStr.find("1") != std::string::npos) + { + data = true; + } + else if (valueStr.find("false") != std::string::npos || valueStr.find("0") != std::string::npos) + { + data = false; + } + else + { + ISO_LOG(LOG_WARNING, "Parsing Error in SphericalVideoV1Atom/%s data %s\n", tag.c_str(), valueStr.c_str()); + } + } + } + } +} + +template +void SphericalVideoV1Atom::WriteTag(Stream& str, const std::string& tag, const T data) +{ + std::string xml = "<" + tag + ">"; + str.WriteString(xml); + xml = std::to_string(data).c_str(); + str.WriteString(xml); + xml = "\n"; + str.WriteString(xml); +} + +template <> +void SphericalVideoV1Atom::WriteTag(Stream& str, const std::string& tag, const bool data) +{ + std::string xml = "<" + tag + ">"; + str.WriteString(xml); + xml = data ? 
"true" : "false"; + str.WriteString(xml); + xml = "\n"; + str.WriteString(xml); +} + +template <> +void SphericalVideoV1Atom::WriteTag(Stream& str, const std::string& tag, const std::string data) +{ + std::string xml = "<" + tag + ">"; + str.WriteString(xml); + str.WriteString(data); + xml = "\n"; + str.WriteString(xml); +} + +// This is so that character literals can be used as the data argument +template <> +void SphericalVideoV1Atom::WriteTag(Stream& str, const std::string& tag, const char* data) +{ + WriteTag(str, tag, std::string(data)); +} + +template <> +void SphericalVideoV1Atom::ReadTag(const std::string& tag, std::string& data) +{ + std::string xmltag = "<" + tag + ">"; + size_t offset = m_xmlMetadata.find(xmltag); + if (offset != std::string::npos) + { + size_t end = m_xmlMetadata.find(""); + if (end != std::string::npos) + { + if (end > offset && (end - offset - xmltag.length()) > 0) + { + data = m_xmlMetadata.substr(offset + xmltag.length(), end - offset - xmltag.length()); + } + } + } +} + +void SphericalVideoV1Atom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + std::string header = + ""; + str.WriteString(header); + + WriteTag(str, "GSpherical:Spherical", true); // must be true on v1.0 + WriteTag(str, "GSpherical:Stitched", true); // must be true on v1.0 + WriteTag(str, "GSpherical:StitchingSoftware", m_globalMetadata.stitchedSW); + WriteTag(str, "GSpherical:ProjectFormat", "equirectangular"); // must be "equirectangular" on v1.0 + + if (m_globalMetadata.stereoType != StereoTypeV1::UNDEFINED) + { + std::string stereoType; + if (m_globalMetadata.stereoType == StereoTypeV1::STEREO_TOP_BOTTOM) + { + stereoType = "top-bottom"; + } + else if (m_globalMetadata.stereoType == StereoTypeV1::STEREO_LEFT_RIGHT) + { + stereoType = "left-right"; + } + else + { + stereoType = "mono"; + } + WriteTag(str, "GSpherical:StereoMode", stereoType); + } + + if (m_globalMetadata.sourceCount) + { + WriteTag(str, "GSpherical:SourceCount", m_globalMetadata.sourceCount); + } + if (m_globalMetadata.initViewHead) + { + WriteTag(str, "GSpherical:InitialViewHeadingDegrees", m_globalMetadata.initViewHead); + } + if (m_globalMetadata.initViewPitch) + { + WriteTag(str, "GSpherical:InitialViewPitchDegrees", m_globalMetadata.initViewPitch); + } + if (m_globalMetadata.initViewRoll) + { + WriteTag(str, "GSpherical:InitialViewRollDegrees", m_globalMetadata.initViewRoll); + } + if (m_globalMetadata.timestamp) + { + WriteTag(str, "GSpherical:Timestamp", m_globalMetadata.timestamp); + } + if (m_globalMetadata.fullPanoWidth) + { + WriteTag(str, "GSpherical:FullPanoWidthPixels", m_globalMetadata.fullPanoWidth); + } + if (m_globalMetadata.fullPanoHeight) + { + WriteTag(str, "GSpherical:FullPanoHeightPixels", m_globalMetadata.fullPanoHeight); + } + if (m_globalMetadata.croppedAreaImageWidth) + { + WriteTag(str, "GSpherical:CroppedAreaImageWidthPixels", m_globalMetadata.croppedAreaImageWidth); + } + if (m_globalMetadata.croppedAreaImageHeight) + { + WriteTag(str, "GSpherical:CroppedAreaImageHeightPixels", m_globalMetadata.croppedAreaImageHeight); + } + if (m_globalMetadata.croppedAreaLeft) + { + WriteTag(str, "GSpherical:CroppedAreaLeftPixels", m_globalMetadata.croppedAreaLeft); + } + if (m_globalMetadata.croppedAreaTop) + { + WriteTag(str, "GSpherical:CroppedAreaTopPixels", m_globalMetadata.croppedAreaTop); + } + + std::string footer = ""; + str.WriteString(footer); + UpdateSize(str); +} + +void SphericalVideoV1Atom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + str.ReadStringWithLen(m_xmlMetadata, 
static_cast(str.BytesRemain())); + + std::string tag = "GSpherical:Spherical"; + ReadTag(tag, m_globalMetadata.isSpherical); + + tag = "GSpherical:Stitched"; + ReadTag(tag, m_globalMetadata.isStitched); + + tag = "GSpherical:StitchingSoftware"; + ReadTag(tag, m_globalMetadata.stitchedSW); + + tag = "GSpherical:ProjectFormat"; + std::string projection; + ReadTag(tag, projection); + std::transform(projection.begin(), projection.end(), projection.begin(), ::tolower); + if (projection == "equirectangular") + { + m_globalMetadata.projectionFormat = ProjectFormat::ERP; + } + else + { + ISO_LOG(LOG_WARNING, "Parsing Error in SphericalVideoV1Atom/%s data %s\n", tag.c_str(), projection.c_str()); + } + + tag = "GSpherical:StereoMode"; + std::string stereoType; + ReadTag(tag, stereoType); + std::transform(stereoType.begin(), stereoType.end(), stereoType.begin(), ::tolower); + if (stereoType == "mono") + { + m_globalMetadata.stereoType = StereoTypeV1::MONO_TYPE; + } + else if (stereoType == "top-bottom") + { + m_globalMetadata.stereoType = StereoTypeV1::STEREO_TOP_BOTTOM; + } + else if (stereoType == "left-right") + { + m_globalMetadata.stereoType = StereoTypeV1::STEREO_LEFT_RIGHT; + } + else + { + m_globalMetadata.stereoType = StereoTypeV1::UNDEFINED; + } + + tag = "GSpherical:SourceCount"; + ReadTag(tag, m_globalMetadata.sourceCount); + + tag = "GSpherical:InitialViewHeadingDegrees"; + ReadTag(tag, m_globalMetadata.initViewHead); + + tag = "GSpherical:InitialViewPitchDegrees"; + ReadTag(tag, m_globalMetadata.initViewPitch); + + tag = "GSpherical:InitialViewRollDegrees"; + ReadTag(tag, m_globalMetadata.initViewRoll); + + tag = "GSpherical:Timestamp"; + ReadTag(tag, m_globalMetadata.timestamp); + + tag = "GSpherical:FullPanoWidthPixels"; + ReadTag(tag, m_globalMetadata.fullPanoWidth); + + tag = "GSpherical:FullPanoHeightPixels"; + ReadTag(tag, m_globalMetadata.fullPanoHeight); + + tag = "GSpherical:CroppedAreaImageWidthPixels"; + ReadTag(tag, m_globalMetadata.croppedAreaImageWidth); + + tag = "GSpherical:CroppedAreaImageHeightPixels"; + ReadTag(tag, m_globalMetadata.croppedAreaImageHeight); + + tag = "GSpherical:CroppedAreaLeftPixels"; + ReadTag(tag, m_globalMetadata.croppedAreaLeft); + + tag = "GSpherical:CroppedAreaTopPixels"; + ReadTag(tag, m_globalMetadata.croppedAreaTop); +} + +SphericalVideoV2Atom::SphericalVideoV2Atom() + : Atom("sv3d") + , m_sphericalVideoHeaderAtom() + , m_projectionAtom() +{ +} + +void SphericalVideoV2Atom::ToStream(Stream& str) +{ + // Write Atom headers + WriteAtomHeader(str); + + m_sphericalVideoHeaderAtom.ToStream(str); + m_projectionAtom.ToStream(str); + + UpdateSize(str); +} + +void SphericalVideoV2Atom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + while (str.BytesRemain() >= 8) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + if (AtomType == "svhd") + { + m_sphericalVideoHeaderAtom.FromStream(subBitstr); + } + else if (AtomType == "proj") + { + m_projectionAtom.FromStream(subBitstr); + } + // unknown Atoms are skipped. 
+ } +} + +Stereoscopic3D::Stereoscopic3D() + : FullAtom("st3d", 0, 0) + , m_stereoMode(StereoTypeV2::MONO_TYPE) +{ +} + +void Stereoscopic3D::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write8(static_cast(m_stereoMode)); + UpdateSize(str); +} + +void Stereoscopic3D::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_stereoMode = static_cast(str.Read8()); +} + +VCD_MP4_END diff --git a/src/isolib/atoms/BasicVideoAtom.h b/src/isolib/atoms/BasicVideoAtom.h new file mode 100644 index 00000000..dfbd91cb --- /dev/null +++ b/src/isolib/atoms/BasicVideoAtom.h @@ -0,0 +1,358 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BasicVideoAtom.h +//! \brief: Basic Video Atom class +//! \detail: Basic Video atoms definitions. +//! +//! Created on October 14, 2019, 13:39 PM +//! + +#ifndef _GOOGLEVIDEOATOM_H_ +#define _GOOGLEVIDEOATOM_H_ + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "BasicProjAtom.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class SphericalVideoHeader : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SphericalVideoHeader(); + + //! + //! \brief Destructor + //! + virtual ~SphericalVideoHeader() = default; + + //! + //! \brief Set and Get function for m_metadataSource member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_metadataSource + //! m_metadataSource member in class + //! \param [in] MetadataSource + //! m_metadataSource name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_metadataSource, MetadataSource, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void FromStream(Stream& str); + +private: + std::string m_metadataSource; //!< meta data source +}; + +#define SPHERICAL_VIDEOV1_GENERAL_UUID \ + { \ + 0xff, 0xcc, 0x82, 0x63, 0xf8, 0x55, 0x4a, 0x93, 0x88, 0x14, 0x58, 0x7a, 0x02, 0x52, 0x1f, 0xdd \ + } + +class SphericalVideoV1Atom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + SphericalVideoV1Atom(); + + //! + //! \brief Destructor + //! + virtual ~SphericalVideoV1Atom() = default; + + enum class StereoTypeV1 : uint8_t //!< stereo type + { + UNDEFINED = 0, + MONO_TYPE = 1, + STEREO_TOP_BOTTOM = 2, + STEREO_LEFT_RIGHT = 3 + }; + + enum class ProjectFormat : uint8_t //!< projection format + { + UNKNOWN = 0, + ERP = 1 + }; + + struct GeneralMetaData //!< general meta data + { + bool isSpherical; + bool isStitched; + std::string stitchedSW; + ProjectFormat projectionFormat; + StereoTypeV1 stereoType; + uint32_t sourceCount; + int32_t initViewHead; + int32_t initViewPitch; + int32_t initViewRoll; + uint64_t timestamp; + uint32_t fullPanoWidth; + uint32_t fullPanoHeight; + uint32_t croppedAreaImageWidth; + uint32_t croppedAreaImageHeight; + uint32_t croppedAreaLeft; + uint32_t croppedAreaTop; + }; + + //! + //! \brief Set and Get function for m_globalMetadata member + //! + //! \param [in] const GeneralMetaData& + //! value to set + //! \param [in] m_globalMetadata + //! m_globalMetadata member in class + //! \param [in] GeneralMetaData + //! m_globalMetadata name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const GeneralMetaData&, m_globalMetadata, GeneralMetaData, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + + //! + //! \brief Read Tag + //! + //! \param [in] const std::string& + //! tag + //! \param [out] T& + //! value + //! + //! \return void + //! + template + void ReadTag(const std::string& tag, T& value); + + //! + //! \brief Write Tag + //! + //! \param [in] Stream& str + //! bitstream + //! \param [in] const std::string& + //! tag + //! \param [in] const T + //! value + //! + //! \return void + //! + template + void WriteTag(Stream& str, const std::string& tag, const T value); + +private: + std::string m_xmlMetadata; //!< xml meta data + GeneralMetaData m_globalMetadata; //! global meta data +}; + +class SphericalVideoV2Atom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + SphericalVideoV2Atom(); + + //! + //! \brief Destructor + //! + virtual ~SphericalVideoV2Atom() = default; + + //! + //! \brief Set and Get function for m_sphericalVideoHeaderAtom member + //! + //! \param [in] const SphericalVideoHeader& + //! value to set + //! \param [in] m_sphericalVideoHeaderAtom + //! m_sphericalVideoHeaderAtom member in class + //! \param [in] SphericalVideoHeaderAtom + //! m_sphericalVideoHeaderAtom name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const SphericalVideoHeader&, m_sphericalVideoHeaderAtom, SphericalVideoHeaderAtom, const); + + //! + //! \brief Set and Get function for m_projectionAtom member + //! + //! 
\param [in] const Projection& + //! value to set + //! \param [in] m_projectionAtom + //! m_projectionAtom member in class + //! \param [in] ProjectionAtom + //! m_projectionAtom name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const Projection&, m_projectionAtom, ProjectionAtom, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + SphericalVideoHeader m_sphericalVideoHeaderAtom; //!< spherical Video Header Atom + Projection m_projectionAtom; //!< projection atom +}; + +class Stereoscopic3D : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + Stereoscopic3D(); + + //! + //! \brief Destructor + //! + virtual ~Stereoscopic3D() = default; + + enum class StereoTypeV2 : uint8_t //!< stereo type + { + MONO_TYPE = 0, + STEREO_TOPBOTTOM = 1, + STEREO_LEFTRIGHT = 2, + STEREO_STEREOCUSTOM = 3 + }; + + //! + //! \brief Set and Get function for m_stereoMode member + //! + //! \param [in] const StereoTypeV2& + //! value to set + //! \param [in] m_stereoMode + //! m_stereoMode member in class + //! \param [in] StereoMode + //! m_stereoMode name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const StereoTypeV2&, m_stereoMode, StereoMode, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + StereoTypeV2 m_stereoMode; //!< stereo mode +}; + +VCD_MP4_END; +#endif /* GOOGLESPHERICALVIDEOV1ATOM_H */ diff --git a/src/isolib/atoms/ChannelLayoutAtom.cpp b/src/isolib/atoms/ChannelLayoutAtom.cpp new file mode 100644 index 00000000..00655511 --- /dev/null +++ b/src/isolib/atoms/ChannelLayoutAtom.cpp @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ChannelLayoutAtom.cpp +//! \brief: ChannelLayoutAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "ChannelLayoutAtom.h" +#include + +VCD_MP4_BEGIN + +#define CHANNEL_STRUCTURED 1u +#define OBJECT_STRUCTURED 2u + +ChannelLayoutAtom::ChannelLayoutAtom() + : FullAtom("chnl", 0, 0) + , m_strStructured(0) + , m_definedLayout(0) + , m_omittedChannelsMap(0) + , m_objectCount(0) + , m_channelNumber(0) + , m_channelLayouts() +{ +} + +ChannelLayoutAtom::ChannelLayoutAtom(const ChannelLayoutAtom& atom) + : FullAtom(atom.GetType(), atom.GetVersion(), atom.GetFlags()) + , m_strStructured(atom.m_strStructured) + , m_definedLayout(atom.m_definedLayout) + , m_omittedChannelsMap(atom.m_omittedChannelsMap) + , m_objectCount(atom.m_objectCount) + , m_channelNumber(atom.m_channelNumber) + , m_channelLayouts(atom.m_channelLayouts) +{ +} + +std::vector ChannelLayoutAtom::GetChannelLayouts() const +{ + return m_channelLayouts; +} + +void ChannelLayoutAtom::AddChannelLayout(ChannelLayout& channelLayout) +{ + m_channelLayouts.push_back(channelLayout); + // set other member variables to mirror fact that m_channelLayouts is present + m_strStructured |= CHANNEL_STRUCTURED; // if channellayouts are present then stream is structured. + m_definedLayout = 0; // if hannellayouts are present then layout is defined. 
+ m_omittedChannelsMap = 0; +} + +std::uint8_t ChannelLayoutAtom::GetStreamStructure() const +{ + return m_strStructured; +} + +std::uint8_t ChannelLayoutAtom::GetDefinedLayout() const +{ + return m_definedLayout; +} + +void ChannelLayoutAtom::SetDefinedLayout(std::uint8_t definedLayout) +{ + m_definedLayout = definedLayout; + // set other member variables to mirror fact that m_definedLayout is present + if (m_definedLayout) + { + m_channelLayouts.clear(); + } + m_strStructured |= CHANNEL_STRUCTURED; +} + +std::uint64_t ChannelLayoutAtom::GetOmittedChannelsMap() const +{ + return m_omittedChannelsMap; +} + +void ChannelLayoutAtom::SetOmittedChannelsMap(std::uint64_t omittedChannelsMap) +{ + m_omittedChannelsMap = omittedChannelsMap; + // set other member variables to mirror fact that m_omittedChannelsMap is present + m_channelLayouts.clear(); + m_strStructured |= CHANNEL_STRUCTURED; +} + +std::uint8_t ChannelLayoutAtom::GetObjectCount() const +{ + return m_objectCount; +} + +void ChannelLayoutAtom::SetObjectCount(std::uint8_t objectCount) +{ + m_objectCount = objectCount; + m_strStructured |= OBJECT_STRUCTURED; +} + +void ChannelLayoutAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write8(m_strStructured); + if (m_strStructured & CHANNEL_STRUCTURED) // channels + { + str.Write8(m_definedLayout); + if (m_definedLayout == 0) + { + if (m_channelLayouts.size() != m_channelNumber) + { + ISO_LOG(LOG_ERROR, "Size doesn't match in ChannelLayoutAtom\n"); + throw Exception(); + } + + for (std::uint16_t i = 0; i < m_channelLayouts.size(); i++) + { + str.Write8(m_channelLayouts.at(i).speakerPosition); + if (m_channelLayouts[i].speakerPosition == 126) + { + str.Write16(static_cast(m_channelLayouts.at(i).azimuthAngle)); + str.Write8(static_cast(m_channelLayouts.at(i).elevationAngle)); + } + } + } + else + { + str.Write64(m_omittedChannelsMap); + } + } + else if (m_strStructured & OBJECT_STRUCTURED) // objects + { + str.Write8(m_objectCount); + } + + UpdateSize(str); +} + +void ChannelLayoutAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_strStructured = str.Read8(); + + if (m_strStructured & CHANNEL_STRUCTURED) // Stream carries channels. + { + m_definedLayout = str.Read8(); + if (m_definedLayout == 0) + { + for (std::uint16_t i = 1; i <= m_channelNumber; i++) + { + ChannelLayout channelPosition; + channelPosition.speakerPosition = str.Read8(); + channelPosition.azimuthAngle = 0; + channelPosition.elevationAngle = 0; + if (channelPosition.speakerPosition == 126) // explicit position + { + channelPosition.azimuthAngle = static_cast(str.Read16()); + channelPosition.elevationAngle = static_cast(str.Read8()); + } + m_channelLayouts.push_back(channelPosition); + } + } + else + { + m_omittedChannelsMap = str.Read64(); + } + } + + if (m_strStructured & OBJECT_STRUCTURED) // Stream carries objects + { + m_objectCount = str.Read8(); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/ChannelLayoutAtom.h b/src/isolib/atoms/ChannelLayoutAtom.h new file mode 100644 index 00000000..60bec8be --- /dev/null +++ b/src/isolib/atoms/ChannelLayoutAtom.h @@ -0,0 +1,199 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ChannelLayoutAtom.h +//! \brief: Channel Layout Atom class +//! \detail: 'chnl' Atom implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _CHANNELLAYOUTATOM_H_ +#define _CHANNELLAYOUTATOM_H_ + +#include +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +/** + * @brief Channel Layout Atom class + * @details 'chnl' Atom implementation as specified in the ISOBMFF specification. + */ +class ChannelLayoutAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ChannelLayoutAtom(); + ChannelLayoutAtom(const ChannelLayoutAtom& Atom); + + ChannelLayoutAtom& operator=(const ChannelLayoutAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~ChannelLayoutAtom() = default; + + /// A helper for Getting and setting class data channel layout + struct ChannelLayout + { + std::uint8_t speakerPosition = 127; // undefined / unknown + std::int16_t azimuthAngle = 0; + std::int8_t elevationAngle = 0; + }; + + //! + //! \brief Get ChannelLayouts + //! + //! \return std::vector + //! ChannelLayout array + //! + std::vector GetChannelLayouts() const; + + //! + //! \brief Add ChannelLayout + //! + //! \param [in] ChannelLayout& + //! ChannelLayout value + //! + //! \return void + //! + void AddChannelLayout(ChannelLayout& channelLayout); + + //! + //! \brief Get Stream Structure + //! + //! \return std::uint8_t + //! Stream Structure + //! + std::uint8_t GetStreamStructure() const; + + //! + //! \brief Get Defined Layout + //! + //! \return std::uint8_t + //! Defined Layout + //! + std::uint8_t GetDefinedLayout() const; + + //! + //! \brief Set Defined Layout + //! + //! \param [in] std::uint8_t + //! Defined Layout value + //! + //! \return void + //! + void SetDefinedLayout(std::uint8_t definedLayout); + + //! + //! \brief Get Defined Layout + //! + //! \return std::uint8_t + //! Defined Layout + //! + std::uint64_t GetOmittedChannelsMap() const; + + //! + //! \brief Set Omitted Channels Map + //! + //! \param [in] std::uint64_t + //! omitted Channels Map + //! + //! \return void + //! + void SetOmittedChannelsMap(std::uint64_t omittedChannelsMap); + + //! + //! \brief Get Object Count + //! + //! \return std::uint8_t + //! Object Count + //! + std::uint8_t GetObjectCount() const; + + //! + //! \brief Set Object Count + //! + //! \param [in] std::uint8_t + //! Object Count + //! + //! \return void + //! + void SetObjectCount(std::uint8_t objectCount); + + //! + //! 
\brief Set and Get function for m_channelNumber member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_channelNumber + //! m_channelNumber member in class + //! \param [in] ChannelNumber + //! m_channelNumber name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_channelNumber, ChannelNumber, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::uint8_t m_strStructured; //!< string structured + std::uint8_t m_definedLayout; //!< defined Layout + std::uint64_t m_omittedChannelsMap; //!< omitted Channels Map + std::uint8_t m_objectCount; //!< object Count + std::uint16_t m_channelNumber; //!< channel Number + std::vector m_channelLayouts; //!< channel Layouts +}; + +VCD_MP4_END; +#endif /* _CHANNELLAYOUTATOM_H_ */ diff --git a/src/isolib/atoms/ChunkOffsetAtom.cpp b/src/isolib/atoms/ChunkOffsetAtom.cpp new file mode 100644 index 00000000..4954e23a --- /dev/null +++ b/src/isolib/atoms/ChunkOffsetAtom.cpp @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ChunkOffsetAtom.cpp +//! \brief: ChunkOffsetAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
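
The `chnl` atom above keeps its representations mutually exclusive: adding explicit `ChannelLayout` entries switches the stream structure to channel-based and clears any predefined layout, while `SetDefinedLayout()`/`SetOmittedChannelsMap()` drop the explicit entries. A short sketch of building an explicitly positioned stereo layout follows; `SetChannelNumber` is the assumed name of the macro-generated setter, and the namespace is an assumption.

```cpp
#include "ChannelLayoutAtom.h"

using namespace VCD::MP4;   // assumption: VCD_MP4_BEGIN/END expand to VCD::MP4

// Sketch: two-channel 'chnl' atom with explicitly positioned speakers.
// ChannelNumber must equal the number of ChannelLayout entries, otherwise
// ToStream() logs an error and throws (see the size check above).
void BuildStereoChnl(ChannelLayoutAtom& chnl)
{
    chnl.SetChannelNumber(2);        // assumed macro-generated setter name

    ChannelLayoutAtom::ChannelLayout left;
    left.speakerPosition = 126;      // 126 = explicitly positioned speaker
    left.azimuthAngle    = 30;       // serialized only when speakerPosition == 126
    left.elevationAngle  = 0;
    chnl.AddChannelLayout(left);     // switches the atom to channel-structured mode

    ChannelLayoutAtom::ChannelLayout right = left;
    right.azimuthAngle = -30;
    chnl.AddChannelLayout(right);
}
```
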
+ +#include "ChunkOffsetAtom.h" +#include +#include +#include "Stream.h" + +VCD_MP4_BEGIN + +ChunkOffsetAtom::ChunkOffsetAtom() + : FullAtom("stco", 0, 0) + , m_chunkOffsets() +{ +} + +void ChunkOffsetAtom::SetChunkOffsets(const std::vector& chunkOffsets) +{ + m_chunkOffsets = chunkOffsets; + if (*std::max_element(m_chunkOffsets.cbegin(), m_chunkOffsets.cend()) > std::numeric_limits::max()) + { + SetType("co64"); + } + else + { + SetType("stco"); + } +} + +std::vector ChunkOffsetAtom::GetChunkOffsets() +{ + return m_chunkOffsets; +} + +const std::vector ChunkOffsetAtom::GetChunkOffsets() const +{ + return m_chunkOffsets; +} + +void ChunkOffsetAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteFullAtomHeader(str); + + str.Write32(static_cast(m_chunkOffsets.size())); + if (GetType() == "stco") + { + for (uint32_t i = 0; i < m_chunkOffsets.size(); ++i) + { + str.Write32(static_cast(m_chunkOffsets.at(i))); + } + } + else + { + // This is a ChunkLargeOffsetAtom 'co64' with unsigned int (64) chunk_offsets. + for (uint32_t i = 0; i < m_chunkOffsets.size(); ++i) + { + str.Write64(m_chunkOffsets.at(i)); + } + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void ChunkOffsetAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseFullAtomHeader(str); + + const std::uint32_t entryCount = str.Read32(); + if (GetType() == "stco") + { + for (uint32_t i = 0; i < entryCount; ++i) + { + m_chunkOffsets.push_back(str.Read32()); + } + } + else // This is a ChunkLargeOffsetAtom 'co64' with unsigned int (64) chunk_offsets. + { + for (uint32_t i = 0; i < entryCount; ++i) + { + m_chunkOffsets.push_back(str.Read64()); + } + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/ChunkOffsetAtom.h b/src/isolib/atoms/ChunkOffsetAtom.h new file mode 100644 index 00000000..26d85732 --- /dev/null +++ b/src/isolib/atoms/ChunkOffsetAtom.h @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ChunkOffsetAtom.h +//! \brief: Chunk Offset Atom and Large Chunk Offset Atom class +//! \detail: 'stco' and 'co64' Atom implementation. +//! +//! Created on October 15, 2019, 13:39 PM +//! 
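
`SetChunkOffsets()` above is what decides between the 32-bit `stco` and the 64-bit `co64` form: the atom type flips to `co64` as soon as any offset no longer fits in a `uint32_t`, and `ToStream()`/`FromStream()` pick the per-entry width from that type. A minimal sketch of that behaviour (the namespace is an assumption):

```cpp
#include <cstdint>
#include "ChunkOffsetAtom.h"

using namespace VCD::MP4;   // assumption: VCD_MP4_BEGIN/END expand to VCD::MP4

void ChunkOffsetWidthExample()
{
    ChunkOffsetAtom co;

    // All offsets fit in 32 bits: the atom keeps the 'stco' type and
    // ToStream() writes each entry with Write32().
    co.SetChunkOffsets({0x1000, 0x2000, 0x3000});

    // One offset above 2^32 - 1: SetChunkOffsets() switches the type to
    // 'co64' and ToStream() writes 64-bit entries with Write64().
    co.SetChunkOffsets({0x1000, 0x100000000ULL});
}
```
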
+ +#ifndef _CHUNKOFFSETATOM_H_ +#define _CHUNKOFFSETATOM_H_ + +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class ChunkOffsetAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ChunkOffsetAtom(); + + //! + //! \brief Destructor + //! + virtual ~ChunkOffsetAtom() = default; + + //! + //! \brief Set Chunk Offsets + //! + //! \param [in] const std::vector& + //! Chunk Offsets + //! + //! \return void + //! + void SetChunkOffsets(const std::vector& chunkOffsets); + + //! + //! \brief Get Chunk Offsets + //! + //! \return std::vector + //! Chunk Offsets + //! + std::vector GetChunkOffsets(); + const std::vector GetChunkOffsets() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::vector m_chunkOffsets; //!< chunk Offsets +}; + +VCD_MP4_END; +#endif /* _CHUNKOFFSETATOM_H_ */ diff --git a/src/isolib/atoms/CleanApertureAtom.cpp b/src/isolib/atoms/CleanApertureAtom.cpp new file mode 100644 index 00000000..8f2c1868 --- /dev/null +++ b/src/isolib/atoms/CleanApertureAtom.cpp @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: CleanApertureAtom.cpp +//! \brief: CleanApertureAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
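
In practice the offsets fed into this atom are absolute file positions of each chunk's first byte, typically accumulated from the media-data start plus the sizes of all preceding samples. A hypothetical helper illustrating that arithmetic (the helper name and parameters are illustrative, not part of this library):

```cpp
#include <cstdint>
#include <vector>
#include "ChunkOffsetAtom.h"

using namespace VCD::MP4;   // assumption: VCD_MP4_BEGIN/END expand to VCD::MP4

// Hypothetical helper: every chunk holds 'samplesPerChunk' consecutive samples,
// and 'mdatPayloadStart' is the absolute file offset of the first sample.
void FillChunkOffsets(ChunkOffsetAtom& stco,
                      const std::vector<std::uint32_t>& sampleSizes,
                      std::uint32_t samplesPerChunk,
                      std::uint64_t mdatPayloadStart)
{
    std::vector<std::uint64_t> offsets;
    std::uint64_t cursor = mdatPayloadStart;
    for (std::size_t i = 0; i < sampleSizes.size(); ++i)
    {
        if (i % samplesPerChunk == 0)
        {
            offsets.push_back(cursor);   // first sample of a new chunk
        }
        cursor += sampleSizes[i];
    }
    stco.SetChunkOffsets(offsets);       // 'stco' vs 'co64' is chosen automatically
}
```
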
+ +#include "CleanApertureAtom.h" +#include "Stream.h" + +VCD_MP4_BEGIN + +CleanApertureAtom::CleanApertureAtom() + : Atom("clap") + , m_width() + , m_height() + , m_horizOffset() + , m_vertOffset() +{ +} + +void CleanApertureAtom::ToStream(Stream& output) +{ + WriteAtomHeader(output); + output.Write32(m_width.numerator); + output.Write32(m_width.denominator); + output.Write32(m_height.numerator); + output.Write32(m_height.denominator); + output.Write32(m_horizOffset.numerator); + output.Write32(m_horizOffset.denominator); + output.Write32(m_vertOffset.numerator); + output.Write32(m_vertOffset.denominator); + UpdateSize(output); +} + +void CleanApertureAtom::FromStream(Stream& input) +{ + ParseAtomHeader(input); + m_width.numerator = input.Read32(); + m_width.denominator = input.Read32(); + m_height.numerator = input.Read32(); + m_height.denominator = input.Read32(); + m_horizOffset.numerator = input.Read32(); + m_horizOffset.denominator = input.Read32(); + m_vertOffset.numerator = input.Read32(); + m_vertOffset.denominator = input.Read32(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/CleanApertureAtom.h b/src/isolib/atoms/CleanApertureAtom.h new file mode 100644 index 00000000..70449b4c --- /dev/null +++ b/src/isolib/atoms/CleanApertureAtom.h @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: CleanApertureAtom.h +//! \brief: Implementation of CleanApertureAtom +//! \detail: CleanAperture Atom is an item property +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _CLEANAPERTURE_H_ +#define _CLEANAPERTURE_H_ + +#include +#include "Atom.h" +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +class CleanApertureAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + CleanApertureAtom(); + + //! + //! \brief Destructor + //! 
+    virtual ~CleanApertureAtom() = default;
+
+    struct Rational    //!< rational value expressed as numerator / denominator
+    {
+        std::uint32_t numerator;
+        std::uint32_t denominator;
+    };
+
+    //!
+    //! \brief    Write atom information to stream
+    //!
+    //! \param    [in,out] Stream&
+    //!           bitstream that contains atom information
+    //!
+    //! \return   void
+    //!
+    virtual void ToStream(Stream& str);
+
+    //!
+    //! \brief    Parse atom information from stream
+    //!
+    //! \param    [in,out] Stream&
+    //!           bitstream that contains atom information
+    //!
+    //! \return   void
+    //!
+    virtual void FromStream(Stream& str);
+
+private:
+    Rational m_width;          //!< clean aperture width
+    Rational m_height;         //!< clean aperture height
+    Rational m_horizOffset;    //!< horizontal offset
+    Rational m_vertOffset;     //!< vertical offset
+};
+
+VCD_MP4_END;
+#endif /* _CLEANAPERTURE_H_ */
+#include "../include/Common.h"
-CubeMapMesh::~CubeMapMesh()
-{
-    if (m_vertices != NULL)
-    {
-        delete m_vertices;
-        m_vertices = NULL;
-    }
-    if (m_texCoords != NULL)
-    {
-        delete m_texCoords;
-        m_texCoords = NULL;
-    }
-    if (m_indices != NULL)
-    {
-        delete m_indices;
-        m_indices = NULL;
-    }
-}
-RenderStatus CubeMapMesh::Create()
-{
-    return RENDER_STATUS_OK;
-}
-RenderStatus CubeMapMesh::Destroy()
-{
-    return RENDER_STATUS_OK;
-}
-RenderStatus CubeMapMesh::Bind(RenderBackend* renderBackend, uint32_t vertexAttrib, uint32_t texCoordAttrib)
-{
-    return RENDER_STATUS_OK;
-}
-VCD_NS_END
\ No newline at end of file
+VCD_MP4_BEGIN
+
+enum class ViewMode : std::uint8_t    //!< view mode
+{
+    MONO_TYPE      = 0,
+    LEFT           = 1,
+    RIGHT          = 2,
+    LEFT_AND_RIGHT = 3,
+    INVALID        = 0xff
+};
+
+struct SphereRegion    //!< sphere region definition
+{
+    std::int32_t  centreAzimuth;
+    std::int32_t  centreElevation;
+    std::int32_t  centreTilt;
+    std::uint32_t azimuthRange;      // not always used
+    std::uint32_t elevationRange;    // not always used
+    bool          interpolate;
+};
+
+VCD_MP4_END;
+#endif /* _COMMONTYPES_H_ */
diff --git a/src/isolib/atoms/CompOffsetAtom.cpp b/src/isolib/atoms/CompOffsetAtom.cpp
new file mode 100644
index 00000000..4a45d4cc
--- /dev/null
+++ b/src/isolib/atoms/CompOffsetAtom.cpp
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2019, Intel Corporation
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright notice, this
+ *   list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright notice,
+ *   this list of conditions and the following disclaimer in the documentation
+ *   and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+//!
+//! \file:   CompOffsetAtom.cpp
+//! \brief:  CompOffsetAtom class implementation
+//!
+//! Created on April 30, 2019, 6:04 AM
+//!
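
The `SphereRegion` structure recovered above mirrors the OMAF SphereRegionStruct, whose angles are commonly carried in units of 2^-16 degrees; that unit, and the `CommonTypes.h` header name, are assumptions, since the recovered text does not state them. A small illustrative helper for a full-sphere coverage region:

```cpp
#include <cstdint>
#include "CommonTypes.h"   // assumed header name for the definitions recovered above

// Sketch: full-sphere region, assuming OMAF-style 2^-16 degree angle units.
VCD::MP4::SphereRegion MakeFullSphereRegion()
{
    constexpr std::int64_t kDegree = 1 << 16;   // 1 degree in 2^-16 units (assumed)

    VCD::MP4::SphereRegion region{};
    region.centreAzimuth   = 0;
    region.centreElevation = 0;
    region.centreTilt      = 0;
    region.azimuthRange    = static_cast<std::uint32_t>(360 * kDegree);
    region.elevationRange  = static_cast<std::uint32_t>(180 * kDegree);
    region.interpolate     = false;
    return region;
}
```
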
+#include "CompOffsetAtom.h" +#include +#include "Stream.h" + +#include + +VCD_MP4_BEGIN + +CompositionOffsetAtom::CompositionOffsetAtom() + : FullAtom("ctts", 0, 0) +{ +} + +void CompositionOffsetAtom::AddEntryVersion0(const EntryVersion0& entry) +{ + if (m_entryVersion1.size() != 0 || GetVersion() != 0) + { + ISO_LOG(LOG_ERROR, "Invalid attempt to add version0 CompositionOffsetAtom entries.""Invalid attempt to add version0 CompositionOffsetAtom entries.\n"); + throw Exception(); + } + m_entryVersion0.push_back(entry); +} + +void CompositionOffsetAtom::AddEntryVersion1(const EntryVersion1& entry) +{ + if (m_entryVersion0.size() != 0 || GetVersion() != 1) + { + ISO_LOG(LOG_ERROR, "Invalid attempt to add version1 CompositionOffsetAtom entries.""Invalid attempt to add version0 CompositionOffsetAtom entries.\n"); + throw Exception(); + } + m_entryVersion1.push_back(entry); +} + +uint32_t CompositionOffsetAtom::GetSampleNum() +{ + uint64_t sampleNum = 0; + if (GetVersion() == 0) + { + for (const auto& entry : m_entryVersion0) + { + sampleNum += static_cast(entry.m_sampleNum); + if (sampleNum > std::numeric_limits::max()) + { + ISO_LOG(LOG_ERROR, "CompositionOffsetAtom::GetSampleNum >= 2^32\n"); + throw Exception(); + } + } + } + else if (GetVersion() == 1) + { + for (const auto& entry : m_entryVersion1) + { + sampleNum += static_cast(entry.m_sampleNum); + if (sampleNum > std::numeric_limits::max()) + { + ISO_LOG(LOG_ERROR, "CompositionOffsetAtom::GetSampleNum >= 2^32\n"); + throw Exception(); + } + } + } + return static_cast(sampleNum); +} + +std::vector CompositionOffsetAtom::GetSampleCompositionOffsets() const +{ + std::vector offsets; + if (GetVersion() == 0) + { + for (const auto& entry : m_entryVersion0) + { + for (unsigned int i = 0; i < entry.m_sampleNum; ++i) + { + offsets.push_back(static_cast(entry.m_sampleOffset)); + } + } + } + else if (GetVersion() == 1) + { + for (const auto& entry : m_entryVersion1) + { + for (unsigned int i = 0; i < entry.m_sampleNum; ++i) + { + offsets.push_back(entry.m_sampleOffset); + } + } + } + + return offsets; +} + +void CompositionOffsetAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteFullAtomHeader(str); + + if (m_entryVersion0.empty() == false) + { + str.Write32(static_cast(m_entryVersion0.size())); + for (const auto& entry : m_entryVersion0) + { + str.Write32(entry.m_sampleNum); + str.Write32(entry.m_sampleOffset); + } + } + else if (m_entryVersion1.empty() == false) + { + str.Write32(static_cast(m_entryVersion1.size())); + for (const auto& entry : m_entryVersion1) + { + str.Write32(entry.m_sampleNum); + str.Write32(static_cast(entry.m_sampleOffset)); + } + } + else + { + ISO_LOG(LOG_ERROR, "Can not write an empty CompositionOffsetAtom.\n"); + throw Exception(); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void CompositionOffsetAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseFullAtomHeader(str); + + const std::uint32_t entryCount = str.Read32(); + + if (GetVersion() == 0) + { + for (uint32_t i = 0; i < entryCount; ++i) + { + EntryVersion0 entryVersion0; + entryVersion0.m_sampleNum = str.Read32(); + entryVersion0.m_sampleOffset = str.Read32(); + m_entryVersion0.push_back(entryVersion0); + } + } + else if (GetVersion() == 1) + { + for (uint32_t i = 0; i < entryCount; ++i) + { + EntryVersion1 entryVersion1; + entryVersion1.m_sampleNum = str.Read32(); + entryVersion1.m_sampleOffset = static_cast(str.Read32()); + m_entryVersion1.push_back(entryVersion1); + } + } +} + +VCD_MP4_END \ No newline 
at end of file diff --git a/src/isolib/atoms/CompOffsetAtom.h b/src/isolib/atoms/CompOffsetAtom.h new file mode 100644 index 00000000..db7451fe --- /dev/null +++ b/src/isolib/atoms/CompOffsetAtom.h @@ -0,0 +1,130 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: CompOffsetAtom.h +//! \brief: Composition Time to Sample Atom class +//! \detail: 'sttc' Atom implementation. +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef COMPOSITIONOFFSETATOM_H +#define COMPOSITIONOFFSETATOM_H + +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class CompositionOffsetAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + CompositionOffsetAtom(); + + //! + //! \brief Destructor + //! + virtual ~CompositionOffsetAtom() = default; + + struct EntryVersion0 //!< entry version v0 + { + std::uint32_t m_sampleNum; + std::uint32_t m_sampleOffset; + }; + + struct EntryVersion1 //!< entry version v1 + { + std::uint32_t m_sampleNum; + std::int32_t m_sampleOffset; + }; + + //! + //! \brief Add Entry Version v0 + //! + //! \param [in] const EntryVersion0& + //! entry + //! + //! \return void + //! + void AddEntryVersion0(const EntryVersion0& entry); + + //! + //! \brief Add Entry Version v1 + //! + //! \param [in] const EntryVersion1& + //! entry + //! + //! \return void + //! + void AddEntryVersion1(const EntryVersion1& entry); + + //! + //! \brief Get Sample Number + //! + //! \return uint32_t + //! Sample Number + //! + uint32_t GetSampleNum(); + + //! + //! \brief Get Sample Composition Offsets + //! + //! \return std::vector + //! Sample Composition Offsets + //! + std::vector GetSampleCompositionOffsets() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ void FromStream(Stream& str); + +private: + std::vector m_entryVersion0; //!< entry version v0 + std::vector m_entryVersion1; //!< entry version v1 +}; + +VCD_MP4_END; +#endif /* COMPOSITIONOFFSETATOM_H */ diff --git a/src/isolib/atoms/CompToDecAtom.cpp b/src/isolib/atoms/CompToDecAtom.cpp new file mode 100644 index 00000000..db569c58 --- /dev/null +++ b/src/isolib/atoms/CompToDecAtom.cpp @@ -0,0 +1,178 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: CompToDecodeAtom.cpp +//! \brief: CompToDecodeAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! 
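
`CompositionOffsetAtom` above stores run-length entries of (sample count, composition offset), and `GetSampleCompositionOffsets()` expands them back to one offset per sample. The sketch below uses only the version-0 path with unsigned offsets; version-1 entries (signed offsets) can only be added once the atom's version has been switched to 1, otherwise `AddEntryVersion1()` throws. The namespace is an assumption.

```cpp
#include "CompOffsetAtom.h"

using namespace VCD::MP4;   // assumption: VCD_MP4_BEGIN/END expand to VCD::MP4

void CttsExample()
{
    CompositionOffsetAtom ctts;     // constructed as version 0 ("ctts", 0, 0)

    // Three samples displayed 100 ticks after decode, then two with no offset.
    ctts.AddEntryVersion0({/*m_sampleNum*/ 3, /*m_sampleOffset*/ 100});
    ctts.AddEntryVersion0({/*m_sampleNum*/ 2, /*m_sampleOffset*/ 0});

    // Run-length entries expand to one offset per sample:
    // offsets == {100, 100, 100, 0, 0}, GetSampleNum() == 5.
    const auto offsets = ctts.GetSampleCompositionOffsets();
    (void) offsets;
}
```
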
+#include "CompToDecAtom.h" +#include +#include "Stream.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +CompositionToDecodeAtom::CompositionToDecodeAtom() + : FullAtom("cslg", 0, 0) + , m_dtsShift(0) + , m_leastDisplayDelta(0) + , m_greatestDisplayDelta(0) + , m_startTime(0) + , m_endTime(0) +{ +} + +bool static requires64Bits(const int64_t value) +{ + if ((value > std::numeric_limits::max()) || (value < std::numeric_limits::min())) + { + return true; + } + return false; +} + + +void CompositionToDecodeAtom::SetDtsShift(const std::int64_t compositionToDtsShift) +{ + m_dtsShift = compositionToDtsShift; + UpdateVersion(); +} + +std::int64_t CompositionToDecodeAtom::GetDtsShift() const +{ + return m_dtsShift; +} + +void CompositionToDecodeAtom::SetLeastDisplayDelta(const std::int64_t leastDecodeToDisplayDelta) +{ + m_leastDisplayDelta = leastDecodeToDisplayDelta; + UpdateVersion(); +} + +std::int64_t CompositionToDecodeAtom::GetLeastDisplayDelta() const +{ + return m_leastDisplayDelta; +} + +void CompositionToDecodeAtom::SetGreatestDisplayDelta(const std::int64_t greatestDecodeToDisplayDelta) +{ + m_greatestDisplayDelta = greatestDecodeToDisplayDelta; + UpdateVersion(); +} + +std::int64_t CompositionToDecodeAtom::GetGreatestDisplayDelta() const +{ + return m_greatestDisplayDelta; +} + +void CompositionToDecodeAtom::SetStartTime(const std::int64_t compositionStartTime) +{ + m_startTime = compositionStartTime; + UpdateVersion(); +} + +std::int64_t CompositionToDecodeAtom::GetStartTime() const +{ + return m_startTime; +} + +void CompositionToDecodeAtom::SetEndTime(const std::int64_t compositionEndTime) +{ + m_endTime = compositionEndTime; + UpdateVersion(); +} + +std::int64_t CompositionToDecodeAtom::GetEndTime() const +{ + return m_endTime; +} + +void CompositionToDecodeAtom::UpdateVersion() +{ + if (requires64Bits(m_dtsShift) || requires64Bits(m_leastDisplayDelta) || + requires64Bits(m_greatestDisplayDelta) || requires64Bits(m_startTime) || + requires64Bits(m_endTime)) + { + SetVersion(1); + } + else + { + SetVersion(0); + } +} + +void CompositionToDecodeAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteFullAtomHeader(str); + + if (GetVersion() == 0) + { + str.Write32(static_cast(m_dtsShift)); + str.Write32(static_cast(m_leastDisplayDelta)); + str.Write32(static_cast(m_greatestDisplayDelta)); + str.Write32(static_cast(m_startTime)); + str.Write32(static_cast(m_endTime)); + } + else + { + str.Write64(static_cast(m_dtsShift)); + str.Write64(static_cast(m_leastDisplayDelta)); + str.Write64(static_cast(m_greatestDisplayDelta)); + str.Write64(static_cast(m_startTime)); + str.Write64(static_cast(m_endTime)); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void CompositionToDecodeAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseFullAtomHeader(str); + + if (GetVersion() == 0) + { + m_dtsShift = static_cast(str.Read32()); + m_leastDisplayDelta = static_cast(str.Read32()); + m_greatestDisplayDelta = static_cast(str.Read32()); + m_startTime = static_cast(str.Read32()); + m_endTime = static_cast(str.Read32()); + } + else + { + m_dtsShift = static_cast(str.Read64()); + m_leastDisplayDelta = static_cast(str.Read64()); + m_greatestDisplayDelta = static_cast(str.Read64()); + m_startTime = static_cast(str.Read64()); + m_endTime = static_cast(str.Read64()); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/CompToDecAtom.h b/src/isolib/atoms/CompToDecAtom.h new file mode 100644 index 00000000..eee98cb9 --- /dev/null +++ 
b/src/isolib/atoms/CompToDecAtom.h @@ -0,0 +1,184 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: CompToDecodeAtom.h +//! \brief: Composition To Decode Atom class +//! \detail: 'cslg' Atom implementation. +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef COMPOSITIONTODECODEATOM_H +#define COMPOSITIONTODECODEATOM_H + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class CompositionToDecodeAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + CompositionToDecodeAtom(); + + //! + //! \brief Destructor + //! + virtual ~CompositionToDecodeAtom() = default; + + //! + //! \brief Set compositionTosDtsShift + //! + //! \param [in] std::int64_t + //! DtsShift + //! + //! \return void + //! + void SetDtsShift(std::int64_t compositionTosDtsShift); + + //! + //! \brief Get Dts Shift + //! + //! \return std::int64_t + //! Dts Shift + //! + std::int64_t GetDtsShift() const; + + //! + //! \brief Set Least Display Delta + //! + //! \param [in] std::int64_t + //! least Decode To Display Delta + //! + //! \return void + //! + void SetLeastDisplayDelta(std::int64_t leastDecodeToDisplayDelta); + + //! + //! \brief Get Least Display Delta + //! + //! \return std::int64_t + //! Least Display Delta + //! + std::int64_t GetLeastDisplayDelta() const; + + //! + //! \brief Set Greatest Display Delta + //! + //! \param [in] std::int64_t + //! greatest Decode To Display Delta + //! + //! \return void + //! + void SetGreatestDisplayDelta(std::int64_t greatestDecodeToDisplayDelta); + + //! + //! \brief Get Greatest Display Delta + //! + //! \return std::int64_t + //! greatest Decode To Display Delta + //! + std::int64_t GetGreatestDisplayDelta() const; + + //! + //! \brief Set Start Time + //! + //! \param [in] std::int64_t + //! Start Time + //! + //! \return void + //! + void SetStartTime(std::int64_t compositionStartTime); + + //! + //! \brief Get Start Time + //! + //! \return std::int64_t + //! Start Time + //! + std::int64_t GetStartTime() const; + + //! + //! \brief Set End Time + //! + //! \param [in] std::int64_t + //! End Time + //! + //! \return void + //! 
+ void SetEndTime(std::int64_t compositionEndTime); + + //! + //! \brief Get End Time + //! + //! \return std::int64_t + //! End Time + //! + std::int64_t GetEndTime() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + +private: + + //! + //! \brief Update Version + //! + //! \return void + //! + void UpdateVersion(); + + std::int64_t m_dtsShift; //!< dts Shift + std::int64_t m_leastDisplayDelta; //!< least Display Delta + std::int64_t m_greatestDisplayDelta; //!< greatest Display Delta + std::int64_t m_startTime; //!< start Time + std::int64_t m_endTime; //!< end Time +}; + +VCD_MP4_END; +#endif /* COMPOSITIONTODECODEATOM_H */ diff --git a/src/isolib/atoms/DataInfoAtom.cpp b/src/isolib/atoms/DataInfoAtom.cpp new file mode 100644 index 00000000..9086eab0 --- /dev/null +++ b/src/isolib/atoms/DataInfoAtom.cpp @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DataInfoAtom.cpp +//! \brief: DataInfoAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
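The 'dinf' atom implemented below is a thin wrapper around a 'dref' data reference list. A hedged sketch of how a muxer might populate it, assuming AddDataEntryAtom() accepts a std::shared_ptr<DataEntryAtom> (the template argument is stripped in this hunk) and that Stream is default-constructible:

```cpp
#include <cstdint>
#include <memory>
#include "DataInfoAtom.h"
#include "DataRefAtom.h"
#include "Stream.h"

using namespace VCD::MP4;   // assumption, as above

int main()
{
    DataInformationAtom dinf;

    // 'Contained' sets flag bit 0, i.e. the media data lives in the same file,
    // so DataEntryUrlAtom::ToStream() writes no location string.
    auto url = std::make_shared<DataEntryUrlAtom>(DataEntryUrlAtom::Contained);
    std::uint16_t entryCount = dinf.AddDataEntryAtom(url);   // returns the entry count so far

    Stream out;
    dinf.ToStream(out);   // writes the 'dinf' header, then the nested 'dref' with one entry
    return entryCount == 1 ? 0 : 1;
}
```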
+ +#include "DataInfoAtom.h" + +#include + +VCD_MP4_BEGIN + +DataInformationAtom::DataInformationAtom() + : Atom("dinf") + , m_dataReferenceAtom() +{ +} + +std::uint16_t DataInformationAtom::AddDataEntryAtom(std::shared_ptr dataEntryAtom) +{ + return static_cast(m_dataReferenceAtom.AddEntry(dataEntryAtom)); +} + +void DataInformationAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + m_dataReferenceAtom.ToStream(str); + UpdateSize(str); +} + +void DataInformationAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + if (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + m_dataReferenceAtom.FromStream(subBitstr); + } + else + { + ISO_LOG(LOG_ERROR, "Read an empty dinf Atom.\n"); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/DataInfoAtom.h b/src/isolib/atoms/DataInfoAtom.h new file mode 100644 index 00000000..f3e3f290 --- /dev/null +++ b/src/isolib/atoms/DataInfoAtom.h @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DataInfoAtom.h +//! \brief: Data Information Atom class +//! \detail: 'dinf' Atom implementation. +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _DATAINFORMATIONATOM_H_ +#define _DATAINFORMATIONATOM_H_ + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "DataRefAtom.h" + +VCD_MP4_BEGIN + +class DataInformationAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + DataInformationAtom(); + + //! + //! \brief Destructor + //! + virtual ~DataInformationAtom() = default; + + //! + //! \brief Add Data Entry Atom + //! + //! \param [in] std::shared_ptr + //! entry + //! + //! \return std::uint16_t + //! data entry size + //! + std::uint16_t AddDataEntryAtom(std::shared_ptr dataEntryAtom); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void FromStream(Stream& str); + +private: + DataReferenceAtom m_dataReferenceAtom; //!< Data Reference Atom +}; + +VCD_MP4_END; +#endif /* _DATAINFORMATIONATOM_H_ */ diff --git a/src/isolib/atoms/DataRefAtom.cpp b/src/isolib/atoms/DataRefAtom.cpp new file mode 100644 index 00000000..13c07670 --- /dev/null +++ b/src/isolib/atoms/DataRefAtom.cpp @@ -0,0 +1,185 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DataRefAtom.cpp +//! \brief: DataRefAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "DataRefAtom.h" + + +#include + +VCD_MP4_BEGIN + +DataEntryAtom::DataEntryAtom(FourCCInt AtomType, const std::uint8_t version, const std::uint32_t flags) + : FullAtom(AtomType, version, flags) + , m_location() +{ +} + +void DataEntryAtom::SetLocation(const std::string& location) +{ + m_location = location; +} + +const std::string DataEntryAtom::GetLocation() const +{ + return m_location; +} + +DataEntryUrlAtom::DataEntryUrlAtom(IsContained isContained) + : DataEntryAtom("url ", 0, isContained == NotContained ? 
0 : 1) +{ +} + +void DataEntryUrlAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + if (!(GetFlags() & 1)) + { + str.WriteZeroEndString(GetLocation()); + } + + UpdateSize(str); +} + +void DataEntryUrlAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + if (!(GetFlags() & 1)) + { + std::string location; + str.ReadZeroEndString(location); + SetLocation(location); + } +} + +DataEntryUrnAtom::DataEntryUrnAtom() + : DataEntryAtom("urn ", 0, 0) + , m_name() +{ +} + +void DataEntryUrnAtom::SetName(const std::string& name) +{ + m_name = name; +} + +const std::string DataEntryUrnAtom::GetName() const +{ + return m_name; +} + +void DataEntryUrnAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.WriteZeroEndString(m_name); + str.WriteZeroEndString(GetLocation()); + UpdateSize(str); +} + +void DataEntryUrnAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + str.ReadZeroEndString(m_name); + std::string location; + str.ReadZeroEndString(location); + SetLocation(location); +} + +DataReferenceAtom::DataReferenceAtom() + : FullAtom("dref", 0, 0) + , m_dataEntries() +{ +} + +unsigned int DataReferenceAtom::AddEntry(std::shared_ptr dataEntryAtom) +{ + m_dataEntries.push_back(dataEntryAtom); + unsigned int ret = static_cast(m_dataEntries.size()); + return ret; +} + +void DataReferenceAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write32(static_cast(m_dataEntries.size())); + for (auto& entry : m_dataEntries) + { + entry->ToStream(str); + } + + UpdateSize(str); +} + +void DataReferenceAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + const unsigned int entryCount = str.Read32(); + for (unsigned int i = 0; i < entryCount; ++i) + { + FourCCInt AtomType; + Stream subStream = str.ReadSubAtomStream(AtomType); + + std::shared_ptr dataEntryAtom; + if (AtomType == "urn ") + { + dataEntryAtom = MakeShared(); + if (!dataEntryAtom) + { + ISO_LOG(LOG_ERROR, "NULL pointer !\n"); + throw Exception(); + } + dataEntryAtom->FromStream(subStream); + } + else if (AtomType == "url ") + { + dataEntryAtom = MakeShared(); + if (!dataEntryAtom) + { + ISO_LOG(LOG_ERROR, "NULL pointer !\n"); + throw Exception(); + } + dataEntryAtom->FromStream(subStream); + } + else + { + ISO_LOG(LOG_ERROR, "An unknown Atom inside dref\n"); + throw Exception(); + } + m_dataEntries.push_back(dataEntryAtom); + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/DataRefAtom.h b/src/isolib/atoms/DataRefAtom.h new file mode 100644 index 00000000..6337f35b --- /dev/null +++ b/src/isolib/atoms/DataRefAtom.h @@ -0,0 +1,248 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DataRefAtom.h +//! \brief: Data Entry Atom class. +//! \detail: Used as data reference basic atom. +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _DATAREFERENCEATOM_H_ +#define _DATAREFERENCEATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +#include + +VCD_MP4_BEGIN + +class DataEntryAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + DataEntryAtom(FourCCInt AtomType, std::uint8_t version, std::uint32_t flags); + + //! + //! \brief Destructor + //! + virtual ~DataEntryAtom() = default; + + //! + //! \brief Set Location + //! + //! \param [in] const std::string& + //! Location value + //! + //! \return void + //! + void SetLocation(const std::string& location); + + //! + //! \brief Get Location + //! + //! \return const std::string + //! Location + //! + const std::string GetLocation() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str) = 0; + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str) = 0; + +private: + std::string m_location; //!< data location +}; + +class DataEntryUrlAtom : public DataEntryAtom +{ +public: + enum IsContained + { + NotContained, + Contained + }; + + //! + //! \brief Constructor + //! + DataEntryUrlAtom(IsContained isContained = NotContained); + + //! + //! \brief Destructor + //! + virtual ~DataEntryUrlAtom() = default; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); +}; + +class DataEntryUrnAtom : public DataEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + DataEntryUrnAtom(); + + //! + //! \brief Destructor + //! + virtual ~DataEntryUrnAtom() = default; + + //! + //! \brief Set Name + //! + //! \param [in] const std::string& + //! Name value + //! + //! \return void + //! + void SetName(const std::string& name); + + //! + //! \brief Get Name + //! + //! \return const std::string + //! Name + //! + const std::string GetName() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void FromStream(Stream& str); + +private: + std::string m_name; //!< data name +}; + +class DataReferenceAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + DataReferenceAtom(); + + //! + //! \brief Destructor + //! + virtual ~DataReferenceAtom() = default; + + //! + //! \brief Add data Entry + //! + //! \param [in] std::shared_ptr + //! data Entry Atom value + //! + //! \return unsigned int + //! data entry size + //! + unsigned int AddEntry(std::shared_ptr dataEntryAtom); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::vector> m_dataEntries; //!< data entry array +}; + +VCD_MP4_END; +#endif /* _DATAREFERENCEATOM_H_ */ \ No newline at end of file diff --git a/src/isolib/atoms/DecConfigRecord.h b/src/isolib/atoms/DecConfigRecord.h new file mode 100644 index 00000000..8382cdcc --- /dev/null +++ b/src/isolib/atoms/DecConfigRecord.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DecConfigRecord.h +//! \brief: DecConfigRecord class +//! \detail: Used for decoder configuration record. +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _DECODERCONFIGRECORD_H_ +#define _DECODERCONFIGRECORD_H_ + +#include +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +class DecoderConfigurationRecord +{ +public: + enum DecParam //!< decoder paramters + { + AVC_SPS = 0, + AVC_PPS, + HEVC_VPS, + HEVC_SPS, + HEVC_PPS, + AudioSpecificConfig + }; + + typedef std::map> ConfigurationMap; //!< Configuration Map + + //! + //! \brief Constructor + //! + DecoderConfigurationRecord() = default; + + //! + //! \brief Destructor + //! + virtual ~DecoderConfigurationRecord() = default; + + //! + //! \brief Get Configuration Map + //! + //! \param [out] ConfigurationMap& + //! map value + //! + //! \return void + //! 
+ virtual void GetConfigurationMap(ConfigurationMap& aMap) const = 0; +}; + +VCD_MP4_END; +#endif /* _DECODERCONFIGRECORD_H_ */ diff --git a/src/isolib/atoms/DecPts.cpp b/src/isolib/atoms/DecPts.cpp new file mode 100644 index 00000000..9ca1b461 --- /dev/null +++ b/src/isolib/atoms/DecPts.cpp @@ -0,0 +1,440 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DecPts.cpp +//! \brief: DecPts class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! 
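DecodePts, implemented below, combines the sample timing atoms into maps from presentation time to sample index. A hedged sketch of the non-fragmented call sequence, using only the calls shown in this patch and assuming the 'stts'/'ctts' atoms were already parsed from a sample table elsewhere:

```cpp
#include <cstdint>
#include "DecPts.h"

using namespace VCD::MP4;   // assumption, as above

void BuildSampleTimeline(const TimeToSampleAtom* stts,
                         const CompositionOffsetAtom* ctts,
                         std::uint32_t mediaTimescale)   // must be non-zero; GetTime() throws on 0
{
    DecodePts pts;
    pts.SetAtom(stts);           // decode deltas: the mandatory input
    if (ctts != nullptr)
    {
        pts.SetAtom(ctts);       // optional composition offsets
    }

    if (pts.Unravel())           // builds the media/movie PTS maps
    {
        DecodePts::PMap timeline = pts.GetTime(mediaTimescale);  // key: milliseconds, value: sample index
        std::uint64_t spanTS = pts.GetSpan();                    // total span in timescale units
        (void) timeline;
        (void) spanTS;
    }
}
```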
+#include "DecPts.h" +#include +#include +#include "CompOffsetAtom.h" +#include "CompToDecAtom.h" +#include "EditAtom.h" +#include "TimeToSampAtom.h" + +VCD_MP4_BEGIN + +DecodePts::DecodePts() + : m_editListAtom(nullptr) + , m_movieTimescale(0) + , m_mediaTimescale(0) + , m_timeToSampAtom(nullptr) + , m_compOffsetAtom(nullptr) + , m_compToDecodeAtom(nullptr) + , m_trackRunAtom(nullptr) + , m_movieOffset(0) +{ +} + +template +void DecodePts::SetEdit(T& entry) +{ + if (entry.m_mediaTime == -1) + { + SetEmptyEdit(entry); + } + if (entry.m_mediaRateInt == 0) + { + SetDwellEdit(entry); + } + if (entry.m_mediaTime >= 0 && entry.m_mediaRateInt != 0) + { + SetShiftEdit(entry); + } +} + +template +std::uint64_t DecodePts::FromMovieToMediaTS(T movieTS) const +{ + if (m_movieTimescale) + { + return static_cast(movieTS) * m_mediaTimescale / m_movieTimescale; + } + else + { + return static_cast(movieTS); + } +} + +template +void DecodePts::SetEmptyEdit(T& entry) +{ + m_movieOffset += FromMovieToMediaTS(entry.m_segDuration); +} + +template +void DecodePts::SetDwellEdit(T& entry) +{ + std::pair iters; + iters = m_mediaPts.equal_range(entry.m_mediaTime); + + if (iters.first->first == iters.second->first) + { + m_moviePts.insert(std::make_pair(m_movieOffset, std::prev(iters.first)->second)); + m_movieOffset += FromMovieToMediaTS(entry.m_segDuration); + } + else + { + m_moviePts.insert(std::make_pair(m_movieOffset, iters.first->second)); + m_movieOffset += FromMovieToMediaTS(entry.m_segDuration); + } +} + +std::uint64_t DecodePts::LastSampleDuration() const +{ + std::uint64_t lastSampleDuration = 0; + if (m_trackRunAtom) + { + auto& pSample = m_trackRunAtom->GetSampleDetails(); + if (pSample.size()) + { + auto& last = *pSample.rbegin(); + if (m_trackRunAtom->GetVersion() == 0) + { + lastSampleDuration = last.version0.pDuration; + } + else + { + lastSampleDuration = last.version1.pDuration; + } + } + } + else + { + const auto& sampleDeltas = m_timeToSampAtom->GetSampleDeltas(); + if (sampleDeltas.size()) + { + lastSampleDuration = *sampleDeltas.rbegin(); + } + } + return lastSampleDuration; +} + +template +void DecodePts::SetShiftEdit(T& entry) +{ + std::int64_t endTime(INT64_MAX); + + if (entry.m_segDuration != 0) + { + endTime = static_cast(static_cast(entry.m_mediaTime) + + (FromMovieToMediaTS(entry.m_segDuration))); + } + + if (m_mediaPts.size()) + { + m_movieOffset += static_cast(m_mediaPts.begin()->first + m_mediaOffset - entry.m_mediaTime); + } + + for (auto it = m_mediaPts.cbegin(); it != m_mediaPts.cend(); ++it) + { + if (it->first + m_mediaOffset >= static_cast(entry.m_mediaTime) && + it->first + m_mediaOffset < endTime) + { + if (it != m_mediaPts.cbegin() && + std::prev(it)->first + m_mediaOffset < static_cast(entry.m_mediaTime) && + it->first + m_mediaOffset != static_cast(entry.m_mediaTime)) + { + m_moviePts.insert(std::make_pair(m_movieOffset, std::prev(it)->second)); + m_movieOffset += static_cast( + it->first - (std::prev(it)->first + (entry.m_mediaTime - std::prev(it)->first))); + } + + m_moviePts.insert(std::make_pair(m_movieOffset, it->second)); + } + if (std::next(it) != m_mediaPts.cend()) + { + m_movieOffset += static_cast(std::next(it)->first - it->first); + } + else + { + m_movieOffset += LastSampleDuration(); + } + } + + if (m_mediaPts.size() && entry.m_segDuration != 0) + { + m_movieOffset -= static_cast(m_mediaPts.rbegin()->first + m_mediaOffset + + static_cast(LastSampleDuration()) - endTime); + } +} + +void DecodePts::SetAtom(const TimeToSampleAtom* atom) +{ + m_timeToSampAtom = 
atom; +} + +void DecodePts::SetAtom(const CompositionOffsetAtom* atom) +{ + m_compOffsetAtom = atom; +} + +void DecodePts::SetAtom(const CompositionToDecodeAtom* atom) +{ + m_compToDecodeAtom = atom; +} + +void DecodePts::SetAtom(const EditListAtom* atom, std::uint32_t movieTimescale, std::uint32_t mediaTimescale) +{ + m_editListAtom = atom; + m_movieTimescale = movieTimescale; + m_mediaTimescale = mediaTimescale; +} + +void DecodePts::SetAtom(const TrackRunAtom* atom) +{ + m_trackRunAtom = atom; +} + +void DecodePts::SetEditList() +{ + if (m_mediaPts.size()) + { + std::uint32_t version = m_editListAtom->GetVersion(); + for (std::uint32_t i = 0; i < m_editListAtom->numEntry(); i++) + { + if (version == 0) + { + SetEdit(m_editListAtom->GetEntry(i)); + } + else if (version == 1) + { + SetEdit(m_editListAtom->GetEntry(i)); + } + } + } +} + +bool DecodePts::Unravel() +{ + bool ret = true; + + std::vector pDts; + pDts = m_timeToSampAtom->GetSampleTimes(); + + std::vector mediaPtsTS; + if (m_compOffsetAtom != nullptr) + { + std::vector DeltaPts; + DeltaPts = m_compOffsetAtom->GetSampleCompositionOffsets(); + + if (DeltaPts.size() == pDts.size()) + { + std::transform(pDts.begin(), pDts.end(), DeltaPts.begin(), std::back_inserter(mediaPtsTS), + [](std::uint64_t theMediaDts, std::int32_t thePtsDelta) { + return std::uint64_t(theMediaDts + (std::uint64_t)(thePtsDelta)); + }); + } + } + else + { + std::copy(pDts.begin(), pDts.end(), std::back_inserter(mediaPtsTS)); + } + + if (ret) + { + std::uint64_t sampleId = 0; + for (auto pts : mediaPtsTS) + { + m_mediaPts.insert(std::make_pair(pts, sampleId++)); + } + + if (m_editListAtom != nullptr) + { + SetEditList(); + } + else + { + m_moviePts = m_mediaPts; + + if (m_moviePts.size() > 0) + { + auto last = std::prev(m_moviePts.end(), 1); + m_movieOffset = static_cast(last->first) + LastSampleDuration(); + } + else + { + m_movieOffset = 0; + } + } + } + + return ret; +} + +void DecodePts::UnravelTrackRun() +{ + std::vector pDts; + uint32_t time = 0; + bool timeOffsetFlag = false; + std::vector DeltaPts; + + if ((m_trackRunAtom->GetFlags() & TrackRunAtom::pSampleCompTimeOffsets) != 0) + { + timeOffsetFlag = true; + } + + const auto& pSample = m_trackRunAtom->GetSampleDetails(); + pDts.reserve(pSample.size()); + if (timeOffsetFlag) + { + DeltaPts.reserve(pSample.size()); + } + for (const auto& sample : pSample) + { + pDts.push_back(time); + time += sample.version0.pDuration; + + if (timeOffsetFlag) + { + if (m_trackRunAtom->GetVersion() == 0) + { + DeltaPts.push_back(static_cast(sample.version0.pCompTimeOffset)); + } + else + { + DeltaPts.push_back(sample.version1.pCompTimeOffset); + } + } + } + + std::vector pPts; + pPts.reserve(pDts.size()); + if (timeOffsetFlag) + { + std::transform(pDts.begin(), pDts.end(), DeltaPts.begin(), std::back_inserter(pPts), + [](std::uint32_t tmpDts, std::int32_t thePtsDelta) { + return std::uint32_t(std::int32_t(tmpDts) + thePtsDelta); + }); + } + else + { + std::copy(pDts.begin(), pDts.end(), std::back_inserter(pPts)); + } + + std::uint64_t sampleId = 0; + // m_mediaPts.reserve(m_mediaPts.size() + pPts.size()); + for (auto pts : pPts) + { + m_mediaPts.insert(std::make_pair(pts, sampleId++)); + } +} + +DecodePts::PMap DecodePts::GetTime(const std::uint32_t ts) const +{ + if (ts == 0) + { + ISO_LOG(LOG_ERROR, "timeScale == 0\n"); + throw Exception(); + } + PMap pMap; + for (const auto& entry : m_moviePts) + { + int64_t p1 = (entry.first * 1000) / ts; + uint64_t p2 = entry.second; + pMap.insert(std::make_pair(p1, p2)); + } + 
return pMap; +} + +DecodePts::PMapTS DecodePts::GetTimeTS() const +{ + PMapTS ts; + for (const auto& entry : m_moviePts) + { + ts.insert(std::make_pair(entry.first, entry.second)); + } + return ts; +} + +void DecodePts::GetTimeTrackRun(const std::uint32_t ts, PMap& oldPMap) const +{ + std::uint64_t idx = 0; + if (oldPMap.size()) + { + idx = oldPMap.rbegin()->second + 1; + } + for (const auto& entry : m_moviePts) + { + int64_t p1 = (entry.first * 1000) / ts; + uint64_t p2 = idx + entry.second; + oldPMap.insert(std::make_pair(p1, p2)); + } + return; +} + +void DecodePts::GetTimeTrackRunTS(PMapTS& oldPMapTS) const +{ + std::uint64_t idx = 0; + if (oldPMapTS.size()) + { + idx = oldPMapTS.rbegin()->second + 1; + } + for (const auto& entry : m_moviePts) + { + int64_t p1 = entry.first; + uint64_t p2 = idx + entry.second; + oldPMapTS.insert(std::make_pair(p1, p2)); + } + return; +} + +std::uint64_t DecodePts::GetSpan() const +{ + return m_movieOffset; +} + + +void DecodePts::SetLocalTime(std::uint64_t pOffset) +{ + if (m_editListAtom != nullptr) + { + m_mediaOffset = static_cast(pOffset); + SetEditList(); + } + else + { + for (const auto& entry : m_mediaPts) + { + m_moviePts.insert(std::make_pair(PresentTimeTS(pOffset) + entry.first, entry.second)); + } + + if (m_moviePts.size() > 0) + { + auto last = std::prev(m_moviePts.end(), 1); + m_movieOffset = static_cast(last->first) + LastSampleDuration(); + } + else + { + m_movieOffset = 0; + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/DecPts.h b/src/isolib/atoms/DecPts.h new file mode 100644 index 00000000..6e84cfe5 --- /dev/null +++ b/src/isolib/atoms/DecPts.h @@ -0,0 +1,245 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DecPts.h +//! \brief: DecPts class +//! \detail: Used for decode presentation time +//! +//! Created on October 14, 2019, 13:39 PM +//! 
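For fragmented files the same class is driven once per track run; a companion sketch with the same assumptions as above, plus a base media decode time of zero chosen purely for illustration:

```cpp
#include <cstdint>
#include <vector>
#include "DecPts.h"

using namespace VCD::MP4;   // assumption, as above

DecodePts::PMap BuildFragmentTimeline(const std::vector<const TrackRunAtom*>& runs,
                                      std::uint32_t mediaTimescale)
{
    DecodePts::PMap timeline;   // sample numbering keeps growing across fragments
    for (const TrackRunAtom* trun : runs)
    {
        DecodePts pts;
        pts.SetAtom(trun);                              // per-fragment durations and offsets
        pts.UnravelTrackRun();                          // fills the local media PTS map
        pts.SetLocalTime(0);                            // hypothetical base media decode time of 0
        pts.GetTimeTrackRun(mediaTimescale, timeline);  // appends, continuing the sample index
    }
    return timeline;
}
```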
+#ifndef DECODEPTS_H +#define DECODEPTS_H + +#include +#include +#include "CompOffsetAtom.h" +#include "CompToDecAtom.h" +#include "FormAllocator.h" +#include "EditAtom.h" +#include "TimeToSampAtom.h" +#include "TrackRunAtom.h" + +VCD_MP4_BEGIN + +class DecodePts +{ +public: + typedef std::int64_t PresentTime; //!< Sample presentation time + typedef std::int64_t PresentTimeTS; //!< Sample presentation time + typedef std::uint64_t SampleIndex; //!< sample index + + using PMap = std::map; + + using PMapTS = std::map; + +public: + + //! + //! \brief Constructor + //! + DecodePts(); + + //! + //! \brief Destructor + //! + virtual ~DecodePts() = default; + + //! + //! \brief Set Atom + //! + //! \param [in] const TimeToSampleAtom* + //! time To Sample Atom value + //! + //! \return void + //! + void SetAtom(const TimeToSampleAtom* timeToSampleAtom); + void SetAtom(const CompositionOffsetAtom* compositionOffsetAtom); + void SetAtom(const CompositionToDecodeAtom* compositionToDecodeAtom); + void SetAtom(const EditListAtom* editListAtom, std::uint32_t movieTimescale, std::uint32_t mediaTimescale); + void SetAtom(const TrackRunAtom* trackRunAtom); + + //! + //! \brief Generate presentation timestamps + //! + //! \return bool + //! unravel success or not + //! + bool Unravel(); + + //! + //! \brief Generate presentation timestamps in track run atom + //! + //! \return void + //! + void UnravelTrackRun(); + + //! + //! \brief Get Span + //! + //! \return std::uint64_t + //! Span value + //! + std::uint64_t GetSpan() const; + + //! + //! \brief Set Local Time + //! + //! \param [in] std::uint64_t + //! Local Time value + //! + //! \return void + //! + void SetLocalTime(std::uint64_t ptsOffset); + + //! + //! \brief Get Time + //! + //! \param [in] std::uint16_t + //! Time value + //! + //! \return PMap + //! Presentation timestamps + //! + PMap GetTime(std::uint32_t timeScale) const; + + //! + //! \brief Get Time TS + //! + //! \return PMapTS + //! Time TS + //! + PMapTS GetTimeTS() const; + + //! + //! \brief Get Time in Track Run atom + //! + //! \param [in] std::uint32_t + //! time Scale + //! \param [out] PMap& + //! Presentation timestamps + //! + //! \return void + //! + void GetTimeTrackRun(std::uint32_t timeScale, PMap& oldPMap) const; + + //! + //! \brief Get Time in Track Run atom TS + //! + //! \param [out] PMapTS& + //! Presentation timestamps + //! + //! \return void + //! + void GetTimeTrackRunTS(PMapTS& oldPMapTS) const; + +private: + const EditListAtom* m_editListAtom; //!< edit List Atom + std::uint32_t m_movieTimescale; //!< movie Time scale + std::uint32_t m_mediaTimescale; //!< media Time scale + const TimeToSampleAtom* m_timeToSampAtom; //!< time To Sample Atom + const CompositionOffsetAtom* m_compOffsetAtom; //!< comp Offset Atom + const CompositionToDecodeAtom* m_compToDecodeAtom; //!< comp To Decode Atom + const TrackRunAtom* m_trackRunAtom; //!< track Run Atom + std::uint64_t m_movieOffset; //!< movie Offset + std::int64_t m_mediaOffset = 0; //!< media Offset + PMapTS m_mediaPts; //!< media Pts + PMapTS m_moviePts; //!< movie Pts + + //! + //! \brief Determine the duration of the last sample + //! + //! \return std::uint64_t + //! duration + //! + std::uint64_t LastSampleDuration() const; + + //! + //! \brief Set Edit + //! + //! \param [in] Entry& + //! Edit value + //! + //! \return void + //! + template + void SetEdit(Entry& entry); + + //! + //! \brief Set Empty Edit + //! + //! \param [in] Entry& + //! Empty Edit value + //! + //! \return void + //! 
+ template + void SetEmptyEdit(Entry& entry); + + //! + //! \brief Set Dwell Edit + //! + //! \param [in] Entry& + //! entry value + //! + //! \return void + //! + template + void SetDwellEdit(Entry& entry); + + //! + //! \brief Set Shift Edit + //! + //! \param [in] Entry& + //! entry + //! + //! \return void + //! + template + void SetShiftEdit(Entry& entry); + + //! + //! \brief From Movie To Media TS + //! + //! \param [in] Time + //! movieTS + //! + //! \return std::uint64_t + //! presentation time + //! + template + std::uint64_t FromMovieToMediaTS(Time movieTS) const; + + //! + //! \brief Set Edit List + //! + //! \return void + //! + void SetEditList(); +}; + +VCD_MP4_END; +#endif /* DECODEPTS_H */ diff --git a/src/isolib/atoms/EditAtom.cpp b/src/isolib/atoms/EditAtom.cpp new file mode 100644 index 00000000..3759a795 --- /dev/null +++ b/src/isolib/atoms/EditAtom.cpp @@ -0,0 +1,188 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: EditAtom.cpp +//! \brief: EditAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! 
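A short sketch of assembling the 'edts'/'elst' pair implemented below. It uses std::make_shared for brevity; the patch's own MakeShared helper (seen in DataRefAtom.cpp, presumably from FormAllocator.h) would be the allocator-aware alternative. Entry values are illustrative only.

```cpp
#include <memory>
#include "EditAtom.h"
#include "Stream.h"

using namespace VCD::MP4;   // assumption, as above

int main()
{
    EditListAtom::EntryVersion0 entry{};
    entry.m_segDuration       = 90000;  // in movie timescale units
    entry.m_mediaTime         = 0;      // -1 would denote an empty edit
    entry.m_mediaRateInt      = 1;      // normal playback rate
    entry.m_mediaRateFraction = 0;

    auto elst = std::make_shared<EditListAtom>();
    elst->AddEntry(entry);              // allowed: a new EditListAtom defaults to version 0

    EditAtom edts;
    edts.SetEditListAtom(elst);

    Stream out;
    edts.ToStream(out);                 // writes the 'edts' header, then the nested 'elst'
    return 0;
}
```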
+#include "EditAtom.h" + +#include + +VCD_MP4_BEGIN + +EditAtom::EditAtom() + : Atom("edts") + , m_editListAtom(nullptr) +{ +} + +void EditAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + if (m_editListAtom != nullptr) + { + m_editListAtom->ToStream(str); + } + UpdateSize(str); +} + +void EditAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + // if there a data available in the file + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + // Handle this Atom based on the type + if (AtomType == "elst") + { + m_editListAtom = MakeShared(); + m_editListAtom->FromStream(subBitstr); + } + } +} + +void EditAtom::SetEditListAtom(std::shared_ptr editListAtom) +{ + m_editListAtom = std::move(editListAtom); +} + +const EditListAtom* EditAtom::GetEditListAtom() const +{ + return m_editListAtom.get(); +} + +EditListAtom::EditListAtom() + : FullAtom("elst", 0, 0) +{ +} + +void EditListAtom::AddEntry(const EntryVersion0& entry) +{ + if (m_entryVersion1.size() != 0 || GetVersion() != 0) + { + ISO_LOG(LOG_ERROR, "Invalid attempt to add version0 EditListAtom entries.\n"); + throw Exception(); + } + m_entryVersion0.push_back(entry); +} + +void EditListAtom::AddEntry(const EntryVersion1& entry) +{ + if (m_entryVersion0.size() != 0 || GetVersion() != 1) + { + ISO_LOG(LOG_ERROR, "Invalid attempt to add version1 EditListAtom entries.\n"); + throw Exception(); + } + m_entryVersion1.push_back(entry); +} + +std::uint32_t EditListAtom::numEntry() const +{ + size_t size = 0; + switch (GetVersion()) + { + case 0: + size = m_entryVersion0.size(); + break; + case 1: + size = m_entryVersion1.size(); + break; + default: + ISO_LOG(LOG_ERROR, "Not supported EditListAtom entry version (only 0 and 1 are supported).\n"); + throw Exception(); + } + return static_cast(size); +} + +void EditListAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + if (m_entryVersion0.empty() == false) + { + str.Write32(static_cast(m_entryVersion0.size())); + for (const auto& entry : m_entryVersion0) + { + str.Write32(entry.m_segDuration); + str.Write32(static_cast(entry.m_mediaTime)); + str.Write16(entry.m_mediaRateInt); + str.Write16(entry.m_mediaRateFraction); + } + } + else if (m_entryVersion1.empty() == false) + { + str.Write32(static_cast(m_entryVersion1.size())); + for (const auto& entry : m_entryVersion0) + { + str.Write64(entry.m_segDuration); + str.Write64(static_cast(entry.m_mediaTime)); + str.Write16(entry.m_mediaRateInt); + str.Write16(entry.m_mediaRateFraction); + } + } + UpdateSize(str); +} + +void EditListAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + const std::uint32_t entryCount = str.Read32(); + + if (GetVersion() == 0) + { + for (std::uint32_t i = 0; i < entryCount; ++i) + { + EntryVersion0 entryVersion0; + entryVersion0.m_segDuration = str.Read32(); + entryVersion0.m_mediaTime = static_cast(str.Read32()); + entryVersion0.m_mediaRateInt = str.Read16(); + entryVersion0.m_mediaRateFraction = str.Read16(); + m_entryVersion0.push_back(entryVersion0); + } + } + else if (GetVersion() == 1) + { + for (uint32_t i = 0; i < entryCount; ++i) + { + EntryVersion1 entryVersion1; + entryVersion1.m_segDuration = str.Read64(); + entryVersion1.m_mediaTime = static_cast(str.Read64()); + entryVersion1.m_mediaRateInt = str.Read16(); + entryVersion1.m_mediaRateFraction = str.Read16(); + m_entryVersion1.push_back(entryVersion1); + } + } +} + +VCD_MP4_END \ No newline at end of file diff --git 
a/src/isolib/atoms/EditAtom.h b/src/isolib/atoms/EditAtom.h new file mode 100644 index 00000000..bf594142 --- /dev/null +++ b/src/isolib/atoms/EditAtom.h @@ -0,0 +1,208 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: EditAtom.h +//! \brief: Edit List Atom class +//! \detail: 'elst' Atom implementation +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef EDITATOM_H +#define EDITATOM_H + +#include "Atom.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class EditListAtom : public FullAtom +{ +public: + struct EntryVersion0 //!< Edit List Entry Format version 0 + { + std::uint32_t m_segDuration; + std::int32_t m_mediaTime; + std::uint16_t m_mediaRateInt; + std::uint16_t m_mediaRateFraction; + }; + + struct EntryVersion1 //!< Edit List Entry Format version 1 + { + std::uint64_t m_segDuration; + std::int64_t m_mediaTime; + std::uint16_t m_mediaRateInt; + std::uint16_t m_mediaRateFraction; + }; + + //! + //! \brief Constructor + //! + EditListAtom(); + + //! + //! \brief Destructor + //! + virtual ~EditListAtom() = default; + + //! + //! \brief Add Entry + //! + //! \param [in] const EntryVersion0& + //! entry + //! + //! \return void + //! + void AddEntry(const EntryVersion0& entry); + + //! + //! \brief Add Entry + //! + //! \param [in] const EntryVersion1& + //! entry + //! + //! \return void + //! + void AddEntry(const EntryVersion1& entry); + + //! + //! \brief Get number of entries + //! + //! \return std::uint32_t + //! number of entries + //! + std::uint32_t numEntry() const; + + //! + //! \brief Get Entry + //! + //! \param [in] const std::uint32_t + //! index + //! + //! \return const T& + //! entry + //! + template + const T& GetEntry(const std::uint32_t index) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ void FromStream(Stream& str); + +private: + std::vector m_entryVersion0; //!< vector of edit list entries of type verison 0 + std::vector m_entryVersion1; //!< vector of edit list entries of type verison 1 +}; + +template +const T& EditListAtom::GetEntry(const std::uint32_t index) const +{ + if (std::is_same::value) + { + return (const T&) m_entryVersion0.at(index); + } + + if (std::is_same::value) + { + return (const T&) m_entryVersion1.at(index); + } +} + +class EditAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + EditAtom(); + + //! + //! \brief Destructor + //! + virtual ~EditAtom() = default; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + + //! + //! \brief Set Edit List Atom + //! + //! \param [in] std::shared_ptr + //! edit List Atom + //! + //! \return void + //! + void SetEditListAtom(std::shared_ptr editListAtom); + + //! + //! \brief Get Edit List Atom + //! + //! \return const EditListAtom* + //! EditListAtom + //! + const EditListAtom* GetEditListAtom() const; + +private: + std::shared_ptr m_editListAtom; //!< Edit List Atom pointer +}; + +VCD_MP4_END; +#endif /* EDITATOM_H */ diff --git a/src/isolib/atoms/ElemStreamDescAtom.cpp b/src/isolib/atoms/ElemStreamDescAtom.cpp new file mode 100644 index 00000000..f2a42924 --- /dev/null +++ b/src/isolib/atoms/ElemStreamDescAtom.cpp @@ -0,0 +1,288 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ElemStreamDescAtom.cpp +//! \brief: ElemStreamDescAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
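A hedged sketch of filling the 'esds' descriptor implemented below with a decoder specific info blob. The tag values (3 = ES descriptor, 4 = decoder config, 5 = decoder specific info) mirror the checks in FromStream(); the objectTypeIndication/streamType constants and the byte element type of DecodeSpec::info are assumptions not confirmed by this hunk, and ToStream() recomputes the size fields, so the sizes set here are only rough starting values.

```cpp
#include <cstdint>
#include <vector>
#include "ElemStreamDescAtom.h"
#include "Stream.h"

using namespace VCD::MP4;   // assumption, as above

int main()
{
    ElementaryStreamDescriptorAtom esds;

    ElementaryStreamDescriptorAtom::ES_Params es{};
    es.descrFlag         = 3;               // ES descriptor tag, checked in FromStream()
    es.id                = 1;
    es.decConfig.flag    = 4;               // decoder config descriptor tag
    es.decConfig.idc     = 0x40;            // assumption: MPEG-4 AAC objectTypeIndication
    es.decConfig.strType = 0x05;            // assumption: audio streamType
    es.decConfig.info.flag = 5;             // decoder specific info tag
    es.decConfig.info.info = {0x12, 0x10};  // hypothetical AudioSpecificConfig bytes
    es.decConfig.info.size = static_cast<std::uint32_t>(es.decConfig.info.info.size());
    es.size = 32;                           // rough value; ToStream() iterates until sizes converge
    es.decConfig.size = 16;

    esds.SetESDescriptor(es);

    Stream out;
    esds.ToStream(out);

    std::vector<std::uint8_t> config;       // assumption: byte vector, matching GetOneParameterSet()
    bool ok = esds.GetOneParameterSet(config);  // true because info.flag == 5
    return ok ? 0 : 1;
}
```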
+ +#include "ElemStreamDescAtom.h" +#include +#include +#include "Stream.h" + +VCD_MP4_BEGIN + +ElementaryStreamDescriptorAtom::ElementaryStreamDescriptorAtom() + : FullAtom("esds", 0, 0) + , m_ES_Params() +{ +} + +ElementaryStreamDescriptorAtom::ElementaryStreamDescriptorAtom(const ElementaryStreamDescriptorAtom& atom) + : FullAtom(atom.GetType(), 0, 0) + , m_ES_Params(atom.m_ES_Params) + , m_otherinfo(atom.m_otherinfo) +{ +} + +bool ElementaryStreamDescriptorAtom::GetOneParameterSet(std::vector& byteStream) const +{ + if (m_ES_Params.decConfig.info.flag == 5) + { + byteStream = m_ES_Params.decConfig.info.info; + return true; + } + else + { + return false; + } +} + +void ElementaryStreamDescriptorAtom::SetESDescriptor(ElementaryStreamDescriptorAtom::ES_Params esDescriptor) +{ + m_ES_Params = esDescriptor; +} + +ElementaryStreamDescriptorAtom::ES_Params ElementaryStreamDescriptorAtom::GetESDescriptor() const +{ + return m_ES_Params; +} + +int HighestBit(std::uint32_t value) +{ + int n = 0; + while (value) + { + value >>= 1; + ++n; + } + return n; +} + +uint64_t WriteSize(Stream& str, std::uint32_t size) +{ + uint64_t sizeSize = 0; + int currentBit = (std::max(0, HighestBit(size) - 1)) / 7 * 7; + bool hasMore; + do + { + hasMore = (size >> (currentBit - 7)) != 0; + str.Write8(((size >> currentBit) & 0x7F) | (hasMore ? 1u << 7 : 0)); + currentBit -= 7; + ++sizeSize; + } while (hasMore); + return sizeSize; +} + +void WriteDecodeSpec(Stream& str, const ElementaryStreamDescriptorAtom::DecodeSpec& info) +{ + str.Write8(info.flag); + WriteSize(str, info.size); + str.WriteArray(info.info, info.size); +} + +void ElementaryStreamDescriptorAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write8(m_ES_Params.descrFlag); + + bool sizeConverge = false; + std::uint64_t esSizeSize; + std::uint32_t esDescriptorSize = m_ES_Params.size; + + Stream esStr; + + while (!sizeConverge) + { + esStr.Clear(); + esSizeSize = WriteSize(esStr, esDescriptorSize); + esStr.Write16(m_ES_Params.id); + esStr.Write8(m_ES_Params.flags); + if (m_ES_Params.flags & 0x80) + { + esStr.Write16(m_ES_Params.depend); + } + + if (m_ES_Params.flags & 0x40) + { + esStr.Write8(m_ES_Params.URLlen); + if (m_ES_Params.URLlen) + { + esStr.WriteString(m_ES_Params.URL); + } + } + + if (m_ES_Params.flags & 0x20) + { + esStr.Write16(m_ES_Params.OCR_Id); + } + + esStr.Write8(m_ES_Params.decConfig.flag); + + Stream decStr; + std::uint64_t decConfigSize = m_ES_Params.decConfig.size; + std::uint64_t decConfigSizeSize; + bool decSize = false; + while (!decSize) + { + decStr.Clear(); + decConfigSizeSize = WriteSize(decStr, static_cast(decConfigSize)); + decStr.Write8(m_ES_Params.decConfig.idc); + decStr.Write8((m_ES_Params.decConfig.strType << 2) | 0x01); + decStr.Write24(m_ES_Params.decConfig.bufferSizeDB); + decStr.Write32(m_ES_Params.decConfig.maxBitrate); + decStr.Write32(m_ES_Params.decConfig.avgBitrate); + + if (m_ES_Params.decConfig.info.flag == 5) + { + WriteDecodeSpec(decStr, m_ES_Params.decConfig.info); + } + + for (const auto& info : m_otherinfo) + { + WriteDecodeSpec(decStr, info); + } + + decSize = decStr.GetSize() == std::uint64_t(decConfigSize) + decConfigSizeSize; + + if (!decSize) + { + decConfigSize = decStr.GetSize() - decConfigSizeSize; + } + } + esStr.WriteStream(decStr); + + sizeConverge = esStr.GetSize() == std::uint64_t(esDescriptorSize) + esSizeSize; + + if (!sizeConverge) + { + esDescriptorSize = std::uint32_t(esStr.GetSize() - esSizeSize); + } + } + str.WriteStream(esStr); + UpdateSize(str); +} + +void 
ElementaryStreamDescriptorAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + m_ES_Params.descrFlag = str.Read8(); + if (m_ES_Params.descrFlag != 3) // descrFlag + { + ISO_LOG(LOG_ERROR, "ElementaryStreamDescritorAtom ES_Params.descrFlag not valid\n"); + throw Exception(); + } + std::uint8_t readByte = 0; + std::uint32_t size = 0; + do + { + readByte = str.Read8(); + std::uint8_t sizeByte = (readByte & 0x7F); + size = (size << 7) | sizeByte; + } while (readByte & 0x80); + + m_ES_Params.size = size; + m_ES_Params.id = str.Read16(); + m_ES_Params.flags = str.Read8(); + + if (m_ES_Params.flags & 0x80) + { + m_ES_Params.depend = str.Read16(); + } + + if (m_ES_Params.flags & 0x40) + { + m_ES_Params.URLlen = str.Read8(); + if (m_ES_Params.URLlen) + { + str.ReadStringWithLen(m_ES_Params.URL, m_ES_Params.URLlen); + } + } + + if (m_ES_Params.flags & 0x20) + { + m_ES_Params.OCR_Id = str.Read16(); + } + + m_ES_Params.decConfig.flag = str.Read8(); + if (m_ES_Params.decConfig.flag != 4) // flag + { + ISO_LOG(LOG_ERROR, "ElementaryStreamDescritorAtom DecoderConfig.flag not valid\n"); + throw Exception(); + } + + readByte = 0; + size = 0; + do + { + readByte = str.Read8(); + std::uint8_t sizeByte = (readByte & 0x7f); + size = (size << 7) | sizeByte; + } while (readByte & 0x80); + + m_ES_Params.decConfig.size = size; + m_ES_Params.decConfig.idc = str.Read8(); + m_ES_Params.decConfig.strType = (str.Read8() >> 2); + m_ES_Params.decConfig.bufferSizeDB = str.Read24(); + m_ES_Params.decConfig.maxBitrate = str.Read32(); + m_ES_Params.decConfig.avgBitrate = str.Read32(); + + while (str.BytesRemain()) + { + std::uint8_t tag = str.Read8(); + + readByte = 0; + size = 0; + do + { + readByte = str.Read8(); + std::uint8_t sizeByte = (readByte & 0x7f); + size = (size << 7) | sizeByte; + } while (readByte & 0x80); + + DecodeSpec info; + + info.flag = tag; + info.size = size; + str.ReadArray(info.info, info.size); + + if (tag == 5) + { + m_ES_Params.decConfig.info = std::move(info); + } + else + { + m_otherinfo.push_back(std::move(info)); + } + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/ElemStreamDescAtom.h b/src/isolib/atoms/ElemStreamDescAtom.h new file mode 100644 index 00000000..ff0c5044 --- /dev/null +++ b/src/isolib/atoms/ElemStreamDescAtom.h @@ -0,0 +1,149 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ElemStreamDescAtom.h +//! \brief: Elementary Stream Descriptor Atom +//! \detail: 'esds' Atom containing the ES_Params +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _ELEMENTARYSTREAMDESCRIPTORATOM_H_ +#define _ELEMENTARYSTREAMDESCRIPTORATOM_H_ + +#include +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class ElementaryStreamDescriptorAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ElementaryStreamDescriptorAtom(); + ElementaryStreamDescriptorAtom(const ElementaryStreamDescriptorAtom& Atom); + + ElementaryStreamDescriptorAtom& operator=(const ElementaryStreamDescriptorAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~ElementaryStreamDescriptorAtom() = default; + + struct DecodeSpec //!< decoder specification + { + std::uint8_t flag = 0; + std::uint32_t size = 0; + std::vector info; + }; + + struct DecoderConfig //!< decoder configuration + { + std::uint8_t flag = 0; + std::uint32_t size = 0; + std::uint8_t idc = 0; + std::uint8_t strType = 0; + std::uint32_t bufferSizeDB = 0; + std::uint32_t maxBitrate = 0; + std::uint32_t avgBitrate = 0; + DecodeSpec info; + }; + + struct ES_Params //!< ES parameters + { + std::uint8_t descrFlag = 0; + std::uint32_t size = 0; + std::uint16_t id = 0; + std::uint8_t flags = 0; + std::uint16_t depend = 0; + std::uint8_t URLlen = 0; + std::string URL; + std::uint16_t OCR_Id = 0; + DecoderConfig decConfig; + }; + + //! + //! \brief get one parameters set + //! + //! \param [in] std::vector& + //! byte stream + //! + //! \return bool + //! operation success or not + //! + bool GetOneParameterSet(std::vector& byteStream) const; + + //! + //! \brief Set ES Descriptor + //! + //! \param [in] ElementaryStreamDescriptorAtom::ES_Params + //! es Descriptor + //! + //! \return void + //! + void SetESDescriptor(ElementaryStreamDescriptorAtom::ES_Params esDescriptor); + + //! + //! \brief Get ES Descriptor + //! + //! \return ES_Params + //! ES Descriptor + //! + ES_Params GetESDescriptor() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& output); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& input); + +private: + ES_Params m_ES_Params; //!< ES_Params + std::list m_otherinfo; //!< other information +}; + +VCD_MP4_END; +#endif /* _ELEMENTARYSTREAMDESCRIPTORATOM_H_ */ diff --git a/src/isolib/atoms/FormAllocator.cpp b/src/isolib/atoms/FormAllocator.cpp new file mode 100644 index 00000000..682adfae --- /dev/null +++ b/src/isolib/atoms/FormAllocator.cpp @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: FormAllocator.cpp +//! \brief: FormAllocator class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +class DefaultAllocator : public FormAllocator +{ +public: + DefaultAllocator() + { + } + ~DefaultAllocator() + { + } + + void* allocate(size_t n, size_t size) override + { + return malloc(n * size); + } + void deallocate(void* ptr) override + { + free(ptr); + } +}; + +char defAllocData[sizeof(DefaultAllocator)]; +FormAllocator* defAlloc; +static FormAllocator* formAlloc; + +FormAllocator* GetDefaultAllocator() +{ + if (!defAlloc) + { + defAlloc = new (defAllocData) DefaultAllocator(); + } + return defAlloc; +} + +FormAllocator* GetFormAllocator() +{ + if (!formAlloc) + { + defAlloc = GetDefaultAllocator(); + formAlloc = defAlloc; + } + return formAlloc; +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/FormAllocator.h b/src/isolib/atoms/FormAllocator.h new file mode 100644 index 00000000..583ee27d --- /dev/null +++ b/src/isolib/atoms/FormAllocator.h @@ -0,0 +1,201 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: FormAllocator.h +//! \brief: Allocate Atom class. +//! \detail: Used for basic atom allocator and std lib operation. +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef FORMALLOCATOR_H +#define FORMALLOCATOR_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include "../include/Common.h" + +VCD_MP4_BEGIN + +class FormAllocator +{ +public: + + //! + //! \brief Constructor + //! + FormAllocator() {}; + + //! + //! \brief Destructor + //! + virtual ~FormAllocator() {}; + + //! + //! \brief allocator function + //! + //! \param [in] size_t + //! number + //! \param [in] size_t + //! size + //! + //! \return void* + //! + virtual void* allocate(size_t n, size_t size) = 0; + + //! + //! \brief deallocate function + //! + //! \param [in] void* + //! pointer + //! + //! \return void + //! + virtual void deallocate(void* ptr) = 0; +}; + +FormAllocator* GetDefaultAllocator(); +FormAllocator* GetFormAllocator(); + +template +class Allocator : public std::allocator +{ +public: + template + struct rebind + { + typedef Allocator other; + }; + + T* allocate(size_t n, const void* hint = 0) + { + (void) hint; + return static_cast(GetFormAllocator()->allocate(n, sizeof(T))); + } + + void deallocate(T* p, size_t n) + { + (void) n; + return GetFormAllocator()->deallocate(p); + } + + //! + //! \brief Constructor + //! + Allocator() : std::allocator() + { + } + Allocator(const Allocator& a) : std::allocator(a) + { + } + template + Allocator(const Allocator& a) : std::allocator(a) + { + } + + Allocator& operator=(const Allocator&) = default; + + //! + //! \brief Destructor + //! + ~Allocator() + { + } +}; + +template +class FormDelete : public std::default_delete +{ +public: + void operator()(T* ptr) const + { + ptr->~T(); + free(ptr); + } +}; + +template +std::shared_ptr MakeShared() +{ + return std::allocate_shared(std::allocator()); +} + +template +std::shared_ptr MakeShared(Args&&... args) +{ + return std::allocate_shared(std::allocator(), std::forward(args)...); +} + +template +using UniquePtr = std::unique_ptr>; + +template +UniquePtr MakeUnique(Args&&... args) +{ + return UniquePtr(new T(std::forward(args)...)); +} + +class Exception : public std::exception +{ +public: + + //! + //! \brief Constructor + //! + Exception() + : std::exception() + , m_data(NULL) + { + } + + //! + //! \brief Destructor + //! + Exception(const char* msg) + : std::exception() + , m_data(msg) + { + } + virtual const char* what() const noexcept + { + return m_data; + } + virtual ~Exception() + { + } + +private: + const char* m_data; //!< message log +}; + +VCD_MP4_END; +#endif /* FORMALLOCATOR_H */ diff --git a/src/isolib/atoms/FourCCInt.cpp b/src/isolib/atoms/FourCCInt.cpp new file mode 100644 index 00000000..80542aa3 --- /dev/null +++ b/src/isolib/atoms/FourCCInt.cpp @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: FourCCInt.cpp +//! \brief: FourCCInt class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! +#include +#include "FourCCInt.h" + +VCD_MP4_BEGIN + +FourCCInt::FourCCInt(const std::string& str) +{ + if (str.size() != 4) + { + ISO_LOG(LOG_ERROR, "FourCCInt given an std::string argument not exactly 4 characters long\n"); + throw Exception(); + } + m_value = 0 | (std::uint32_t(str[0]) << 24) | (std::uint32_t(str[1]) << 16) | (std::uint32_t(str[2]) << 8) | + (std::uint32_t(str[3]) << 0); +} + +std::string FourCCInt::GetString() const +{ + std::string str(4, ' '); + str[0] = char((m_value >> 24) & 0xff); + str[1] = char((m_value >> 16) & 0xff); + str[2] = char((m_value >> 8) & 0xff); + str[3] = char((m_value >> 0) & 0xff); + return str; +} + +std::ostream& operator<<(std::ostream& stream, FourCCInt fourcc) +{ + return stream << fourcc.GetString(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/FourCCInt.h b/src/isolib/atoms/FourCCInt.h new file mode 100644 index 00000000..e07fbbdc --- /dev/null +++ b/src/isolib/atoms/FourCCInt.h @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: FourCCInt.h +//! \brief: FourCCInt class +//! \detail: FourCCInt operator +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef FOURCCINT_H +#define FOURCCINT_H + +#include +#include +#include "FormAllocator.h" +#include "../include/Common.h" + +VCD_MP4_BEGIN + +class FourCCInt +{ +public: + + //! + //! \brief Constructor + //! + FourCCInt() + : m_value(0) + { + // nothing + } + + FourCCInt(std::uint32_t value) + : m_value(value) + { + // nothing + } + + FourCCInt(const char (&str)[5]) + : m_value(0 | (std::uint32_t(str[0]) << 24) | (std::uint32_t(str[1]) << 16) | (std::uint32_t(str[2]) << 8) | + (std::uint32_t(str[3]) << 0)) + { + // nothing + } + + explicit FourCCInt(const std::string& str); + + std::uint32_t GetUInt32() const + { + return m_value; + } + + std::string GetString() const; + + bool operator==(FourCCInt other) const + { + return m_value == other.m_value; + } + bool operator!=(FourCCInt other) const + { + return m_value != other.m_value; + } + bool operator>=(FourCCInt other) const + { + return m_value >= other.m_value; + } + bool operator<=(FourCCInt other) const + { + return m_value <= other.m_value; + } + bool operator>(FourCCInt other) const + { + return m_value > other.m_value; + } + bool operator<(FourCCInt other) const + { + return m_value < other.m_value; + } + +private: + std::uint32_t m_value; //!< value +}; + +std::ostream& operator<<(std::ostream& stream, FourCCInt fourcc); + +VCD_MP4_END; +#endif /* FOURCCINT_H */ diff --git a/src/isolib/atoms/FullAtom.cpp b/src/isolib/atoms/FullAtom.cpp new file mode 100644 index 00000000..2ffb91f1 --- /dev/null +++ b/src/isolib/atoms/FullAtom.cpp @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: FullAtom.cpp +//! \brief: FullAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! +#include "FullAtom.h" + +VCD_MP4_BEGIN + +FullAtom::FullAtom(FourCCInt AtomType, uint8_t version, uint32_t flags) + : Atom(AtomType) + , m_version(version) + , m_flags(flags) +{ +} + +void FullAtom::SetVersion(uint8_t version) +{ + m_version = version; +} + +uint8_t FullAtom::GetVersion() const +{ + return m_version; +} + +void FullAtom::SetFlags(uint32_t flags) +{ + m_flags = flags; +} + +uint32_t FullAtom::GetFlags() const +{ + return m_flags; +} + +void FullAtom::WriteFullAtomHeader(Stream& str) +{ + WriteAtomHeader(str); + + str.Write8(m_version); + str.Write24(m_flags); +} + +void FullAtom::ParseFullAtomHeader(Stream& str) +{ + ParseAtomHeader(str); + + m_version = str.Read8(); + m_flags = str.Read24(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/FullAtom.h b/src/isolib/atoms/FullAtom.h new file mode 100644 index 00000000..ca48dfa7 --- /dev/null +++ b/src/isolib/atoms/FullAtom.h @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: FullAtom.h +//! \brief: FullAtom class +//! \detail: Full basic Atom difinition. +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef FULLATOM_H +#define FULLATOM_H + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" + +#include + +VCD_MP4_BEGIN + +class FullAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + FullAtom(FourCCInt AtomType, std::uint8_t version, std::uint32_t flags = 0); + + //! + //! \brief Destructor + //! + virtual ~FullAtom() = default; + + //! + //! 
\brief Set version + //! + //! \param [in] std::uint8_t + //! version value + //! + //! \return void + //! + void SetVersion(std::uint8_t version); + + //! + //! \brief Get version + //! + //! \return std::uint8_t + //! version + //! + std::uint8_t GetVersion() const; + + //! + //! \brief Set Flags + //! + //! \param [in] std::uint32_t + //! Flags value + //! + //! \return void + //! + void SetFlags(std::uint32_t flags); + + //! + //! \brief Get Flags + //! + //! \return std::uint32_t + //! Flags + //! + std::uint32_t GetFlags() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str) = 0; + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str) = 0; + + //! + //! \brief Parse Full atom header information + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ParseFullAtomHeader(Stream& str); + +protected: + + //! + //! \brief Write Full atom header information + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void WriteFullAtomHeader(Stream& str); + +private: + std::uint8_t m_version; //!< version field of the full Atom header + std::uint32_t m_flags; //!< Flags field of the full Atom header. +}; + +VCD_MP4_END; +#endif /* FULLATOM_H */ diff --git a/src/isolib/atoms/HandlerAtom.cpp b/src/isolib/atoms/HandlerAtom.cpp new file mode 100644 index 00000000..84fa5706 --- /dev/null +++ b/src/isolib/atoms/HandlerAtom.cpp @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: HandlerAtom.cpp +//! \brief: HandlerAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
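+// The 'hdlr' payload written in ToStream() below is: pre_defined(32) = 0,
+// handler_type (a FourCC such as 'vide' or 'soun'), three reserved 32-bit
+// words, then the handler name as a zero-terminated string. A minimal usage
+// sketch; the SetHandlerType()/SetName() names are assumed to be what
+// MEMBER_SETANDGET_FUNC_WITH_OPTION generates for this class:
+//
+//   HandlerAtom hdlr;
+//   hdlr.SetHandlerType(FourCCInt("vide"));   // video track handler
+//   hdlr.SetName("VideoHandler");
+//   Stream out;
+//   hdlr.ToStream(out);   // full-atom header + payload, size patched by UpdateSize()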
+ +#include "HandlerAtom.h" + + +VCD_MP4_BEGIN + +HandlerAtom::HandlerAtom() + : FullAtom("hdlr", 0, 0) + , m_handlerType() + , m_name("") +{ +} + +void HandlerAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write32(0); // pre_defined = 0 + str.Write32(m_handlerType.GetUInt32()); + str.Write32(0); // reserved = 0 + str.Write32(0); // reserved = 0 + str.Write32(0); // reserved = 0 + str.WriteZeroEndString(m_name); + + UpdateSize(str); +} + +void HandlerAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + str.Read32(); // pre_defined = 0 + m_handlerType = str.Read32(); + str.Read32(); // reserved = 0 + str.Read32(); // reserved = 0 + str.Read32(); // reserved = 0 + str.ReadZeroEndString(m_name); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/HandlerAtom.h b/src/isolib/atoms/HandlerAtom.h new file mode 100644 index 00000000..e3b9d693 --- /dev/null +++ b/src/isolib/atoms/HandlerAtom.h @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: HandlerAtom.h +//! \brief: Handler Atom class. +//! \detail: 'hdlr' Atom implementation +//! +//! Created on October 14, 2019, 13:39 PM +//! + +#ifndef HANDLERATOM_H +#define HANDLERATOM_H + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class HandlerAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + HandlerAtom(); + + //! + //! \brief Destructor + //! + virtual ~HandlerAtom() = default; + + //! + //! \brief Set and Get function for m_handlerType member + //! + //! \param [in] FourCCInt + //! value to set + //! \param [in] m_handlerType + //! m_handlerType member in class + //! \param [in] HandlerType + //! m_handlerType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(FourCCInt, m_handlerType, HandlerType, const); + + //! + //! \brief Set and Get function for m_name member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_name + //! m_name member in class + //! \param [in] Name + //! m_name name in class + //! \param [in] const or blank + //! 
return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_name, Name, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + +private: + FourCCInt m_handlerType; //!< type of the media handler + std::string m_name; //!< designated name of the media handler +}; + +VCD_MP4_END; +#endif /* HANDLERATOM_H */ diff --git a/src/isolib/atoms/HevcConfigAtom.cpp b/src/isolib/atoms/HevcConfigAtom.cpp new file mode 100644 index 00000000..5d30c978 --- /dev/null +++ b/src/isolib/atoms/HevcConfigAtom.cpp @@ -0,0 +1,407 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: HevcConfigAtom.cpp +//! \brief: HevcConfigAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
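+// ConfigSPS() below stores the average frame rate as the 16-bit fixed-point
+// value frameRate * 256 (i.e. frames per 256 seconds), clamped so it fits in
+// 16 bits. A minimal sketch of that conversion, using only the arithmetic
+// found in ConfigSPS():
+//
+//   float frameRate = 29.97f;
+//   float maxFR = ((float) 0xffff) / 256;                        // ~255.99 fps
+//   if (frameRate > maxFR) frameRate = maxFR;
+//   uint16_t avgFrameRate = (uint16_t)(frameRate * 256 + 0.5);   // 7672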
+ +#include "HevcConfigAtom.h" +#include "Stream.h" +#include "NalUtil.h" + +VCD_MP4_BEGIN + +HevcDecoderConfigurationRecord::HevcDecoderConfigurationRecord() + : m_configVersion(1) + , m_globalProfileSpace(0) + , m_globalTierFlag(0) + , m_globalProfileIdc(0) + , m_globalProfileCompatFlags(0) + , m_globalConstrainIdcFlags(6, 0) + , m_globalLevelIdc(0) + , m_minSpatialSegIdc(0) + , m_parallelismType(0) + , m_chromaFormat(0) + , m_picWidthSamples(0) + , m_picHeightSamples(0) + , m_confWinLeftOffset(0) + , m_confWinRightOffset(0) + , m_confWinTopOffset(0) + , m_confWinBottomOffset(0) + , m_bitDepthLuma(0) + , m_bitDepthChroma(0) + , m_avgFrameRate(0) + , m_constantFrameRate(0) + , m_numTemporalLayers(0) + , m_temporalIdNested(0) + , m_lengthSizeMinus1(0) + , m_nalArray() +{ +} + +void HevcDecoderConfigurationRecord::ConfigSPS(const std::vector &srcSps, float frameRate) +{ + unsigned int maxNumMinus1; + std::vector subLayerFlag(8, 0); + std::vector subLayerLevelFlag(8, 0); + std::vector sps = TransferStreamToRBSP(srcSps); + + auto maxFR = ((float) 0xffff) / 256; + if (frameRate > maxFR) + { + frameRate = maxFR; + } + m_avgFrameRate = static_cast(frameRate * 256 + 0.5); + m_constantFrameRate = 0; + m_lengthSizeMinus1 = 3; // NAL length fields are 4 bytes long (3+1) + + Stream str(sps); + + // NALU header + str.Read1(1); // forbidden_zero_bit + str.Read1(6); // nal_unit_type + str.Read1(6); // nuh_layer_id + str.Read1(3); // nuh_temporal_id_plus1 + + str.Read1(4); // sps_video_parametr_set_id -> not needed + maxNumMinus1 = str.Read1(3); // sps_max_sub_layers_minus1 + m_numTemporalLayers = static_cast(maxNumMinus1 + 1); + m_temporalIdNested = static_cast(str.Read1(1)); // sps_temporal_id_nesting_flag + + // start profile_tier_level parsing + + m_globalProfileSpace = static_cast(str.Read1(2)); // general_profile_space + m_globalTierFlag = static_cast(str.Read1(1)); // general_tier_flag + m_globalProfileIdc = static_cast(str.Read1(5)); // general_profile_idc + m_globalProfileCompatFlags = 0; + // general_profile_compatibility_flags (32 flags) + for (int i = 0; i < 32; i++) + { + m_globalProfileCompatFlags = (m_globalProfileCompatFlags << 1) | str.Read1(1); + } + // constrain_flags (48 flags) + for (unsigned int i = 0; i < 6; i++) + { + m_globalConstrainIdcFlags.at(i) = static_cast(str.Read1(8)); + } + m_globalLevelIdc = static_cast(str.Read1(8)); // general_level_idc + for (unsigned int i = 0; i < maxNumMinus1; i++) + { + subLayerFlag.at(i) = str.Read1(1); // sub_layer_profile_present_flag + subLayerLevelFlag.at(i) = str.Read1(1); // sub_layer_level_present_flag + } + if (maxNumMinus1 > 0) + { + for (unsigned int i = maxNumMinus1; i < 8; i++) + { + str.Read1(2); // reserved_zero_2bits + } + } + // The following sub-layer syntax element are not needed in the decoder + // configuration record + for (unsigned int i = 0; i < maxNumMinus1; i++) + { + if (subLayerFlag.at(i)) + { + str.Read1(2); // sub_layer_profile_space[i] + str.Read1(1); // sub_layer_tier_flag[i] + str.Read1(5); // sub_layer_profile_idc[i] + for (int j = 0; j < 32; j++) + { + str.Read1(1); // sub_layer_profile_compatibility_flag[i][j] + } + for (int j = 0; j < 6; j++) + { + str.Read1(8); // Constraint flags + } + } + if (subLayerLevelFlag.at(i)) + { + str.Read1(8); // sub_level_idc[i] + } + } + + // end profile_tier_level parsing + + str.ReadExpGolombCode(); // sps_seq_parameter_set_id + m_chromaFormat = static_cast(str.ReadExpGolombCode()); // chroma_format_idc + if (m_chromaFormat == 3) + { + str.Read1(1); // 
separate_colour_plane_flag + } + m_picWidthSamples = static_cast(str.ReadExpGolombCode()); // pic_width_in_luma_samples + m_picHeightSamples = static_cast(str.ReadExpGolombCode()); // pic_height_in_luma_samples + + if (str.Read1(1)) // conformance_window_flag + { + m_confWinLeftOffset = static_cast(str.ReadExpGolombCode()); // conf_win_left_offset + m_confWinRightOffset = static_cast(str.ReadExpGolombCode()); // conf_win_right_offset + m_confWinTopOffset = static_cast(str.ReadExpGolombCode()); // conf_win_top_offset + m_confWinBottomOffset = static_cast(str.ReadExpGolombCode()); // conf_win_bottom_offset + } + else + { + m_confWinLeftOffset = 0; + m_confWinRightOffset = 0; + m_confWinTopOffset = 0; + m_confWinBottomOffset = 0; + } + + m_bitDepthLuma = static_cast(str.ReadExpGolombCode()); // bit_depth_luma_minus8 + m_bitDepthChroma = static_cast(str.ReadExpGolombCode()); // bit_depth_chroma_minus8 + str.ReadExpGolombCode(); // log2_max_pic_order_cnt_lsb_minus4 + + m_minSpatialSegIdc = 0; + m_parallelismType = 0; +} + +void HevcDecoderConfigurationRecord::AddNalUnit(const std::vector &nalUnit, + const HevcNalDefs nalUnitType, + const uint8_t arrCom) +{ + NALs *nalArray = nullptr; + std::vector tmpNU; + unsigned int startCodeLen; + + // find array for the given NAL unit type + for (auto &i : m_nalArray) + { + if (static_cast(nalUnitType) == static_cast(i.nalUnitType)) + { + nalArray = &i; + break; + } + } + + // if an array is not present for the NAL unit type, create one + if (nullptr == nalArray) + { + NALs ATmp; + ATmp.arrayCompleteness = arrCom; + ATmp.nalUnitType = nalUnitType; + m_nalArray.push_back(ATmp); + nalArray = &m_nalArray.back(); + } + + startCodeLen = FindStartCodeLen(nalUnit); + tmpNU.insert(tmpNU.begin(), nalUnit.cbegin() + static_cast(startCodeLen), + nalUnit.cend()); // copy NAL data excluding potential start code + + // add NAL unit to the NAL unit array + nalArray->nalList.push_back(tmpNU); +} + +void HevcDecoderConfigurationRecord::WriteDecConfigRec(Stream &str) const +{ + str.Write1(m_configVersion, 8); + str.Write1(m_globalProfileSpace, 2); + str.Write1(m_globalTierFlag, 1); + str.Write1(m_globalProfileIdc, 5); + str.Write1(m_globalProfileCompatFlags, 32); + for (unsigned int i = 0; i < 6; i++) + { + str.Write1(m_globalConstrainIdcFlags.at(i), 8); + } + str.Write1(m_globalLevelIdc, 8); + str.Write1(0xf, 4); // reserved = '1111'b + str.Write1(m_minSpatialSegIdc, 12); + str.Write1(0x3f, 6); // reserved = '111111'b + str.Write1(m_parallelismType, 2); + str.Write1(0x3f, 6); // reserved = '111111'b + str.Write1(m_chromaFormat, 2); + str.Write1(0x1f, 5); // reserved = '11111'b + str.Write1(m_bitDepthLuma, 3); + str.Write1(0x1f, 5); // reserved = '11111'b + str.Write1(m_bitDepthChroma, 3); + str.Write1(m_avgFrameRate, 16); + str.Write1(m_constantFrameRate, 2); + str.Write1(m_numTemporalLayers, 3); + str.Write1(m_temporalIdNested, 1); + str.Write1(m_lengthSizeMinus1, 2); + + str.Write1(m_nalArray.size(), 8); + for (const auto &i : m_nalArray) + { + str.Write1(i.arrayCompleteness, 1); + str.Write1(0, 1); // reserved = 0 + str.Write1(static_cast(i.nalUnitType), 6); + str.Write1(static_cast(i.nalList.size()), 16); + for (const auto &j : i.nalList) + { + str.Write1(static_cast(j.size()), 16); + str.WriteArray(j, j.size()); // write parameter set NAL unit + } + } +} + +void HevcDecoderConfigurationRecord::ParseConfig(Stream &str) +{ + unsigned int numOfArrays; + + m_configVersion = static_cast(str.Read1(8)); + m_globalProfileSpace = static_cast(str.Read1(2)); + m_globalTierFlag = 
static_cast(str.Read1(1)); + m_globalProfileIdc = static_cast(str.Read1(5)); + m_globalProfileCompatFlags = str.Read1(32); + for (unsigned int i = 0; i < 6; i++) + { + m_globalConstrainIdcFlags.at(i) = static_cast(str.Read1(8)); + } + m_globalLevelIdc = static_cast(str.Read1(8)); + str.Read1(4); // reserved = '1111'b + m_minSpatialSegIdc = static_cast(str.Read1(12)); + str.Read1(6); // reserved = '111111'b + m_parallelismType = static_cast(str.Read1(2)); + str.Read1(6); // reserved = '111111'b + m_chromaFormat = static_cast(str.Read1(2)); + str.Read1(5); // reserved = '11111'b + m_bitDepthLuma = static_cast(str.Read1(3)); + str.Read1(5); // reserved = '11111'b + m_bitDepthChroma = static_cast(str.Read1(3)); + m_avgFrameRate = static_cast(str.Read1(16)); + m_constantFrameRate = static_cast(str.Read1(2)); + m_numTemporalLayers = static_cast(str.Read1(3)); + m_temporalIdNested = static_cast(str.Read1(1)); + m_lengthSizeMinus1 = static_cast(str.Read1(2)); + + numOfArrays = str.Read1(8); + for (unsigned int i = 0; i < numOfArrays; i++) + { + uint8_t arrCom; + HevcNalDefs type; + unsigned int numNalus; + + arrCom = static_cast(str.Read1(1)); + str.Read1(1); // reserved = 0 + type = (HevcNalDefs) str.Read1(6); + numNalus = str.Read1(16); + for (unsigned int j = 0; j < numNalus; j++) + { + std::vector nals; + unsigned int size; + + size = str.Read1(16); + nals.clear(); + str.ReadArray(nals, size); // read parameter set NAL unit + AddNalUnit(nals, type, arrCom); + } + } +} + +void HevcDecoderConfigurationRecord::GetOneParameterSet(std::vector &str, + const HevcNalDefs type) const +{ + for (const auto &arr : m_nalArray) + { + if (arr.nalUnitType == type && arr.nalList.size() > 0) + { + for (int i=0;i<3;i++) + { + str.push_back(0); + } + str.push_back(1); + str.insert(str.end(), arr.nalList.at(0).cbegin(), arr.nalList.at(0).cend()); + } + } +} + +uint16_t HevcDecoderConfigurationRecord::GetPicWidth() const +{ + // static const std::vector pWidth = {1, 2, 2, 1}; + std::vector pWidth; + pWidth.push_back(1); + pWidth.push_back(2); + pWidth.push_back(2); + pWidth.push_back(1); + uint16_t picWidthRet = m_picWidthSamples - pWidth.at(m_chromaFormat) * (m_confWinLeftOffset + m_confWinRightOffset); + return picWidthRet; +} + +uint16_t HevcDecoderConfigurationRecord::GetPicHeight() const +{ + // static const std::vector pHeight = {1, 2, 1, 1}; + std::vector pHeight; + pHeight.push_back(1); + pHeight.push_back(2); + pHeight.push_back(1); + pHeight.push_back(1); + uint16_t picHeightRet = m_picHeightSamples - pHeight.at(m_chromaFormat) * (m_confWinTopOffset + m_confWinBottomOffset); + return picHeightRet; +} + +uint16_t HevcDecoderConfigurationRecord::GetAvgFrameRate() const +{ + return m_avgFrameRate; +} + +std::uint8_t HevcDecoderConfigurationRecord::GetLengthSizeMinus1() const +{ + return m_lengthSizeMinus1; +} + +void HevcDecoderConfigurationRecord::GetConfigurationMap(ConfigurationMap &aMap) const +{ + std::vector sps; + std::vector pps; + std::vector vps; + GetOneParameterSet(sps, HevcNalDefs::SPS); + GetOneParameterSet(pps, HevcNalDefs::PPS); + GetOneParameterSet(vps, HevcNalDefs::VPS); + + aMap.insert({DecParam::HEVC_SPS, move(sps)}); + aMap.insert({DecParam::HEVC_PPS, move(pps)}); + aMap.insert({DecParam::HEVC_VPS, move(vps)}); +} + +HevcConfigurationAtom::HevcConfigurationAtom() + : Atom("hvcC") + , m_hevcConfig() +{ +} + +HevcConfigurationAtom::HevcConfigurationAtom(const HevcConfigurationAtom& atom) + : Atom(atom.GetType()) + , m_hevcConfig(atom.m_hevcConfig) +{ +} + +void 
HevcConfigurationAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + m_hevcConfig.WriteDecConfigRec(str); + UpdateSize(str); +} + +void HevcConfigurationAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + m_hevcConfig.ParseConfig(str); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/HevcConfigAtom.h b/src/isolib/atoms/HevcConfigAtom.h new file mode 100644 index 00000000..b6d96bc5 --- /dev/null +++ b/src/isolib/atoms/HevcConfigAtom.h @@ -0,0 +1,331 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: HevcConfigAtom.h +//! \brief: HEVC Configuration Atom class +//! \detail: 'hvcC' Atom implementation. +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _HEVCCONFIGURATIONATOM_H_ +#define _HEVCCONFIGURATIONATOM_H_ + +#include "Atom.h" +#include "FormAllocator.h" +#include "DecConfigRecord.h" +#include + +VCD_MP4_BEGIN + +class Stream; + +enum class HevcNalDefs : std::uint8_t //!< hevc nal unit definition +{ + SLICE_TRAIL_N = 0, // 0 + SLICE_TRAIL_R, // 1 + + SLICE_TSA_N, // 2 + SLICE_TSA_R, // 3 + + SLICE_STSA_N, // 4 + SLICE_STSA_R, // 5 + + SLICE_RADL_N, // 6 + SLICE_RADL_R, // 7 + + SLICE_RASL_N, // 8 + SLICE_RASL_R, // 9 + + VCL_N10, + VCL_R11, + VCL_N12, + VCL_R13, + VCL_N14, + VCL_R15, + + SLICE_BLA_W_LP, // 16 + SLICE_BLA_W_RADL, // 17 + SLICE_BLA_N_LP, // 18 + SLICE_IDR_W_RADL, // 19 + SLICE_IDR_N_LP, // 20 + SLICE_CRA, // 21 + IRAP_VCL22, + IRAP_VCL23, + + VCL24, + VCL25, + VCL26, + VCL27, + VCL28, + VCL29, + VCL30, + VCL31, + + VPS, // 32 + SPS, // 33 + PPS, // 34 + ACCESS_UNIT_DELIMITER, // 35 + EOS, // 36 + EOB, // 37 + FILLER_DATA, // 38 + PREFIX_SEI, // 39 + SUFFIX_SEI, // 40 + NVCL41, + NVCL42, + NVCL43, + NVCL44, + NVCL45, + NVCL46, + NVCL47, + UNFORM48, + UNFORM49, + UNFORM50, + UNFORM51, + UNFORM52, + UNFORM53, + UNFORM54, + UNFORM55, + UNFORM56, + UNFORM57, + UNFORM58, + UNFORM59, + UNFORM60, + UNFORM61, + UNFORM62, + UNFORM63, + INVALID +}; + +class HevcDecoderConfigurationRecord : public DecoderConfigurationRecord +{ +public: + + //! + //! \brief Constructor + //! + HevcDecoderConfigurationRecord(); + + //! + //! \brief Destructor + //! 
+ ~HevcDecoderConfigurationRecord() = default; + + //! + //! \brief Parse configuration information from a SPS NAL unit + //! + //! \param [in] const std::vector& + //! sps value + //! \param [in] float + //! frameRate + //! + //! \return void + //! + void ConfigSPS(const std::vector &sps, float frameRate); + + //! + //! \brief Add NAL unit to the NAL unit array + //! + //! \param [in] const std::vector& + //! sps value + //! \param [in] AvcNalDefs + //! nal unit type defs + //! \param [in] std::uint8_t + //! arrayCompleteness + //! + //! \return void + //! + void AddNalUnit(const std::vector &sps, HevcNalDefs nalUnitType, std::uint8_t arrayCompleteness); + + //! + //! \brief Write Decoder Configuration Record + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + void WriteDecConfigRec(Stream &str) const; + + //! + //! \brief Parse Decoder Configuration Record + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + void ParseConfig(Stream &str); + + //! + //! \brief get one parameters set + //! + //! \param [in] std::vector& + //! byte stream + //! \param [in] AvcNalDefs + //! nal unit type defs + //! + //! \return void + //! + void GetOneParameterSet(std::vector &byteStream, HevcNalDefs nalUnitType) const; + + //! + //! \brief Get Picture Width + //! + //! \return std::uint16_t + //! Picture Width + //! + std::uint16_t GetPicWidth() const; + + //! + //! \brief Get Picture Height + //! + //! \return std::uint16_t + //! Picture Height + //! + std::uint16_t GetPicHeight() const; + + //! + //! \brief Get Avg Frame Rate + //! + //! \return std::uint16_t + //! Avg Frame Rate + //! + std::uint16_t GetAvgFrameRate() const; + + //! + //! \brief Get Length Size Minus 1 + //! + //! \return std::uint8_t + //! LengthSizeMinus1 + //! + std::uint8_t GetLengthSizeMinus1() const; + + //! + //! \brief Parse Decoder Configuration Record + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! 
+ virtual void GetConfigurationMap(ConfigurationMap &aMap) const override; + +private: + + struct NALs //!< nal array + { + std::uint8_t arrayCompleteness = 0; + HevcNalDefs nalUnitType = HevcNalDefs::INVALID; + std::vector> nalList; + }; + + std::uint8_t m_configVersion; //!< configuration version + std::uint8_t m_globalProfileSpace; //!< global Profile Space + std::uint8_t m_globalTierFlag; //!< global Tier Flag + std::uint8_t m_globalProfileIdc; //!< global Profile Idc + std::uint32_t m_globalProfileCompatFlags; //!< global Profile Compat Flags + std::vector m_globalConstrainIdcFlags;//!< global Constrain Idc Flags + std::uint8_t m_globalLevelIdc; //!< global Level Idc + std::uint16_t m_minSpatialSegIdc; //!< min Spatial Seg Idc + std::uint8_t m_parallelismType; //!< parallelism Type + std::uint8_t m_chromaFormat; //!< chroma Format + std::uint16_t m_picWidthSamples; //!< picture Width Samples + std::uint16_t m_picHeightSamples; //!< picture Height Samples + std::uint16_t m_confWinLeftOffset; //!< conf Win Left Offset + std::uint16_t m_confWinRightOffset; //!< conf Win Right Offset + std::uint16_t m_confWinTopOffset; //!< conf Win Top Offset + std::uint16_t m_confWinBottomOffset; //!< conf Win Bottom Offset + std::uint8_t m_bitDepthLuma; //!< bit Depth Luma + std::uint8_t m_bitDepthChroma; //!< bit Depth Chroma + std::uint16_t m_avgFrameRate; //!< avg FrameRate + std::uint8_t m_constantFrameRate; //!< constant FrameRate + std::uint8_t m_numTemporalLayers; //!< num Temporal Layers + std::uint8_t m_temporalIdNested; //!< temporal Id Nested + std::uint8_t m_lengthSizeMinus1; //!< length Size Minus 1 + std::vector m_nalArray; //!< nal Array +}; + +class HevcConfigurationAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + HevcConfigurationAtom(); + HevcConfigurationAtom(const HevcConfigurationAtom& Atom); + + HevcConfigurationAtom& operator=(const HevcConfigurationAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~HevcConfigurationAtom() = default; + + //! + //! \brief Set and Get function for m_hevcConfig member + //! + //! \param [in] const HevcDecoderConfigurationRecord& + //! value to set + //! \param [in] m_hevcConfig + //! m_hevcConfig member in class + //! \param [in] Configuration + //! m_hevcConfig name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const HevcDecoderConfigurationRecord&, m_hevcConfig, Configuration, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + HevcDecoderConfigurationRecord m_hevcConfig; //!< HEVCConfigurationAtom field HEVCConfig +}; + +VCD_MP4_END; +#endif /* _HEVCCONFIGURATIONATOM_H_ */ diff --git a/src/isolib/atoms/HevcSampEntry.cpp b/src/isolib/atoms/HevcSampEntry.cpp new file mode 100644 index 00000000..ffc15f2c --- /dev/null +++ b/src/isolib/atoms/HevcSampEntry.cpp @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: HevcSampEntry.cpp +//! \brief: HevcSampEntry class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "HevcSampEntry.h" + + +VCD_MP4_BEGIN + +HevcSampleEntry::HevcSampleEntry() + : VisualSampleEntryAtom("hvc1", "HEVC Coding") + , m_hevcConfigurationAtom() + , m_isStereoscopic3DPresent(false) + , m_stereoscopic3DAtom() + , m_isSphericalVideoV2AtomPresent(false) + , m_sphericalVideoV2Atom() +{ +} + +HevcSampleEntry::HevcSampleEntry(const HevcSampleEntry& Atom) + : VisualSampleEntryAtom(Atom) + , m_hevcConfigurationAtom(Atom.m_hevcConfigurationAtom) + , m_isStereoscopic3DPresent(Atom.m_isStereoscopic3DPresent) + , m_stereoscopic3DAtom(Atom.m_stereoscopic3DAtom) + , m_isSphericalVideoV2AtomPresent(Atom.m_isSphericalVideoV2AtomPresent) + , m_sphericalVideoV2Atom(Atom.m_sphericalVideoV2Atom) +{ +} + +HevcConfigurationAtom& HevcSampleEntry::GetHevcConfigurationAtom() +{ + return m_hevcConfigurationAtom; +} + +void HevcSampleEntry::CreateStereoscopic3DAtom() +{ + m_isStereoscopic3DPresent = true; +} + +void HevcSampleEntry::CreateSphericalVideoV2Atom() +{ + m_isSphericalVideoV2AtomPresent = true; +} + +const Stereoscopic3D* HevcSampleEntry::GetStereoscopic3DAtom() const +{ + return (m_isStereoscopic3DPresent ? &m_stereoscopic3DAtom : nullptr); +} + +const SphericalVideoV2Atom* HevcSampleEntry::GetSphericalVideoV2Atom() const +{ + return (m_isSphericalVideoV2AtomPresent ? 
&m_sphericalVideoV2Atom : nullptr); +} + +void HevcSampleEntry::ToStream(Stream& str) +{ + VisualSampleEntryAtom::ToStream(str); + + m_hevcConfigurationAtom.ToStream(str); + + if (m_isStereoscopic3DPresent) + { + m_stereoscopic3DAtom.ToStream(str); + } + + if (m_isSphericalVideoV2AtomPresent) + { + m_sphericalVideoV2Atom.ToStream(str); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void HevcSampleEntry::FromStream(Stream& str) +{ + + VisualSampleEntryAtom::FromStream(str); + + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + Stream subStream = str.ReadSubAtomStream(AtomType); + + // Handle this Atom based on the type + if (AtomType == "hvcC") + { + m_hevcConfigurationAtom.FromStream(subStream); + } + else if (AtomType == "st3d") + { + m_stereoscopic3DAtom.FromStream(subStream); + m_isStereoscopic3DPresent = true; + } + else if (AtomType == "sv3d") + { + m_sphericalVideoV2Atom.FromStream(subStream); + m_isSphericalVideoV2AtomPresent = true; + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping unknown Atom of type '%s' inside HevcSampleEntry\n", type); + } + } +} + +HevcSampleEntry* HevcSampleEntry::Clone() const +{ + return (new HevcSampleEntry(*this)); +} + +const Atom* HevcSampleEntry::GetConfigurationAtom() const +{ + return &m_hevcConfigurationAtom; +} + +const DecoderConfigurationRecord* HevcSampleEntry::GetConfigurationRecord() const +{ + return &m_hevcConfigurationAtom.GetConfiguration(); +} + +VCD_MP4_END diff --git a/src/isolib/atoms/HevcSampEntry.h b/src/isolib/atoms/HevcSampEntry.h new file mode 100644 index 00000000..93406816 --- /dev/null +++ b/src/isolib/atoms/HevcSampEntry.h @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: HevcSampEntry.h +//! \brief: HEVC Sample Entry class. +//! \detail: 'hvc1' Atom implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
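+// FromStream() in HevcSampEntry.cpp dispatches on each contained sub-atom
+// ('hvcC', 'st3d', 'sv3d') and skips unknown types. A minimal read-side
+// sketch, assuming 'str' is a Stream positioned at an 'hvc1' sample entry:
+//
+//   HevcSampleEntry entry;
+//   entry.FromStream(str);   // parses 'hvcC' plus optional 'st3d' / 'sv3d'
+//   const HevcDecoderConfigurationRecord& rec =
+//       entry.GetHevcConfigurationAtom().GetConfiguration();
+//   uint16_t width  = rec.GetPicWidth();    // luma width minus conformance crop
+//   uint16_t height = rec.GetPicHeight();   // luma height minus conformance crop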
+ +#ifndef _HEVCSAMPLEENTRY_H_ +#define _HEVCSAMPLEENTRY_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "BasicVideoAtom.h" +#include "HevcConfigAtom.h" +#include "VisualSampEntryAtom.h" + +VCD_MP4_BEGIN + +class HevcSampleEntry : public VisualSampleEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + HevcSampleEntry(); + HevcSampleEntry(const HevcSampleEntry& other); + + HevcSampleEntry& operator=(const HevcSampleEntry&) = default; + + //! + //! \brief Destructor + //! + virtual ~HevcSampleEntry() = default; + + //! + //! \brief Get Configuration Record + //! + //! \return HevcConfigurationAtom& + //! DecoderConfigurationRecord value + //! + HevcConfigurationAtom& GetHevcConfigurationAtom(); + + //! + //! \brief Create Stereoscopic3D Atom + //! + //! \return void + //! + void CreateStereoscopic3DAtom(); + + //! + //! \brief Create SphericalVideo Atom + //! + //! \return void + //! + void CreateSphericalVideoV2Atom(); + + //! + //! \brief Get Stereoscopic3D Atom + //! + //! \return const Stereoscopic3D* + //! Stereoscopic3D Atom + //! + virtual const Stereoscopic3D* GetStereoscopic3DAtom() const override; + + //! + //! \brief Get SphericalVideo Atom + //! + //! \return const SphericalVideoV2Atom* + //! Stereoscopic3D Atom + //! + virtual const SphericalVideoV2Atom* GetSphericalVideoV2Atom() const override; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str) override; + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str) override; + + //! + //! \brief Get Copy of hevcSampleEntry + //! + //! \return hevcSampleEntry* + //! hevcSampleEntry Atom + //! + virtual HevcSampleEntry* Clone() const override; + + //! + //! \brief Get ConfigurationRecord + //! + //! \return const DecoderConfigurationRecord* + //! DecoderConfigurationRecord value + //! + virtual const DecoderConfigurationRecord* GetConfigurationRecord() const override; + + //! + //! \brief Get Configuration Atom + //! + //! \return const Atom* + //! Configuration Atom + //! + virtual const Atom* GetConfigurationAtom() const override; + +private: + HevcConfigurationAtom m_hevcConfigurationAtom; //!< hevc configuration atom + bool m_isStereoscopic3DPresent; //!< is Stereoscopic3D Present + Stereoscopic3D m_stereoscopic3DAtom; //!< stereoscopic3D Atom + bool m_isSphericalVideoV2AtomPresent; //!< is SphericalVideoV2Atom Present + SphericalVideoV2Atom m_sphericalVideoV2Atom; //!< spherical Video V2 Atom +}; + +VCD_MP4_END; +#endif /* _HEVCSAMPLEENTRY_H_ */ diff --git a/src/isolib/atoms/InitViewOrientationSampEntry.cpp b/src/isolib/atoms/InitViewOrientationSampEntry.cpp new file mode 100644 index 00000000..c3ab553a --- /dev/null +++ b/src/isolib/atoms/InitViewOrientationSampEntry.cpp @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: InitViewOrientationSampEntry.cpp +//! \brief: InitViewOrientationSampEntry class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "InitViewOrientationSampEntry.h" + +VCD_MP4_BEGIN + +InitViewOrient::InitViewOrient() + : SphereRegionSampleEntryAtom("invo") +{ + auto& config = GetSphereRegionConfig(); + config.SetShapeMode(SphereRegionConfigAtom::ShapeMode::FourGreatCircles); + config.SetDynamicRangeFlag(false); + config.SetStaticAzimuthRange(0); + config.SetStaticElevationRange(0); +} + +InitViewOrient* InitViewOrient::Clone() const +{ + return (new InitViewOrient(*this)); +} + +void InitViewOrient::ToStream(Stream& str) +{ + SphereRegionSampleEntryAtom::ToStream(str); + return; +} + +void InitViewOrient::FromStream(Stream& str) +{ + SphereRegionSampleEntryAtom::FromStream(str); + return; +} + +InitViewOrient::InitViewSample::InitViewSample() + : SphereRegionSample() +{ + regions.push_back(SphereRegion()); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/InitViewOrientationSampEntry.h b/src/isolib/atoms/InitViewOrientationSampEntry.h new file mode 100644 index 00000000..c188081b --- /dev/null +++ b/src/isolib/atoms/InitViewOrientationSampEntry.h @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: InitViewOrientationSampEntry.h +//! \brief: InitViewOrient sample description entry. +//! \detail: Used for Initial Viewing Orientation sample description entry +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _INITIALVIEWINGORIENTETAIONSAMPLEENTRYATOM_H_ +#define _INITIALVIEWINGORIENTETAIONSAMPLEENTRYATOM_H_ + +#include "Stream.h" +#include "CommonTypes.h" +#include "FormAllocator.h" +#include "SphereRegionSampEntryAtom.h" + +VCD_MP4_BEGIN + +class InitViewOrient : public SphereRegionSampleEntryAtom +{ +public: + struct InitViewSample : SphereRegionSample + { + bool refreshFlag = false; + + InitViewSample(); + }; + + //! + //! \brief Constructor + //! + InitViewOrient(); + + //! + //! \brief Destructor + //! + virtual ~InitViewOrient() = default; + + //! + //! \brief Get Copy of InitViewOrient + //! + //! \return InitViewOrient* + //! InitViewOrient Atom + //! + virtual InitViewOrient* Clone() const override; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: +}; + +VCD_MP4_END; +#endif /* _INITIALVIEWINGORIENTETAIONSAMPLEENTRYATOM_H_ */ diff --git a/src/isolib/atoms/ItemDataAtom.cpp b/src/isolib/atoms/ItemDataAtom.cpp new file mode 100644 index 00000000..46355f5e --- /dev/null +++ b/src/isolib/atoms/ItemDataAtom.cpp @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemDataAtom.cpp +//! \brief: ItemDataAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "ItemDataAtom.h" + +#include + +VCD_MP4_BEGIN + +ItemDataAtom::ItemDataAtom() + : Atom("idat") + , m_data() +{ +} + +bool ItemDataAtom::Read(std::vector& destination, const std::uint64_t offset, const std::uint64_t length) const +{ + if ((offset + length) > m_data.size()) + { + return false; + } + + destination.insert(destination.end(), m_data.cbegin() + static_cast(offset), + m_data.cbegin() + static_cast(offset + length)); + return true; +} + +bool ItemDataAtom::Read(uint8_t* destination, const std::uint64_t offset, const std::uint64_t length) const +{ + if ((offset + length) > m_data.size() || destination == nullptr) + { + return false; + } + + std::memcpy(destination, m_data.data() + static_cast(offset), length); + return true; +} + +std::uint64_t ItemDataAtom::AddData(const std::vector& data) +{ + const std::uint64_t offset = m_data.size(); + m_data.insert(m_data.end(), data.cbegin(), data.cend()); + + return offset; +} + +void ItemDataAtom::ToStream(Stream& str) +{ + // Do not write an empty Atom at all + if (m_data.size() == 0) + { + return; + } + + WriteAtomHeader(str); + str.WriteArray(m_data, m_data.size()); + UpdateSize(str); +} + +void ItemDataAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + str.ReadArray(m_data, str.BytesRemain()); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/ItemDataAtom.h b/src/isolib/atoms/ItemDataAtom.h new file mode 100644 index 00000000..c996d42d --- /dev/null +++ b/src/isolib/atoms/ItemDataAtom.h @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemDataAtom.h +//! \brief: The ItemDataAtom 'idat' Atom +//! \detail: Contains atom data +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _ITEMDATAATOM_H_ +#define _ITEMDATAATOM_H_ + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +class ItemDataAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + ItemDataAtom(); + + //! + //! \brief Destructor + //! + virtual ~ItemDataAtom() = default; + + //! + //! \brief Read data from atom + //! + //! \param [in] std::vector& + //! destination + //! \param [in] std::uint64_t + //! offset + //! \param [in] std::uint64_t + //! length + //! + //! \return bool + //! return data is copied successfully or not. + //! + bool Read(std::vector& destination, std::uint64_t offset, std::uint64_t length) const; + + //! + //! \brief Read data from atom + //! + //! \param [in] uint8_t* + //! destination + //! \param [in] const std::uint64_t + //! offset + //! \param [in] const std::uint64_t + //! length + //! + //! \return bool + //! return data is copied successfully or not. + //! + bool Read(uint8_t* destination, const std::uint64_t offset, const std::uint64_t length) const; + + //! + //! \brief Add item data to the atom + //! + //! \param [in] const std::vector& + //! data to be added + //! + //! \return std::uint64_t + //! offset of the data + //! + std::uint64_t AddData(const std::vector& data); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::vector m_data; //!< Data of stored items. +}; + +VCD_MP4_END; +#endif /* _ITEMDATAATOM_H_ */ \ No newline at end of file diff --git a/src/isolib/atoms/ItemInfoAtom.cpp b/src/isolib/atoms/ItemInfoAtom.cpp new file mode 100644 index 00000000..e490916a --- /dev/null +++ b/src/isolib/atoms/ItemInfoAtom.cpp @@ -0,0 +1,380 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemInfoAtom.cpp +//! \brief: ItemInfoAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "ItemInfoAtom.h" + +#include + +VCD_MP4_BEGIN + +ItemInfoAtom::ItemInfoAtom() + : ItemInfoAtom(0) +{ +} + +ItemInfoAtom::ItemInfoAtom(const uint8_t version) + : FullAtom("iinf", version, 0) + , m_itemInfoList() +{ +} + +uint32_t ItemInfoAtom::GetEntryCount() const +{ + return static_cast(m_itemInfoList.size()); +} + +std::vector ItemInfoAtom::GetItemIds() const +{ + std::vector itemIds; + for (const auto& entry : m_itemInfoList) + { + itemIds.push_back(entry.GetItemID()); + } + return itemIds; +} + +void ItemInfoAtom::AddItemInfoEntry(const ItemInfoEntry& infoEntry) +{ + m_itemInfoList.push_back(infoEntry); +} + +const ItemInfoEntry& ItemInfoAtom::GetItemInfoEntry(const std::uint32_t idx) const +{ + return m_itemInfoList.at(idx); +} + +ItemInfoEntry ItemInfoAtom::GetItemById(const uint32_t itemId) const +{ + for (const auto& item : m_itemInfoList) + { + if (item.GetItemID() == itemId) + { + return item; + } + } + ISO_LOG(LOG_ERROR, "Requested ItemInfoEntry not found.\n"); + throw Exception(); +} + +void ItemInfoAtom::Clear() +{ + m_itemInfoList.clear(); +} + +void ItemInfoAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + if (GetVersion() == 0) + { + str.Write16(static_cast(m_itemInfoList.size())); + } + else + { + str.Write32(static_cast(m_itemInfoList.size())); + } + + for (auto& entry : m_itemInfoList) + { + entry.ToStream(str); + } + + UpdateSize(str); +} + +void ItemInfoAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + size_t cnt = 0; + uint8_t version = GetVersion(); + if (version == 0) + { + cnt = str.Read16(); + } + else + { + cnt = str.Read32(); + } + for (size_t i = 0; i < cnt; ++i) + { + ItemInfoEntry infoEntry; + infoEntry.FromStream(str); + AddItemInfoEntry(infoEntry); + } +} + +unsigned int ItemInfoAtom::CountNumberOfItems(FourCCInt itemType) +{ + unsigned int cnt = 0; + for (const auto& entry : m_itemInfoList) + { + FourCCInt type = entry.GetItemType(); + if (type == itemType) + { + ++cnt; + } + } + return cnt; +} + +// return item and its index for the specified itemType and itemID +ItemInfoEntry* ItemInfoAtom::FindItemWithTypeAndID(FourCCInt type, const unsigned int itemID, unsigned int& index) +{ + ItemInfoEntry* entry = nullptr; + unsigned int idx = 0; + + for (auto i = m_itemInfoList.begin(); i != m_itemInfoList.end(); ++i) + { + if (i->GetItemType() == type) + { + if (i->GetItemID() == itemID) + { + entry = &(*i); + index = idx; + break; + } + else + { + ++idx; + } + } + } + return entry; +} + +ItemInfoEntry::ItemInfoEntry() + : FullAtom("infe", 0, 0) + , m_itemID(0) + , m_itemProtectionIndex(0) + , 
m_itemName() + , m_contentType() + , m_contentEncoding() + , m_extensionType() + , m_itemType() + , m_itemUriType() +{ +} + +ItemInfoEntry::~ItemInfoEntry() +{ +} + +void ItemInfoEntry::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + if (GetVersion() == 0 || GetVersion() == 1) + { + str.Write16(static_cast(m_itemID)); + str.Write16(m_itemProtectionIndex); + str.WriteZeroEndString(m_itemName); + str.WriteZeroEndString(m_contentType); + str.WriteZeroEndString(m_contentEncoding); + } + if (GetVersion() == 1) + { + str.WriteString(m_extensionType); + m_itemInfoExtension->ToStream(str); + } + if (GetVersion() >= 2) + { + if (GetVersion() == 2) + { + str.Write16(static_cast(m_itemID)); + } + else if (GetVersion() == 3) + { + str.Write32(m_itemID); + } + str.Write16(m_itemProtectionIndex); + str.Write32(m_itemType.GetUInt32()); + str.WriteZeroEndString(m_itemName); + if (m_itemType == "mime") + { + str.WriteZeroEndString(m_contentType); + str.WriteZeroEndString(m_contentEncoding); + } + else if (m_itemType == "uri ") + { + str.WriteZeroEndString(m_itemUriType); + } + } + + UpdateSize(str); +} + +void FDItemInfoExtension::ToStream(Stream& str) +{ + str.WriteZeroEndString(m_contentLocation); + str.WriteZeroEndString(m_contentMD5); + str.Write32((uint32_t)((m_contentLength >> 32) & 0xffffffff)); + str.Write32((uint32_t)(m_contentLength & 0xffffffff)); + str.Write32((uint32_t)((m_transferLength >> 32) & 0xffffffff)); + str.Write32((uint32_t)(m_transferLength & 0xffffffff)); + str.Write8(m_entryCount); + for (unsigned int i = 0; i < m_entryCount; i++) + { + str.Write32(m_groupID.at(i)); + } +} + +void ItemInfoEntry::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + if (GetVersion() == 0 || GetVersion() == 1) + { + m_itemID = str.Read16(); + m_itemProtectionIndex = str.Read16(); + str.ReadZeroEndString(m_itemName); + str.ReadZeroEndString(m_contentType); + if (str.BytesRemain() > 0) // This is an optional field + { + str.ReadZeroEndString(m_contentEncoding); + } + } + if (GetVersion() == 1) + { + if (str.BytesRemain() > 0) // This is an optional field + { + str.ReadStringWithLen(m_extensionType, 4); + } + if (str.BytesRemain() > 0) // This is an optional field + { + FDItemInfoExtension* itemInfoExt = new FDItemInfoExtension(); + m_itemInfoExtension.reset(itemInfoExt); + itemInfoExt->FromStream(str); + } + } + if (GetVersion() >= 2) + { + if (GetVersion() == 2) + { + m_itemID = str.Read16(); + } + else if (GetVersion() == 3) + { + m_itemID = str.Read32(); + } + m_itemProtectionIndex = str.Read16(); + m_itemType = str.Read32(); + str.ReadZeroEndString(m_itemName); + if (m_itemType == "mime") + { + str.ReadZeroEndString(m_contentType); + if (str.BytesRemain() > 0) // This is an optional field + { + str.ReadZeroEndString(m_contentEncoding); + } + } + else if (m_itemType == "uri ") + { + str.ReadZeroEndString(m_itemUriType); + } + } +} + +void FDItemInfoExtension::FromStream(Stream& str) +{ + str.ReadZeroEndString(m_contentLocation); + str.ReadZeroEndString(m_contentMD5); + m_contentLength = ((uint64_t) str.Read32()) << 32; + m_contentLength += str.Read32(); + m_transferLength = ((uint64_t) str.Read32()) << 32; + m_transferLength += str.Read32(); + m_entryCount = str.Read8(); + for (unsigned int i = 0; i < m_entryCount; i++) + { + m_groupID.at(i) = str.Read32(); + } +} + +ItemInfoEntry* ItemInfoAtom::GetItemsNumber(FourCCInt itemType, const unsigned int index) +{ + ItemInfoEntry* entry = nullptr; + unsigned int currIndex = 0; + + for (auto i = m_itemInfoList.begin(); i != 
m_itemInfoList.end(); ++i) + { + if (i->GetItemType() == itemType) + { + if (index == currIndex) + { + entry = &(*i); + break; + } + else + { + ++currIndex; + } + } + } + + return entry; +} + +std::vector ItemInfoAtom::GetItemsByType(FourCCInt itemType) const +{ + std::vector items; + for (const auto& i : m_itemInfoList) + { + if (i.GetItemType() == itemType) + { + items.push_back(i); + } + } + return items; +} + +FDItemInfoExtension::FDItemInfoExtension() + : m_contentLocation() + , m_contentMD5() + , m_contentLength(0) + , m_transferLength(0) + , m_entryCount(0) + , m_groupID(256, 0) +{ +} + +void FDItemInfoExtension::SetGroupID(const std::uint32_t idx, const uint32_t id) +{ + m_groupID.at(idx) = id; +} + +uint32_t FDItemInfoExtension::GetGroupID(const std::uint32_t idx) +{ + return m_groupID.at(idx); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/ItemInfoAtom.h b/src/isolib/atoms/ItemInfoAtom.h new file mode 100644 index 00000000..4c499853 --- /dev/null +++ b/src/isolib/atoms/ItemInfoAtom.h @@ -0,0 +1,552 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemInfoAtom.h +//! \brief: Item Information Atom class. +//! \detail: 'iinf' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _ITEMINFOATOM_H_ +#define _ITEMINFOATOM_H_ + +#include +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class ItemInfoEntry; +class ItemInfoExtension; + +class ItemInfoAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ItemInfoAtom(); + ItemInfoAtom(uint8_t version); + + //! + //! \brief Destructor + //! + virtual ~ItemInfoAtom() = default; + + //! + //! \brief Clear contents + //! + //! \return void + //! + void Clear(); + + //! + //! \brief Get Entry Count + //! + //! \return std::uint32_t + //! number of entries + //! + std::uint32_t GetEntryCount() const; + + //! + //! \brief Get Item Ids + //! + //! \return std::vector + //! All Item IDs in this ItemInfoBox + //! + std::vector GetItemIds() const; + + //! + //! \brief Add Item Info Entry + //! + //! \param [in] const ItemInfoEntry& + //! infoEntry + //! + //! \return void + //! 
+ void AddItemInfoEntry(const ItemInfoEntry& infoEntry); + + //! + //! \brief Get Item Info Entry + //! + //! \param [in] const std::uint32_t + //! index + //! + //! \return const ItemInfoEntry& + //! ItemInformationEntry + //! + const ItemInfoEntry& GetItemInfoEntry(const std::uint32_t idx) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief Get Items Number + //! + //! \param [in] FourCCInt + //! item Type + //! \param [in] unsigned int + //! index + //! + //! \return ItemInfoEntry* + //! ItemInfoEntry + //! + ItemInfoEntry* GetItemsNumber(FourCCInt itemType, unsigned int index = 0); + + //! + //! \brief Find Item With Type And ID + //! + //! \param [in] FourCCInt + //! item Type + //! \param [in] unsigned int + //! ID + //! \param [in] unsigned int& + //! index + //! + //! \return ItemInfoEntry* + //! ItemInfoEntry + //! + ItemInfoEntry* FindItemWithTypeAndID(FourCCInt itemType, unsigned int itemID, unsigned int& index); + + //! + //! \brief Get the number of items + //! + //! \param [in] FourCCInt + //! item Type + //! + //! \return unsigned int + //! number of items + //! + unsigned int CountNumberOfItems(FourCCInt itemType); + + //! + //! \brief Get Items By Type + //! + //! \param [in] FourCCInt + //! item Type + //! + //! \return std::vector + //! vector of items + //! + std::vector GetItemsByType(FourCCInt itemType) const; + + //! + //! \brief Get Items By Id + //! + //! \param [in] uint32_t + //! item id + //! + //! \return ItemInfoEntry + //! ItemInfoEntry + //! + ItemInfoEntry GetItemById(uint32_t itemId) const; + +private: + std::vector m_itemInfoList; //!< std::vector of the ItemInfoEntry Atoms +}; + +class ItemInfoEntry : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ItemInfoEntry(); + + //! + //! \brief Destructor + //! + virtual ~ItemInfoEntry(); + + ItemInfoEntry(const ItemInfoEntry& itemInfoEntry) = default; + ItemInfoEntry& operator=(const ItemInfoEntry&) = default; + + //! + //! \brief Set and Get function for m_itemID member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_itemID + //! m_itemID member in class + //! \param [in] ItemID + //! m_itemID name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_itemID, ItemID, const); + + //! + //! \brief Set and Get function for m_itemProtectionIndex member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_itemProtectionIndex + //! m_itemProtectionIndex member in class + //! \param [in] ItemProtectionIndex + //! m_itemProtectionIndex name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_itemProtectionIndex, ItemProtectionIndex, const); + + //! + //! \brief Set and Get function for m_itemName member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_itemName + //! m_itemName member in class + //! \param [in] ItemName + //! m_itemName name in class + //! \param [in] const or blank + //! return const type or not + //! + //! 
\return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_itemName, ItemName, const); + + //! + //! \brief Set and Get function for m_contentType member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_contentType + //! m_contentType member in class + //! \param [in] ContentType + //! m_contentType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_contentType, ContentType, const); + + //! + //! \brief Set and Get function for m_contentEncoding member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_contentEncoding + //! m_contentEncoding member in class + //! \param [in] ContentEncoding + //! m_contentEncoding name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_contentEncoding, ContentEncoding, const); + + //! + //! \brief Set and Get function for m_extensionType member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_extensionType + //! m_extensionType member in class + //! \param [in] ExtensionType + //! m_extensionType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_extensionType, ExtensionType, const); + + //! + //! \brief Set and Get function for m_itemType member + //! + //! \param [in] FourCCInt + //! value to set + //! \param [in] m_itemType + //! m_itemType member in class + //! \param [in] ItemType + //! m_itemType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(FourCCInt, m_itemType, ItemType, const); + + //! + //! \brief Set and Get function for m_itemUriType member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_itemUriType + //! m_itemUriType member in class + //! \param [in] ItemUriType + //! m_itemUriType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_itemUriType, ItemUriType, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::uint32_t m_itemID; //!< ID of the item + std::uint16_t m_itemProtectionIndex; //!< Item protection index + std::string m_itemName; //!< Item name + std::string m_contentType; //!< Content type + std::string m_contentEncoding; //!< Content encoding + + std::string m_extensionType; //!< The extension type + std::shared_ptr m_itemInfoExtension; //!< Item info extension + + FourCCInt m_itemType; //!< Item type + std::string m_itemUriType; //!< Item UIR type +}; + +class ItemInfoExtension +{ +public: + + //! + //! \brief Constructor + //! + ItemInfoExtension() = default; + + //! + //! \brief Destructor + //! + virtual ~ItemInfoExtension() = default; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! 
bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str) = 0; + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str) = 0; +}; + +class FDItemInfoExtension : public ItemInfoExtension +{ +public: + + //! + //! \brief Constructor + //! + FDItemInfoExtension(); + + //! + //! \brief Destructor + //! + virtual ~FDItemInfoExtension() = default; + + //! + //! \brief Set and Get function for m_contentLocation member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_contentLocation + //! m_contentLocation member in class + //! \param [in] ContentLocation + //! m_contentLocation name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_contentLocation, ContentLocation, ); + + //! + //! \brief Set and Get function for m_contentMD5 member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_contentMD5 + //! m_contentMD5 member in class + //! \param [in] ContentMD5 + //! m_contentMD5 name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_contentMD5, ContentMD5, ); + + //! + //! \brief Set and Get function for m_contentLength member + //! + //! \param [in] uint64_t + //! value to set + //! \param [in] m_contentLength + //! m_contentLength member in class + //! \param [in] ContentLength + //! m_contentLength name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint64_t, m_contentLength, ContentLength, ); + + //! + //! \brief Set and Get function for m_transferLength member + //! + //! \param [in] uint64_t + //! value to set + //! \param [in] m_transferLength + //! m_transferLength member in class + //! \param [in] TranferLength + //! m_transferLength name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint64_t, m_transferLength, TranferLength, ); + + //! + //! \brief Set and Get function for m_entryCount member + //! + //! \param [in] uint8_t + //! value to set + //! \param [in] m_entryCount + //! m_entryCount member in class + //! \param [in] NumGroupID + //! m_entryCount name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint8_t, m_entryCount, NumGroupID, ); + + //! + //! \brief Set Group ID + //! + //! \param [in] const std::uint32_t + //! idx value + //! \param [in] const std::uint32_t + //! id value + //! + //! \return void + //! + void SetGroupID(const std::uint32_t idx, const uint32_t id); + + //! + //! \brief Get Group ID + //! + //! \param [in] const std::uint32_t + //! idx value + //! + //! \return uint32_t + //! group id + //! + uint32_t GetGroupID(const std::uint32_t idx); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void FromStream(Stream& str); + +private: + std::string m_contentLocation; //!< Content location + std::string m_contentMD5; //!< MD5 value + uint64_t m_contentLength; //!< Content length + uint64_t m_transferLength; //!< Transfer length + uint8_t m_entryCount; //!< Entry count + std::vector m_groupID; //!< std::vector of Group ID values +}; + +VCD_MP4_END; +#endif /* _ITEMINFOATOM_H_ */ diff --git a/src/isolib/atoms/ItemLocationAtom.cpp b/src/isolib/atoms/ItemLocationAtom.cpp new file mode 100644 index 00000000..1702c143 --- /dev/null +++ b/src/isolib/atoms/ItemLocationAtom.cpp @@ -0,0 +1,289 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemLocationAtom.cpp +//! \brief: ItemLocationAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
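Before the ItemLocationAtom implementation that follows, one serialization detail of the item-information atoms above is worth a concrete check: `FDItemInfoExtension` stores its 64-bit content_length and transfer_length as a high 32-bit word followed by a low word, and rebuilds them the same way on read. The snippet below is a small round-trip test of that hi/lo split; a `std::vector<uint32_t>` stands in for the library's `Stream::Write32`/`Read32` calls, so it only illustrates the word-order contract, not the real serializer.

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Write a 64-bit value as two 32-bit words, high word first (mirrors ToStream).
static void WriteSplit64(std::vector<uint32_t>& words, uint64_t value)
{
    words.push_back(static_cast<uint32_t>((value >> 32) & 0xffffffff));
    words.push_back(static_cast<uint32_t>(value & 0xffffffff));
}

// Rebuild the 64-bit value from the two words (mirrors FromStream).
static uint64_t ReadSplit64(const std::vector<uint32_t>& words, size_t pos)
{
    uint64_t value = static_cast<uint64_t>(words.at(pos)) << 32;
    value += words.at(pos + 1);
    return value;
}

int main()
{
    const uint64_t contentLength  = 0x0000000123456789ULL;  // > 4 GiB, needs both words
    const uint64_t transferLength = 42;

    std::vector<uint32_t> words;
    WriteSplit64(words, contentLength);
    WriteSplit64(words, transferLength);

    assert(ReadSplit64(words, 0) == contentLength);
    assert(ReadSplit64(words, 2) == transferLength);
    return 0;
}
```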
+ +#include "ItemLocationAtom.h" + +#include +#include + +VCD_MP4_BEGIN + +ItemLocation::ItemLocation() + : m_itemID(0) + , m_constructionMethod(ConstructType::FILE_OFFSET) + , m_dataReferenceIndex(0) + , m_baseOffset(0) + , m_extentList() +{ +} + +std::uint16_t ItemLocation::GetExtentCount() const +{ + return static_cast(m_extentList.size()); +} + +void ItemLocation::AddExtent(const ExtentParams& extent) +{ + m_extentList.push_back(extent); +} + +const ExtentList& ItemLocation::GetExtentList() const +{ + return m_extentList; +} + +ItemLocationAtom::ItemLocationAtom() + : FullAtom("iloc", 0, 0) + , m_offSetSize(4) + , m_lengthSize(4) + , m_baseOffSetSize(4) + , m_indexSize(0) + , m_itemLocations() +{ +} + +std::uint32_t ItemLocationAtom::GetItemCount() const +{ + return static_cast(m_itemLocations.size()); +} + +void ItemLocationAtom::AddLocation(const ItemLocation& itemLoc) +{ + // Use version to 1 if needed + if (itemLoc.GetConstructType() != ItemLocation::ConstructType::FILE_OFFSET) + { + SetVersion(1); + } + m_itemLocations.push_back(itemLoc); +} + +void ItemLocationAtom::AddExtent(const std::uint32_t itemId, const ExtentParams& extent) +{ + const auto iter = findItem(itemId); + if (iter == m_itemLocations.end()) + { + ISO_LOG(LOG_ERROR, "ItemLocationAtom::AddExtent() invalid item id\n"); + throw Exception(); + } + + iter->AddExtent(extent); +} + +ItemLocationVector& ItemLocationAtom::GetItemLocations() +{ + return m_itemLocations; +} + +bool ItemLocationAtom::HasItemIdEntry(std::uint32_t itemId) const +{ + if (findItem(itemId) != m_itemLocations.cend()) + { + return true; + } + return false; +} + +bool ItemLocationAtom::SetItemDataReferenceIndex(const std::uint32_t itemId, const std::uint16_t dataReferenceIndex) +{ + const auto iter = findItem(itemId); + if (iter != m_itemLocations.end()) + { + iter->SetDataReferenceIndex(dataReferenceIndex); + return true; + } + + return false; +} + +void ItemLocationAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write1(m_offSetSize, 4); + str.Write1(m_lengthSize, 4); + str.Write1(m_baseOffSetSize, 4); + if ((GetVersion() == 1) || (GetVersion() == 2)) + { + str.Write1(m_indexSize, 4); + } + else + { + str.Write1(0, 4); // reserved = 0 + } + if (GetVersion() < 2) + { + str.Write16(static_cast(m_itemLocations.size())); + } + else if (GetVersion() == 2) + { + str.Write32(static_cast(m_itemLocations.size())); + } + + for (const auto& itemLoc : m_itemLocations) + { + if (GetVersion() < 2) + { + str.Write16(static_cast(itemLoc.GetItemID())); + } + else if (GetVersion() == 2) + { + str.Write32(itemLoc.GetItemID()); + } + + if ((GetVersion() == 1) || (GetVersion() == 2)) + { + str.Write1(0, 12); // reserved = 0 + str.Write1(static_cast(itemLoc.GetConstructType()), 4); + } + str.Write16(itemLoc.GetDataReferenceIndex()); + str.Write1(itemLoc.GetBaseOffset(), static_cast(m_baseOffSetSize * 8)); + str.Write16(itemLoc.GetExtentCount()); + + const ExtentList& extents = itemLoc.GetExtentList(); + for (const auto& locExt : extents) + { + if (((GetVersion() == 1) || (GetVersion() == 2)) && (m_indexSize > 0)) + { + str.Write1(locExt.m_extentIndex, static_cast(m_indexSize * 8)); + } + str.Write1(locExt.m_extentOffset, static_cast(m_offSetSize * 8)); + str.Write1(locExt.m_extentLen, static_cast(m_lengthSize * 8)); + } + } + + UpdateSize(str); +} + +void ItemLocationAtom::FromStream(Stream& str) +{ + unsigned int itemCount = 0; + + ParseFullAtomHeader(str); + + m_offSetSize = static_cast(str.Read1(4)); + m_lengthSize = static_cast(str.Read1(4)); + 
m_baseOffSetSize = static_cast(str.Read1(4)); + if ((GetVersion() == 1) || (GetVersion() == 2)) + { + m_indexSize = static_cast(str.Read1(4)); + } + else + { + str.Read1(4); // reserved = 0 + } + + if (GetVersion() < 2) + { + itemCount = str.Read16(); + } + else if (GetVersion() == 2) + { + itemCount = str.Read32(); + } + + for (unsigned int i = 0; i < itemCount; i++) + { + ItemLocation itemLoc; + if (GetVersion() < 2) + { + itemLoc.SetItemID(str.Read16()); + } + else if (GetVersion() == 2) + { + itemLoc.SetItemID(str.Read32()); + } + + if ((GetVersion() == 1) || (GetVersion() == 2)) + { + str.Read1(12); // reserved = 0 + itemLoc.SetConstructType(static_cast(str.Read1(4))); + } + itemLoc.SetDataReferenceIndex(str.Read16()); + itemLoc.SetBaseOffset(str.Read1(static_cast(m_baseOffSetSize * 8))); + const unsigned int extentCount = str.Read16(); + for (unsigned int j = 0; j < extentCount; j++) + { + ExtentParams locExt; + if (((GetVersion() == 1) || (GetVersion() == 2)) && (m_indexSize > 0)) + { + locExt.m_extentIndex = str.Read1(static_cast(m_indexSize * 8)); + } + locExt.m_extentOffset = str.Read1(static_cast(m_offSetSize * 8)); + locExt.m_extentLen = str.Read1(static_cast(m_lengthSize * 8)); + itemLoc.AddExtent(locExt); + } + AddLocation(itemLoc); + } +} + +const ItemLocation& ItemLocationAtom::GetItemLocationForID(const unsigned int itemID) const +{ + const auto iter = findItem(itemID); + if (iter != m_itemLocations.cend()) + { + return *iter; + } + ISO_LOG(LOG_ERROR, "ItemLocationAtom::GetItemLocationForID: invalid item ID\n"); + throw Exception(); +} + +const ExtentParams& ItemLocation::GetExtent(const unsigned int i) const +{ + if (i >= m_extentList.size()) + { + ISO_LOG(LOG_ERROR, "ItemLocationAtom::GetExtent: invalid extent ID\n"); + throw Exception(); + } + else + { + return m_extentList.at(i); + } +} + +ItemLocationVector::const_iterator ItemLocationAtom::findItem(const std::uint32_t itemId) const +{ + ItemLocationVector::const_iterator iter = + std::find_if(m_itemLocations.cbegin(), m_itemLocations.cend(), + [itemId](const ItemLocation& itemLocation) { return itemLocation.GetItemID() == itemId; }); + return iter; +} + +ItemLocationVector::iterator ItemLocationAtom::findItem(const std::uint32_t itemId) +{ + ItemLocationVector::iterator iter = + std::find_if(m_itemLocations.begin(), m_itemLocations.end(), + [itemId](const ItemLocation& itemLocation) { return itemLocation.GetItemID() == itemId; }); + return iter; +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/ItemLocationAtom.h b/src/isolib/atoms/ItemLocationAtom.h new file mode 100644 index 00000000..b916c607 --- /dev/null +++ b/src/isolib/atoms/ItemLocationAtom.h @@ -0,0 +1,382 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemLocationAtom.h +//! \brief: Item Location atom definition. +//! \detail: Contains location item data structure +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _ITEMLOCATIONATOM_H_ +#define _ITEMLOCATIONATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +#include + +VCD_MP4_BEGIN + +struct ExtentParams //!< extent parameters +{ + std::uint64_t m_extentIndex = 0; + std::uint64_t m_extentOffset = 0; + std::uint64_t m_extentLen = 0; +}; + +typedef std::vector ExtentList; //!< std::vector of item location extents + +class ItemLocation +{ +public: + + //! + //! \brief Constructor + //! + ItemLocation(); + + //! + //! \brief Destructor + //! + ~ItemLocation() = default; + + enum class ConstructType //!< construction type + { + FILE_OFFSET = 0, + IDAT_OFFSET = 1, + ITEM_OFFSET = 2 + }; + + //! + //! \brief Set and Get function for m_itemID member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_itemID + //! m_itemID member in class + //! \param [in] ItemID + //! m_itemID name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_itemID, ItemID, const); + + //! + //! \brief Set and Get function for m_constructionMethod member + //! + //! \param [in] ConstructType + //! value to set + //! \param [in] m_constructionMethod + //! m_constructionMethod member in class + //! \param [in] ConstructType + //! m_constructionMethod name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(ConstructType, m_constructionMethod, ConstructType, const); + + //! + //! \brief Set and Get function for m_dataReferenceIndex member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_dataReferenceIndex + //! m_dataReferenceIndex member in class + //! \param [in] DataReferenceIndex + //! m_dataReferenceIndex name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_dataReferenceIndex, DataReferenceIndex, const); + + //! + //! \brief Set and Get function for m_baseOffset member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_baseOffset + //! m_baseOffset member in class + //! \param [in] BaseOffset + //! m_baseOffset name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_baseOffset, BaseOffset, const); + + //! + //! \brief Get Extent Count + //! + //! \return std::uint16_t + //! 
Extent Count + //! + std::uint16_t GetExtentCount() const; + + //! + //! \brief Add Extent + //! + //! \param [in] const ExtentParams& + //! extent + //! + //! \return void + //! + void AddExtent(const ExtentParams& extent); + + //! + //! \brief Get Extent List + //! + //! \return const ExtentList& + //! Extent List + //! + const ExtentList& GetExtentList() const; + + //! + //! \brief Get Extent Params + //! + //! \param [in] unsigned int + //! index + //! + //! \return const ExtentParams& + //! Extent Params + //! + const ExtentParams& GetExtent(unsigned int i) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! [in] unsigned int + //! version + //! + //! \return void + //! + void ToStream(Stream& str, unsigned int version); + +private: + std::uint32_t m_itemID; //!< Item ID + ConstructType m_constructionMethod; //!< Construction method enumeration + std::uint16_t m_dataReferenceIndex; //!< Data reference index + std::uint64_t m_baseOffset; //!< Base offset value + ExtentList m_extentList; //!< List of extents +}; + +typedef std::vector ItemLocationVector; //!< std::vector of Item Locations + +class ItemLocationAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ItemLocationAtom(); + + //! + //! \brief Destructor + //! + virtual ~ItemLocationAtom() = default; + + //! + //! \brief Set and Get function for m_offSetSize member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_offSetSize + //! m_offSetSize member in class + //! \param [in] OffSetSize + //! m_offSetSize name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_offSetSize, OffSetSize, const); + + //! + //! \brief Set and Get function for m_lengthSize member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_lengthSize + //! m_lengthSize member in class + //! \param [in] LengthSize + //! m_lengthSize name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_lengthSize, LengthSize, const); + + //! + //! \brief Set and Get function for m_baseOffSetSize member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_baseOffSetSize + //! m_baseOffSetSize member in class + //! \param [in] BaseOffSetSize + //! m_baseOffSetSize name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_baseOffSetSize, BaseOffSetSize, const); + + //! + //! \brief Set and Get function for m_indexSize member + //! + //! \param [in] std::uint8_t + //! value to set + //! \param [in] m_indexSize + //! m_indexSize member in class + //! \param [in] IndexSize + //! m_indexSize name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint8_t, m_indexSize, IndexSize, const); + + //! + //! \brief Get Item Count + //! + //! \return std::uint32_t + //! Item Count + //! + std::uint32_t GetItemCount() const; + + //! + //! \brief Add Location + //! + //! \param [in] const ItemLocation& + //! item Location + //! + //! \return void + //! + void AddLocation(const ItemLocation& itemLoc); + + //! + //! \brief Add Extent + //! + //! \param [in] std::uint32_t + //! item Id + //! 
\param [in] const ExtentParams& + //! extent + //! + //! \return void + //! + void AddExtent(std::uint32_t itemId, const ExtentParams& extent); + + //! + //! \brief Has Item Id Entry + //! + //! \param [in] std::uint32_t + //! item id + //! + //! \return bool + //! has or not + //! + bool HasItemIdEntry(std::uint32_t itemId) const; + + //! + //! \brief Set Item Data Reference Index + //! + //! \param [in] std::uint32_t + //! item Id + //! \param [in] std::uint16_t + //! dataReferenceIndex + //! + //! \return bool + //! return successful or not + //! + bool SetItemDataReferenceIndex(std::uint32_t itemId, std::uint16_t dataReferenceIndex); + + //! + //! \brief Get Item Locations + //! + //! \return ItemLocationVector& + //! ItemLocationVector + //! + ItemLocationVector& GetItemLocations(); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + + //! + //! \brief Get Item Location For ID + //! + //! \param [in] unsigned int + //! item id + //! + //! \return const ItemLocation& + //! ItemLocation + //! + const ItemLocation& GetItemLocationForID(unsigned int itemID) const; + +private: + std::uint8_t m_offSetSize; //!< Offset size {0,4, or 8} + std::uint8_t m_lengthSize; //!< Length size {0,4, or 8} + std::uint8_t m_baseOffSetSize; //!< Base offset size {0,4, or 8} + std::uint8_t m_indexSize; //!< Index size {0,4, or 8} and only if version == 1, otherwise reserved + ItemLocationVector m_itemLocations; //!< std::vector of item location entries + ItemLocationVector::const_iterator + findItem(std::uint32_t itemId) const; //!< Find an item with given itemId and return as a const + ItemLocationVector::iterator findItem(std::uint32_t itemId); //!< Find an item with given itemId and return +}; + +VCD_MP4_END; +#endif /* _ITEMLOCATIONATOM_H_ */ diff --git a/src/isolib/atoms/ItemProtAtom.cpp b/src/isolib/atoms/ItemProtAtom.cpp new file mode 100644 index 00000000..50186aee --- /dev/null +++ b/src/isolib/atoms/ItemProtAtom.cpp @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemProtAtom.cpp +//! \brief: ItemProtAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "ItemProtAtom.h" + +VCD_MP4_BEGIN + +std::vector ProtectionSchemeInfoAtom::GetData() const +{ + return m_data; +} + +void ProtectionSchemeInfoAtom::SetData(const std::vector& data) +{ + m_data = data; +} + +void ProtectionSchemeInfoAtom::ToStream(Stream& str) +{ + str.WriteArray(m_data, m_data.size()); +} + +void ProtectionSchemeInfoAtom::FromStream(Stream& str) +{ + str.ReadArray(m_data, str.GetSize()); +} + +ItemProtectionAtom::ItemProtectionAtom() + : FullAtom("ipro", 0, 0) +{ +} + +std::uint16_t ItemProtectionAtom::GetSize() const +{ + return static_cast(m_protectionInformation.size()); +} + +const ProtectionSchemeInfoAtom& ItemProtectionAtom::GetEntry(const std::uint16_t index) const +{ + return m_protectionInformation.at(index); +} + +std::uint16_t ItemProtectionAtom::AddEntry(const ProtectionSchemeInfoAtom& sinf) +{ + m_protectionInformation.push_back(sinf); + return static_cast(m_protectionInformation.size() - 1); +} + +void ItemProtectionAtom::ToStream(Stream& str) +{ + if (m_protectionInformation.size() == 0) + { + return; + } + + WriteFullAtomHeader(str); + str.Write16(static_cast(m_protectionInformation.size())); + for (auto& Atom : m_protectionInformation) + { + Atom.ToStream(str); + } + UpdateSize(str); +} + +void ItemProtectionAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + const unsigned int Atoms = str.Read16(); + for (unsigned int i = 0; i < Atoms; ++i) + { + FourCCInt AtomType; + Stream subStream = str.ReadSubAtomStream(AtomType); + ProtectionSchemeInfoAtom sinf; + sinf.FromStream(subStream); + m_protectionInformation.push_back(sinf); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/ItemProtAtom.h b/src/isolib/atoms/ItemProtAtom.h new file mode 100644 index 00000000..668166ae --- /dev/null +++ b/src/isolib/atoms/ItemProtAtom.h @@ -0,0 +1,161 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemProtAtom.h +//! \brief: Item Protection Atom class +//! \detail: 'ipro' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _ITEMPROTECTIONATOM_H_ +#define _ITEMPROTECTIONATOM_H_ + +#include "FormAllocator.h" +#include "FullAtom.h" +#include + +VCD_MP4_BEGIN + +class Stream; + +class ProtectionSchemeInfoAtom +{ +public: + + //! + //! \brief Get data + //! + //! \return std::vector + //! data + //! + std::vector GetData() const; + + //! + //! \brief Set data + //! + //! \param [in] const std::vector& + //! data value + //! + //! \return void + //! + void SetData(const std::vector& data); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& stream); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& stream); + +private: + std::vector m_data; //!< Content of this Atom +}; + +class ItemProtectionAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ItemProtectionAtom(); + + //! + //! \brief Destructor + //! + virtual ~ItemProtectionAtom() = default; + + //! + //! \brief Get size + //! + //! \return std::uint16_t + //! size + //! + std::uint16_t GetSize() const; + + //! + //! \brief Get entry + //! + //! \param [in] std::uint16_t + //! index value + //! + //! \return const ProtectionSchemeInfoAtom& + //! ProtectionSchemeInfoAtom + //! + const ProtectionSchemeInfoAtom& GetEntry(std::uint16_t index) const; + + //! + //! \brief Aadd entry + //! + //! \param [in] const ProtectionSchemeInfoAtom& + //! ProtectionSchemeInfoAtom + //! + //! \return std::uint16_t + //! index + //! + std::uint16_t AddEntry(const ProtectionSchemeInfoAtom& sinf); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& stream); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& stream); + +private: + std::vector m_protectionInformation; //!< 'sinf' Atoms +}; + +VCD_MP4_END; +#endif /* _ITEMPROTECTIONATOM_H_ */ diff --git a/src/isolib/atoms/ItemRefAtom.cpp b/src/isolib/atoms/ItemRefAtom.cpp new file mode 100644 index 00000000..f89d982a --- /dev/null +++ b/src/isolib/atoms/ItemRefAtom.cpp @@ -0,0 +1,206 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemRefAtom.cpp +//! \brief: ItemRefAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "ItemRefAtom.h" +#include "Stream.h" + +#include +#include +#include +#include + +VCD_MP4_BEGIN + +SingleItemTypeReferenceAtom::SingleItemTypeReferenceAtom(bool isLarge) + : Atom(FourCCInt()) + , m_fromItemId(0) + , m_isLarge(isLarge) +{ +} + +void SingleItemTypeReferenceAtom::SetReferenceType(FourCCInt referenceType) +{ + Atom::SetType(referenceType); +} + +void SingleItemTypeReferenceAtom::AddToItemID(const uint32_t itemID) +{ + m_toItemIds.push_back(itemID); +} + +void SingleItemTypeReferenceAtom::ClearToItemIDs() +{ + m_toItemIds.clear(); +} + +void SingleItemTypeReferenceAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); // parent Atom + + if (m_isLarge) + { + str.Write32(m_fromItemId); + } + else + { + str.Write16(static_cast(m_fromItemId)); + } + + str.Write16(static_cast(m_toItemIds.size())); + for (const auto i : m_toItemIds) + { + if (m_isLarge) + { + str.Write32(i); + } + else + { + str.Write16(static_cast(i)); + } + } + + UpdateSize(str); +} + +std::vector SingleItemTypeReferenceAtom::GetToItemIds() const +{ + return m_toItemIds; +} + +ItemReferenceAtom::ItemReferenceAtom() + : FullAtom("iref", 0, 0) + , m_refList() +{ +} + +void ItemReferenceAtom::AddItemRef(const SingleItemTypeReferenceAtom& ref) +{ + m_refList.push_back(ref); +} + +void ItemReferenceAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); // parent Atom + + for (auto& i : m_refList) + { + i.ToStream(str); + } + + UpdateSize(str); +} + +void ItemReferenceAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + const bool largeIds = GetVersion() ? 
true : false; + + while (str.BytesRemain() > 0) + { + SingleItemTypeReferenceAtom singleRef(largeIds); + singleRef.FromStream(str); + AddItemRef(singleRef); + } +} + +void SingleItemTypeReferenceAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); // parent Atom + + if (m_isLarge) + { + m_fromItemId = str.Read32(); + } + else + { + m_fromItemId = str.Read16(); + } + const uint16_t referenceCount = str.Read16(); + for (unsigned int i = 0; i < referenceCount; ++i) + { + if (m_isLarge) + { + m_toItemIds.push_back(str.Read32()); + } + else + { + m_toItemIds.push_back(str.Read16()); + } + } +} + +std::vector ItemReferenceAtom::GetRefOfType(FourCCInt type) const +{ + std::vector refList; + for (const auto& ref : m_refList) + { + FourCCInt pType = ref.GetType(); + if (pType == type) + { + refList.push_back(ref); + } + } + return refList; +} + +void ItemReferenceAtom::AddItems(FourCCInt type, const std::uint32_t fromId, const std::uint32_t toId) +{ + const bool largeIds = GetVersion() ? true : false; + if (((fromId > std::numeric_limits::max()) || (toId > std::numeric_limits::max())) && + !largeIds) + { + ISO_LOG(LOG_ERROR, "ItemReferenceAtom::Add can not add large item IDs to Atom version 0\n"); + throw Exception(); + } + + auto pRef = + std::find_if(m_refList.begin(), m_refList.end(), [&](const SingleItemTypeReferenceAtom& entry) { + return (entry.GetType() == type) && (entry.GetFromItemID() == fromId); + }); + if (pRef != m_refList.end()) + { + pRef->AddToItemID(toId); + } + else + { + // Add a new entry + SingleItemTypeReferenceAtom ref(largeIds); + ref.SetType(type); + ref.SetFromItemID(fromId); + ref.AddToItemID(toId); + m_refList.push_back(ref); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/ItemRefAtom.h b/src/isolib/atoms/ItemRefAtom.h new file mode 100644 index 00000000..b9ac4489 --- /dev/null +++ b/src/isolib/atoms/ItemRefAtom.h @@ -0,0 +1,203 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ItemRefAtom.h +//! \brief: Single Item Reference Atom class. +//! \detail: Definitions for Item Reference Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! 
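The 'iref' atom's version decides whether item IDs are written as 16-bit or 32-bit fields, and `AddItems()` groups references by (reference type, from-item) before serialization. A minimal call-pattern sketch, assuming `FourCCInt` is constructible from a four-character literal as it is used elsewhere in these files:

```cpp
ItemReferenceAtom iref;        // FullAtom("iref", 0, 0): version 0, i.e. 16-bit item IDs

iref.AddItems("thmb", 2, 1);   // item 2 is a thumbnail of item 1
iref.AddItems("thmb", 2, 3);   // appended to the existing 'thmb' entry for from-item 2
iref.AddItems("cdsc", 4, 1);   // different type, so a separate SingleItemTypeReferenceAtom

// With version 0, an ID above 0xFFFF makes AddItems() log an error and throw,
// since ToStream() would otherwise truncate it through Write16().
auto thumbRefs = iref.GetRefOfType("thmb");   // one entry, to-item IDs {1, 3}
```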
+ +#ifndef _ITEMREFERENCEATOM_H_ +#define _ITEMREFERENCEATOM_H_ + +#include "Atom.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +#include +#include + +VCD_MP4_BEGIN + +class SingleItemTypeReferenceAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + SingleItemTypeReferenceAtom(bool isLarge = false); + + //! + //! \brief Destructor + //! + virtual ~SingleItemTypeReferenceAtom() = default; + + //! + //! \brief Set Reference Type + //! + //! \param [in] FourCCInt + //! Reference Type value + //! + //! \return void + //! + void SetReferenceType(FourCCInt referenceType); + + //! + //! \brief Set and Get function for m_fromItemId member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_fromItemId + //! m_fromItemId member in class + //! \param [in] FromItemID + //! m_fromItemId name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_fromItemId, FromItemID, const); + + //! + //! \brief Add To Item ID + //! + //! \param [in] std::uint32_t + //! item ID value + //! + //! \return void + //! + void AddToItemID(std::uint32_t itemID); + + //! + //! \brief Get version + //! + //! \return std::vector + //! Item array + //! + std::vector GetToItemIds() const; + + //! + //! \brief Clear To Item IDs + //! + //! \return void + //! + void ClearToItemIDs(); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + +private: + std::uint32_t m_fromItemId; //!< item Id value + std::vector m_toItemIds; //!< std::vector of item Id values + bool m_isLarge; //!< True if this is a SingleItemTypeReferenceAtomLarge +}; + +class ItemReferenceAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ItemReferenceAtom(); + + //! + //! \brief Destructor + //! + virtual ~ItemReferenceAtom() = default; + + //! + //! \brief Add Items + //! + //! \param [in] FourCCInt + //! type value + //! \param [in] std::uint32_t + //! fromId value + //! \param [in] std::uint32_t + //! toId value + //! + //! \return void + //! + void AddItems(FourCCInt type, std::uint32_t fromId, std::uint32_t toId); + + //! + //! \brief Get Reference Of Type + //! + //! \param [in] FourCCInt + //! type value + //! + //! \return std::vector + //! SingleItemTypeReferenceAtom array + //! + std::vector GetRefOfType(FourCCInt type) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void ToStream(Stream& str); + +private: + void AddItemRef(const SingleItemTypeReferenceAtom& ref); //!< Add an item reference to the ItemReferenceAtom + std::list m_refList; //!< reference atom list +}; + +VCD_MP4_END; +#endif /* _ITEMREFERENCEATOM_H_ */ diff --git a/src/isolib/atoms/MediaAtom.cpp b/src/isolib/atoms/MediaAtom.cpp new file mode 100644 index 00000000..47eefc95 --- /dev/null +++ b/src/isolib/atoms/MediaAtom.cpp @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaAtom.cpp +//! \brief: MediaAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
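The `MEMBER_SETANDGET_FUNC_WITH_OPTION` macro used in these headers is defined elsewhere in isolib; judging from calls such as `SetFromItemID()` and `GetFromItemID()` in ItemRefAtom.cpp, it generates a setter/getter pair for the named member. The expansion below is only a hypothetical sketch for readers who do not have the rest of the tree at hand:

```cpp
// Hypothetical sketch of the generator macro; the real definition lives in the
// isolib headers and may differ in detail (parameter passing, reference types, etc.).
#define MEMBER_SETANDGET_FUNC_WITH_OPTION(TYPE, MEMBER, NAME, OPTION) \
    void Set##NAME(TYPE value) { MEMBER = value; }                    \
    TYPE Get##NAME() OPTION { return MEMBER; }

// MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_fromItemId, FromItemID, const)
// would then declare:
//   void SetFromItemID(std::uint32_t value);
//   std::uint32_t GetFromItemID() const;
```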
+ +#include "MediaAtom.h" + + +VCD_MP4_BEGIN + +MediaAtom::MediaAtom() + : Atom("mdia") + , m_mediaHeaderAtom() + , m_handlerAtom() + , m_mediaInformationAtom() +{ +} + +const MediaHeaderAtom& MediaAtom::GetMediaHeaderAtom() const +{ + return m_mediaHeaderAtom; +} + +MediaHeaderAtom& MediaAtom::GetMediaHeaderAtom() +{ + return m_mediaHeaderAtom; +} + +const HandlerAtom& MediaAtom::GetHandlerAtom() const +{ + return m_handlerAtom; +} + +HandlerAtom& MediaAtom::GetHandlerAtom() +{ + return m_handlerAtom; +} + +const MediaInformationAtom& MediaAtom::GetMediaInformationAtom() const +{ + return m_mediaInformationAtom; +} + +MediaInformationAtom& MediaAtom::GetMediaInformationAtom() +{ + return m_mediaInformationAtom; +} + +void MediaAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteAtomHeader(str); + + // Write other Atoms contained in the movie Atom + m_mediaHeaderAtom.ToStream(str); + m_handlerAtom.ToStream(str); + m_mediaInformationAtom.ToStream(str); + + // Update the size of the movie Atom + UpdateSize(str); +} + +void MediaAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseAtomHeader(str); + + // if there a data available in the file + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "mdhd") + { + m_mediaHeaderAtom.FromStream(subBitstr); + } + else if (AtomType == "hdlr") + { + m_handlerAtom.FromStream(subBitstr); + } + else if (AtomType == "minf") + { + m_mediaInformationAtom.FromStream(subBitstr); + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside MediaAtom.\n", type); + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/MediaAtom.h b/src/isolib/atoms/MediaAtom.h new file mode 100644 index 00000000..d723d4f1 --- /dev/null +++ b/src/isolib/atoms/MediaAtom.h @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaAtom.h +//! \brief: Media Atom class. +//! \detail: 'mdia' Atom +//! +//! Created on October 15, 2019, 13:39 PM +//! 
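The 'mdia' atom is a pure container: writing emits the mdhd, hdlr, and minf children in that order, while parsing dispatches sub-atoms by their four-character code and only warns on unrecognized ones. A short write-side sketch using accessors that appear in this patch (timescale and duration values are illustrative):

```cpp
MediaAtom mdia;
mdia.GetHandlerAtom().SetHandlerType("vide");       // video track handler
mdia.GetMediaHeaderAtom().SetTimeScale(90000);      // 90 kHz media timescale
mdia.GetMediaHeaderAtom().SetDuration(90000 * 10);  // ten seconds in that timescale

Stream out;
mdia.ToStream(out);   // 'mdia' header + mdhd + hdlr + minf, size patched by UpdateSize()
```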
+ +#ifndef MEDIAATOM_H +#define MEDIAATOM_H + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "HandlerAtom.h" +#include "MediaHeaderAtom.h" +#include "MediaInfoAtom.h" + +VCD_MP4_BEGIN + +class MediaAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + MediaAtom(); + + //! + //! \brief Destructor + //! + virtual ~MediaAtom() = default; + + //! + //! \brief Get Media Header Atom + //! + //! \return MediaHeaderAtom& + //! header atom + //! + MediaHeaderAtom& GetMediaHeaderAtom(); + + //! + //! \brief Get Media Header Atom + //! + //! \return const MediaHeaderAtom& + //! header atom + //! + const MediaHeaderAtom& GetMediaHeaderAtom() const; + + //! + //! \brief Get Handler Atom + //! + //! \return HandlerAtom& + //! handler atom + //! + HandlerAtom& GetHandlerAtom(); + + //! + //! \brief Get Handler Atom + //! + //! \return const HandlerAtom& + //! handler atom + //! + const HandlerAtom& GetHandlerAtom() const; + + //! + //! \brief Get Media Information Atom + //! + //! \return MediaInformationAtom& + //! Media Information Atom + //! + MediaInformationAtom& GetMediaInformationAtom(); + + //! + //! \brief Get Media Information Atom + //! + //! \return const MediaInformationAtom& + //! Media Information Atom + //! + const MediaInformationAtom& GetMediaInformationAtom() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + MediaHeaderAtom m_mediaHeaderAtom; //!< Media Header Atom + HandlerAtom m_handlerAtom; //!< Media Handler Atom + MediaInformationAtom m_mediaInformationAtom; //!< Media Information Atom +}; + +VCD_MP4_END; +#endif /* MEDIAATOM_H */ diff --git a/src/isolib/atoms/MediaDataAtom.cpp b/src/isolib/atoms/MediaDataAtom.cpp new file mode 100644 index 00000000..869ad3da --- /dev/null +++ b/src/isolib/atoms/MediaDataAtom.cpp @@ -0,0 +1,250 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaDataAtom.cpp +//! \brief: MediaDataAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "MediaDataAtom.h" + +#include +#include +#include + +using namespace std; + +VCD_MP4_BEGIN + +MediaDataAtom::MediaDataAtom() + : Atom("mdat") + , m_headerData() + , m_mediaData() + , m_totalSize(0) + , m_dataOffsetList() + , m_dataLenList() +{ + SetLargeSize(); + WriteAtomHeader(m_headerData); // write Atom header +} + +void MediaDataAtom::Write(std::ofstream& output) const +{ + output.write(reinterpret_cast(m_headerData.GetStorage().data()), + static_cast(m_headerData.GetStorage().size())); + for (const auto& pData : m_mediaData) + { + output.write(reinterpret_cast(pData.data()), static_cast(pData.size())); + } +} + +void MediaDataAtom::ToStream(Stream& str) +{ + const std::vector& data = m_headerData.GetStorage(); + str.WriteArray(data, data.size()); + + for (const auto& pData : m_mediaData) + { + str.WriteArray(pData, pData.size()); + } +} + +void MediaDataAtom::FromStream(Stream& /*str*/) +{ + return; +} + +void MediaDataAtom::UpdateSize(Stream& str) +{ + uint64_t AtomSize = m_totalSize + str.GetSize(); + SetSize(AtomSize); + + if ((AtomSize > std::numeric_limits::max()) && (GetLargeSize() == false)) + { + ISO_LOG(LOG_ERROR, "Atom::UpdateSize(): Atom size exceeds 4GB but large size for 64-bit size field was not set.\n"); + throw Exception(); + } + + // Write updated size to the bitstream. 
+ if (GetLargeSize()) + { + str.SetByte(8, (AtomSize >> 56) & 0xff); + str.SetByte(9, (AtomSize >> 48) & 0xff); + str.SetByte(10, (AtomSize >> 40) & 0xff); + str.SetByte(11, (AtomSize >> 32) & 0xff); + str.SetByte(12, (AtomSize >> 24) & 0xff); + str.SetByte(13, (AtomSize >> 16) & 0xff); + str.SetByte(14, (AtomSize >> 8) & 0xff); + str.SetByte(15, AtomSize & 0xff); + } + else + { + str.SetByte(0, (AtomSize >> 24) & 0xff); + str.SetByte(1, (AtomSize >> 16) & 0xff); + str.SetByte(2, (AtomSize >> 8) & 0xff); + str.SetByte(3, AtomSize & 0xff); + } +} + +std::uint64_t MediaDataAtom::AddData(const std::vector& srcData) +{ + std::uint64_t offset = + m_headerData.GetSize() + m_totalSize; // offset from the beginning of the Atom (including header) + + m_dataOffsetList.push_back(offset); // current offset + m_dataLenList.push_back(srcData.size()); // length of the data to be added + + m_mediaData.push_back(srcData); + m_totalSize += srcData.size(); + + UpdateSize(m_headerData); + return offset; +} + +std::uint64_t MediaDataAtom::AddData(const uint8_t* buffer, const uint64_t bufferSize) +{ + std::uint64_t offset = + m_headerData.GetSize() + m_totalSize; // offset from the beginning of the Atom (including header) + + m_dataOffsetList.push_back(offset); // current offset + m_dataLenList.push_back(bufferSize); // length of the data to be added + + std::vector tmp(buffer, buffer + bufferSize); + m_mediaData.insert(m_mediaData.end(), std::move(tmp)); + + m_totalSize += bufferSize; + + UpdateSize(m_headerData); + return offset; +} + +void MediaDataAtom::AddNalData(const std::vector>& srcData) +{ + std::uint64_t totalLen = 0; + uint64_t headSize = m_headerData.GetSize(); + size_t dataOffset = headSize + m_totalSize; // data offset for the image within the media data Atom + + for (const auto& i : srcData) + { + AddNalData(i); + totalLen += m_dataLenList.back(); + m_dataOffsetList.pop_back(); + m_dataLenList.pop_back(); + } + + m_dataOffsetList.push_back(static_cast(dataOffset)); + m_dataLenList.push_back(totalLen); // total length of the data added +} + +void MediaDataAtom::AddNalData(const std::vector& srcData) +{ + std::uint64_t pStartLen; + std::uint64_t pStartPos; + std::uint64_t pCurr = 0; + std::uint64_t totalLen = 0; + + m_dataOffsetList.push_back(static_cast( + m_headerData.GetSize() + m_totalSize)); // record offset for the picture to be added + + std::vector pMediaData; + pMediaData.reserve(srcData.size()); + + // replace start codes with nal length fields + pStartLen = FindStartCode(srcData, 0, pStartPos); + pCurr += pStartLen; + while (pCurr < srcData.size()) + { + pStartLen = + FindStartCode(srcData, pCurr, pStartPos); // find next start code to determine NAL unit size + const std::uint64_t nalSize = pStartPos - pCurr; + + // write length field + pMediaData.push_back(static_cast((uint32_t(nalSize) >> 24) & 0xff)); + pMediaData.push_back(static_cast((uint32_t(nalSize) >> 16) & 0xff)); + pMediaData.push_back(static_cast((uint32_t(nalSize) >> 8) & 0xff)); + pMediaData.push_back(static_cast(uint32_t(nalSize) & 0xff)); + + std::vector::const_iterator sourceIt = + srcData.begin() + static_cast::difference_type>(pCurr); + pMediaData.insert(pMediaData.end(), sourceIt, + sourceIt + static_cast::difference_type>(nalSize)); + + pCurr = pStartPos + pStartLen; + totalLen += (nalSize + 4); + } + + m_mediaData.push_back(std::move(pMediaData)); + m_totalSize += m_mediaData.back().size(); + + m_dataLenList.push_back(totalLen); // total length of the data added + + UpdateSize(m_headerData); +} + +std::uint64_t 
MediaDataAtom::FindStartCode(const std::vector& srcData, + const std::uint64_t pPos, + std::uint64_t& pStartPos) +{ + std::uint64_t i = pPos; + std::uint64_t len = 0; + bool pFound = false; + const size_t srcDataSize = srcData.size(); + + while (i < srcDataSize && !pFound) + { + const uint8_t byte = srcData[i]; + if (byte == 0) + { + ++len; + } + else if (len > 1 && byte == 1) + { + ++len; + pFound = true; + } + else + { + len = 0; + } + ++i; + } + + if (pFound) + { + pStartPos = i - len; + } + else + { + pStartPos = i; + len = 0; + } + + return len; +} + +VCD_MP4_END diff --git a/src/isolib/atoms/MediaDataAtom.h b/src/isolib/atoms/MediaDataAtom.h new file mode 100644 index 00000000..de593a4d --- /dev/null +++ b/src/isolib/atoms/MediaDataAtom.h @@ -0,0 +1,170 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaDataAtom.h +//! \brief: Media Data Atom class. +//! \detail: 'mdat' Atom contains media data +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _MEDIADATAATOM_H_ +#define _MEDIADATAATOM_H_ + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +#include + +class MediaDataAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + MediaDataAtom(); + + //! + //! \brief Destructor + //! + ~MediaDataAtom() = default; + + //! + //! \brief Write to ofstream + //! + //! \param [out] std::ofstream& + //! output + //! + //! \return void + //! + void Write(std::ofstream& output) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + + //! + //! \brief Update total atom size. + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void UpdateSize(Stream& str); + + //! + //! \brief Add media data to the atom + //! + //! \param [in] const std::vector& + //! data to be added + //! + //! 
\return std::uint64_t + //! offset of the data + //! + std::uint64_t AddData(const std::vector& srcData); + + //! + //! \brief Add media data to the atom + //! + //! \param [in] const uint8_t* + //! buffer + //! \param [in] const std::vector& + //! data to be added + //! + //! \return std::uint64_t + //! offset of the data + //! + std::uint64_t AddData(const uint8_t* buffer, const uint64_t bufferSize); + + //! + //! \brief Add media nal data to the atom + //! + //! \param [in] const std::vector>& + //! source data to be added + //! + //! \return void + //! + void AddNalData(const std::vector>& srcData); + + //! + //! \brief Add media nal data to the atom + //! + //! \param [in] const std::vector& + //! source data to be added + //! + //! \return void + //! + void AddNalData(const std::vector& srcData); + +private: + Stream m_headerData; //!< header container + std::list> m_mediaData; //!< media data container + uint64_t m_totalSize; //!< total size of m_mediaData vectors + std::vector m_dataOffsetList; //!< offsets relative to the beginning of the media data Atom + std::vector m_dataLenList; //!< vector of data lengths which are inserted to the media Atom + + //! + //! \brief Find Start Code + //! + //! \param [in] const std::vector& + //! source data + //! \param [in] std::uint64_t + //! initial Position + //! \param [in] std::uint64_t& + //! start Code Postion + //! + //! \return std::uint64_t + //! offset of the data + //! + std::uint64_t FindStartCode(const std::vector& srcData, + std::uint64_t initPos, + std::uint64_t& startCodePos); +}; + +VCD_MP4_END; +#endif /* _MEDIADATAATOM_H_ */ diff --git a/src/isolib/atoms/MediaHeaderAtom.cpp b/src/isolib/atoms/MediaHeaderAtom.cpp new file mode 100644 index 00000000..546bed20 --- /dev/null +++ b/src/isolib/atoms/MediaHeaderAtom.cpp @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaHeaderAtom.cpp +//! \brief: MediaHeaderAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
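The most involved part of MediaDataAtom is `AddNalData()`: it rewrites an Annex-B buffer (NAL units separated by 00 00 01 or 00 00 00 01 start codes, located by `FindStartCode()`) into the length-prefixed layout stored in 'mdat', dropping each start code and prepending a 4-byte big-endian NAL length instead. Note also that the constructor calls `SetLargeSize()`, so `UpdateSize()` patches the 64-bit size field at header bytes 8 through 15 rather than the 32-bit field. The transform itself can be illustrated without the isolib `Stream` class:

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Convert one Annex-B buffer into the 4-byte length-prefixed form that
// MediaDataAtom::AddNalData() stores in 'mdat'.
std::vector<std::uint8_t> AnnexBToLengthPrefixed(const std::vector<std::uint8_t>& in)
{
    // Returns the start-code length and sets scStart to its first byte,
    // or returns 0 with scStart == in.size() when none is left.
    auto findStartCode = [&](std::size_t from, std::size_t& scStart) -> std::size_t {
        std::size_t zeros = 0;
        for (std::size_t i = from; i < in.size(); ++i)
        {
            if (in[i] == 0) { ++zeros; continue; }
            if (in[i] == 1 && zeros >= 2) { scStart = i - zeros; return zeros + 1; }
            zeros = 0;
        }
        scStart = in.size();
        return 0;
    };

    std::vector<std::uint8_t> out;
    std::size_t scStart = 0;
    std::size_t scLen = findStartCode(0, scStart);
    std::size_t cur = scStart + scLen;            // just past the first start code
    while (cur < in.size())
    {
        std::size_t next = 0;
        const std::size_t nextLen = findStartCode(cur, next);
        const std::size_t nalSize = next - cur;

        out.push_back(static_cast<std::uint8_t>((nalSize >> 24) & 0xff));
        out.push_back(static_cast<std::uint8_t>((nalSize >> 16) & 0xff));
        out.push_back(static_cast<std::uint8_t>((nalSize >> 8) & 0xff));
        out.push_back(static_cast<std::uint8_t>(nalSize & 0xff));
        out.insert(out.end(), in.begin() + static_cast<std::ptrdiff_t>(cur),
                   in.begin() + static_cast<std::ptrdiff_t>(next));

        cur = next + nextLen;                     // skip over the next start code
    }
    return out;
}
```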
+ +#include "MediaHeaderAtom.h" + + +VCD_MP4_BEGIN + +MediaHeaderAtom::MediaHeaderAtom() + : FullAtom("mdhd", 0, 0) + , m_creationTime(0) + , m_modificationTime(0) + , m_timeScale(0) + , m_duration(0) + , m_language(0) +{ +} + +void MediaHeaderAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteFullAtomHeader(str); + + if (GetVersion() == 0) + { + str.Write32(static_cast(m_creationTime)); + str.Write32(static_cast(m_modificationTime)); + str.Write32(m_timeScale); + str.Write32(static_cast(m_duration)); + } + else if (GetVersion() == 1) + { + str.Write64(m_creationTime); + str.Write64(m_modificationTime); + str.Write32(m_timeScale); + str.Write64(m_duration); + } + else + { + ISO_LOG(LOG_ERROR, "MediaHeaderAtom::ToStream() supports only 'mdhd' version 0 and version 1\n"); + throw Exception(); + } + + str.Write16(0); // Pad, Langauge + str.Write16(0); // Predefined + + // Update the size of the movie Atom + UpdateSize(str); +} + +void MediaHeaderAtom::FromStream(Stream& str) +{ + Stream subBitstr; + + // First parse the Atom header + ParseFullAtomHeader(str); + if ((GetVersion() != 0) && (GetVersion() != 1)) + { + ISO_LOG(LOG_ERROR, "MediaHeaderAtom::FromStream() supports only 'mdhd' version 0 and version 1\n"); + throw Exception(); + } + uint8_t pVersion = GetVersion(); + if (pVersion == 0) + { + m_creationTime = str.Read32(); + m_modificationTime = str.Read32(); + } + else + { + m_creationTime = str.Read64(); + m_modificationTime = str.Read64(); + } + m_timeScale = str.Read32(); + if (pVersion == 0) + { + m_duration = str.Read32(); + } + else + { + m_duration = str.Read64(); + } + + str.Read16(); // Pad, Langauge + str.Read16(); // Predefined +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/MediaHeaderAtom.h b/src/isolib/atoms/MediaHeaderAtom.h new file mode 100644 index 00000000..54886949 --- /dev/null +++ b/src/isolib/atoms/MediaHeaderAtom.h @@ -0,0 +1,169 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaHeaderAtom.h +//! \brief: Media Header Atom class. +//! \detail: 'mdhd' Atom contains basic inforation about the media data +//! +//! Created on October 15, 2019, 13:39 PM +//! 
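'mdhd' version 0 stores creation time, modification time, and duration as 32-bit values, while version 1 widens them to 64 bits; the timescale stays 32-bit in both. A small usage sketch; the `SetVersion()` call is assumed to be inherited from `FullAtom`, as it is for the item-info entries in MetaAtom.cpp:

```cpp
MediaHeaderAtom mdhd;         // constructed as FullAtom("mdhd", 0, 0): version 0
mdhd.SetTimeScale(600);       // 600 units per second
mdhd.SetDuration(600 * 60);   // one minute, fits comfortably in 32 bits

// Durations or timestamps beyond 32 bits need version 1, so that ToStream()
// takes the Write64() path instead of truncating to 32 bits.
mdhd.SetVersion(1);
mdhd.SetDuration(0x100000000ULL);   // > UINT32_MAX
```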
+ +#ifndef _MEDIAHEADERATOM_H_ +#define _MEDIAHEADERATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +#include + +VCD_MP4_BEGIN + +class MediaHeaderAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + MediaHeaderAtom(); + + //! + //! \brief Destructor + //! + virtual ~MediaHeaderAtom() = default; + + //! + //! \brief Set and Get function for m_creationTime member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_creationTime + //! m_creationTime member in class + //! \param [in] CreationTime + //! m_creationTime name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_creationTime, CreationTime, ); + + //! + //! \brief Set and Get function for m_modificationTime member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_modificationTime + //! m_modificationTime member in class + //! \param [in] ModificationTime + //! m_modificationTime name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_modificationTime, ModificationTime, ); + + //! + //! \brief Set and Get function for m_timeScale member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_timeScale + //! m_timeScale member in class + //! \param [in] TimeScale + //! m_timeScale name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_timeScale, TimeScale, const); + + //! + //! \brief Set and Get function for m_duration member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_duration + //! m_duration member in class + //! \param [in] Duration + //! m_duration name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_duration, Duration, const); + + //! + //! \brief Set and Get function for m_language member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_language + //! m_language member in class + //! \param [in] Language + //! m_language name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_language, Language, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::uint64_t m_creationTime; //!< Creation time + std::uint64_t m_modificationTime; //!< Modification time + std::uint32_t m_timeScale; //!< Timescale + std::uint64_t m_duration; //!< Duration + std::uint16_t m_language; //!< Language +}; + +VCD_MP4_END; +#endif /* _MEDIAHEADERATOM_H_ */ diff --git a/src/isolib/atoms/MediaInfoAtom.cpp b/src/isolib/atoms/MediaInfoAtom.cpp new file mode 100644 index 00000000..dbcfce27 --- /dev/null +++ b/src/isolib/atoms/MediaInfoAtom.cpp @@ -0,0 +1,187 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
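One detail worth noting in the writer above: the pad/language halfword is emitted as zero and the parser discards it, even though an `m_language` member and its accessor exist. For reference, 'mdhd' normally packs a three-letter ISO 639-2/T code into that 16-bit field as three 5-bit values (each letter minus 0x60), with the top bit reserved as padding. A self-contained helper showing the packing:

```cpp
#include <cstdint>

// Pack a three-letter ISO 639-2/T code (e.g. "und", "eng") into the 15-bit
// 'mdhd' language field; the top pad bit stays zero.
constexpr std::uint16_t PackMdhdLanguage(char a, char b, char c)
{
    return static_cast<std::uint16_t>(((a - 0x60) << 10) | ((b - 0x60) << 5) | (c - 0x60));
}

static_assert(PackMdhdLanguage('u', 'n', 'd') == 0x55C4, "packed code for 'und'");
```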
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaInfoAtom.cpp +//! \brief: MediaInfoAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "MediaInfoAtom.h" + + +VCD_MP4_BEGIN + +MediaInformationAtom::MediaInformationAtom() + : Atom("minf") + , m_mediaType(MediaType::Null) + , m_videoMediaHeaderAtom() + , m_soundMediaHeaderAtom() + , m_nullMediaHeaderAtom() + , m_dataInfoAtom() + , m_sampleTableAtom() +{ +} + +void MediaInformationAtom::SetMediaType(MediaType type) +{ + m_mediaType = type; +} + +MediaInformationAtom::MediaType MediaInformationAtom::GetMediaType() const +{ + return m_mediaType; +} + +const VideoMediaHeaderAtom& MediaInformationAtom::GetVideoMediaHeaderAtom() const +{ + return m_videoMediaHeaderAtom; +} + +VideoMediaHeaderAtom& MediaInformationAtom::GetVideoMediaHeaderAtom() +{ + return m_videoMediaHeaderAtom; +} + +const DataInformationAtom& MediaInformationAtom::GetDataInformationAtom() const +{ + return m_dataInfoAtom; +} + +DataInformationAtom& MediaInformationAtom::GetDataInformationAtom() +{ + return m_dataInfoAtom; +} + +const SampleTableAtom& MediaInformationAtom::GetSampleTableAtom() const +{ + return m_sampleTableAtom; +} + +SampleTableAtom& MediaInformationAtom::GetSampleTableAtom() +{ + return m_sampleTableAtom; +} + +const NullMediaHeaderAtom& MediaInformationAtom::GetNullMediaHeaderAtom() const +{ + return m_nullMediaHeaderAtom; +} + +NullMediaHeaderAtom& MediaInformationAtom::GetNullMediaHeaderAtom() +{ + return m_nullMediaHeaderAtom; +} + +const SoundMediaHeaderAtom& MediaInformationAtom::GetSoundMediaHeaderAtom() const +{ + return m_soundMediaHeaderAtom; +} + +SoundMediaHeaderAtom& MediaInformationAtom::GetSoundMediaHeaderAtom() +{ + return m_soundMediaHeaderAtom; +} + +void MediaInformationAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteAtomHeader(str); + + // Write other Atoms contained in the movie Atom + switch (m_mediaType) + { + case MediaType::Null: + { + m_nullMediaHeaderAtom.ToStream(str); + break; + } + case MediaType::Video: + { + m_videoMediaHeaderAtom.ToStream(str); + break; + } + case MediaType::Sound: + { + m_soundMediaHeaderAtom.ToStream(str); + break; + } + // @todo should also support hmhd, sthd + } + 
m_dataInfoAtom.ToStream(str); + m_sampleTableAtom.ToStream(str); + + // Update the size of the movie Atom + UpdateSize(str); +} + +void MediaInformationAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseAtomHeader(str); + + // if there a data available in the file + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + // Handle this Atom based on the type + if (AtomType == "vmhd") + { + m_videoMediaHeaderAtom.FromStream(subBitstr); + SetMediaType(MediaType::Video); + } + else if (AtomType == "smhd") + { + m_soundMediaHeaderAtom.FromStream(subBitstr); + SetMediaType(MediaType::Sound); + } + else if (AtomType == "nmhd") + { + m_nullMediaHeaderAtom.FromStream(subBitstr); + SetMediaType(MediaType::Null); + } + else if (AtomType == "dinf") + { + m_dataInfoAtom.FromStream(subBitstr); + } + else if (AtomType == "stbl") + { + m_sampleTableAtom.FromStream(subBitstr); + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside MediaInformationAtom.\n", type); + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/MediaInfoAtom.h b/src/isolib/atoms/MediaInfoAtom.h new file mode 100644 index 00000000..80084c0e --- /dev/null +++ b/src/isolib/atoms/MediaInfoAtom.h @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaInfoAtom.h +//! \brief: Media Information Atom class. +//! \detail: 'minf' Atom +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _MEDIAINFORMATIONATOM_H_ +#define _MEDIAINFORMATIONATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "DataInfoAtom.h" +#include "NullMediaHeaderAtom.h" +#include "SampTableAtom.h" +#include "SoundMediaHeaderAtom.h" +#include "VideoMediaHeaderAtom.h" + +VCD_MP4_BEGIN + +class MediaInformationAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + MediaInformationAtom(); + + //! + //! \brief Destructor + //! + virtual ~MediaInformationAtom() = default; + + enum class MediaType //!< media type + { + Null, + Video, + Sound + }; + + //! + //! 
\brief Set Media Type + //! + //! \param [in] MediaType + //! type value + //! + //! \return void + //! + void SetMediaType(MediaType type); + + //! + //! \brief Get Media Type + //! + //! \return MediaType + //! Media Type + //! + MediaType GetMediaType() const; + + //! + //! \brief Get Video Media Header Atom + //! + //! \return VideoMediaHeaderAtom& + //! Video Media Header Atom + //! + VideoMediaHeaderAtom& GetVideoMediaHeaderAtom(); + + //! + //! \brief Get Video Media Header Atom + //! + //! \return const VideoMediaHeaderAtom& + //! Video Media Header Atom + //! + const VideoMediaHeaderAtom& GetVideoMediaHeaderAtom() const; + + //! + //! \brief Get Data Information Atom + //! + //! \return DataInformationAtom& + //! Data Information Atom + //! + DataInformationAtom& GetDataInformationAtom(); + + //! + //! \brief Get Data Information Atom + //! + //! \return const DataInformationAtom& + //! Data Information Atom + //! + const DataInformationAtom& GetDataInformationAtom() const; + + //! + //! \brief Get Sample Table Atom + //! + //! \return SampleTableAtom& + //! Sample Table Atom + //! + SampleTableAtom& GetSampleTableAtom(); + + //! + //! \brief Get Sample Table Atom + //! + //! \return const SampleTableAtom& + //! Sample Table Atom + //! + const SampleTableAtom& GetSampleTableAtom() const; + + //! + //! \brief Get Null Media Header Atom + //! + //! \return NullMediaHeaderAtom& + //! Null Media Header Atom + //! + NullMediaHeaderAtom& GetNullMediaHeaderAtom(); + + //! + //! \brief Get Null Media Header Atom + //! + //! \return const NullMediaHeaderAtom& + //! Null Media Header Atom + //! + const NullMediaHeaderAtom& GetNullMediaHeaderAtom() const; + + //! + //! \brief Get Sound Media Header Atom + //! + //! \return SoundMediaHeaderAtom& + //! Sound Media Header Atom + //! + SoundMediaHeaderAtom& GetSoundMediaHeaderAtom(); + + //! + //! \brief Get Sound Media Header Atom + //! + //! \return const SoundMediaHeaderAtom& + //! Sound Media Header Atom + //! + const SoundMediaHeaderAtom& GetSoundMediaHeaderAtom() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + MediaType m_mediaType; //!< Which media type? + VideoMediaHeaderAtom m_videoMediaHeaderAtom; //!< Video media header Atom + SoundMediaHeaderAtom m_soundMediaHeaderAtom; //!< Sound Media Header Atom + NullMediaHeaderAtom m_nullMediaHeaderAtom; //!< Null Media Header Atom + DataInformationAtom m_dataInfoAtom; //!< Data information Atom + SampleTableAtom m_sampleTableAtom; //!< Sample Table Atom +}; + +VCD_MP4_END; +#endif /* end of include guard: MEDIAINFORMATIONATOM_HPP */ diff --git a/src/isolib/atoms/MediaTypeDefs.h b/src/isolib/atoms/MediaTypeDefs.h new file mode 100644 index 00000000..8543cdb1 --- /dev/null +++ b/src/isolib/atoms/MediaTypeDefs.h @@ -0,0 +1,126 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
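Which media header child ('vmhd', 'smhd', or 'nmhd') is written is driven entirely by the `MediaType` set on the atom, and the parser restores it from whichever header it encounters. A brief write-side sketch:

```cpp
MediaInformationAtom minf;
minf.SetMediaType(MediaInformationAtom::MediaType::Video);   // ToStream() will emit 'vmhd'

// Populate the contained sample table through its accessor before serializing.
SampleTableAtom& stbl = minf.GetSampleTableAtom();
(void) stbl;

Stream out;
minf.ToStream(out);   // 'minf' header + vmhd + dinf + stbl
```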
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MediaTypeDefs.h +//! \brief: Media Type Definition class +//! \detail: defines media type +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _MEDIATYPEDEFS_H_ +#define _MEDIATYPEDEFS_H_ + +#include +#include +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +enum class MediaType //!< Supported bitstream media types +{ + AVCSTR, + HEVCSTR, + UNKNOWN +}; + +namespace MediaTypeTool +{ + //! + //! \brief Get Type + //! + //! \param [in] const std::string + //! type + //! \param [in] const std::string + //! file Name For Error Msg + //! + //! \return MediaType + //! media type + //! + inline MediaType GetType(const std::string pType, const std::string fileNameForErrorMsg) + { + if (pType == "avc1" || pType == "avc3") + { + return MediaType::AVCSTR; + } + else if (pType == "hvc1" || pType == "hev1") + { + return MediaType::HEVCSTR; + } + else + { + // Unsupported code type + std::string fileInfo = (fileNameForErrorMsg.empty()) ? "" : " (" + fileNameForErrorMsg + ")"; + + if (pType.empty()) + { + ISO_LOG(LOG_ERROR, "Failed to define media type, code_type not set %s\n", fileInfo.c_str()); + throw Exception(); + } + else + { + ISO_LOG(LOG_ERROR, "Failed to define media type for unsupported code_type '%s', %s\n", pType.c_str(), fileInfo.c_str()); + throw Exception(); + } + } + } + + //! + //! \brief Stream Type Name + //! + //! \param [in] MediaType + //! media type + //! + //! \return const std::string + //! media type string + //! + inline const std::string GetMP4VRImpl::StreamTypeName(MediaType mediaType) + { + switch (mediaType) + { + case MediaType::AVCSTR: + { + return "AVC"; + break; + } + case MediaType::HEVCSTR: + { + return "HEVC"; + break; + } + default: + { + // Invalid media type + return "INVALID"; + break; + } + } + } +} + +VCD_MP4_END; +#endif /* _MEDIATYPEDEFS_H_ */ diff --git a/src/isolib/atoms/MetaAtom.cpp b/src/isolib/atoms/MetaAtom.cpp new file mode 100644 index 00000000..1fd367ff --- /dev/null +++ b/src/isolib/atoms/MetaAtom.cpp @@ -0,0 +1,349 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
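MediaTypeTool maps a sample-entry four-character code onto the coarse bitstream type used elsewhere in isolib: 'avc1' and 'avc3' become AVCSTR, 'hvc1' and 'hev1' become HEVCSTR, and anything else logs an error and throws. A minimal call sketch (the file name argument only enriches the error message):

```cpp
MediaType codec = MediaTypeTool::GetType("hvc1", "tile_track_0.mp4");

if (codec == MediaType::HEVCSTR)
{
    // select the HEVC parsing path, e.g. hvcC configuration handling
}
```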
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MetaAtom.cpp +//! \brief: MetaAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! +#include "MetaAtom.h" + +VCD_MP4_BEGIN + +XmlAtom::XmlAtom() + : FullAtom("uri ", 0, 0) + , m_contents() +{ +} + +void XmlAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.WriteZeroEndString(m_contents); + UpdateSize(str); +} + +void XmlAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + str.ReadZeroEndString(m_contents); +} + +MetaAtom::MetaAtom() + : FullAtom("meta", 0, 0) + , m_handlerAtom() + , m_hasPrimaryItemAtom() + , m_primaryItemAtom() + , m_hasDataInfoAtom() + , m_dataInfoAtom() + , m_hasItemLocationAtom() + , m_itemLocationAtom() + , m_hasItemProtectionAtom() + , m_itemProtectionAtom() + , m_hasItemInfoAtom() + , m_itemInfoAtom(2) + , m_hasItemRefAtom() + , m_itemRefAtom() + , m_hasItemDataAtom() + , m_itemDataAtom() + , m_hasXmlAtom() + , m_xmlAtom() +{ +} + +FourCCInt MetaAtom::GetHandlerType() const +{ + FourCCInt ret = m_handlerAtom.GetHandlerType(); + return ret; +} + +void MetaAtom::SetHandlerType(FourCCInt handler) +{ + m_handlerAtom.SetHandlerType(handler); +} + +const PrimaryItemAtom& MetaAtom::GetPrimaryItemAtom() const +{ + return m_primaryItemAtom; +} + +void MetaAtom::SetPrimaryItem(const std::uint32_t itemId) +{ + m_hasPrimaryItemAtom = true; + m_primaryItemAtom.SetItemId(itemId); + + if (m_itemLocationAtom.HasItemIdEntry(itemId)) + { + auto urlAtom = MakeShared(); + urlAtom->SetFlags(1); // Flag 0x01 tells the data is in this file. DataEntryUrlAtom will write only its header. 
+ const std::uint16_t index = m_dataInfoAtom.AddDataEntryAtom(urlAtom); + m_itemLocationAtom.SetItemDataReferenceIndex(itemId, index); + } +} + +const ItemInfoAtom& MetaAtom::GetItemInfoAtom() const +{ + return m_itemInfoAtom; +} + +void MetaAtom::SetItemInfoAtom(const ItemInfoAtom& itemInfoAtom) +{ + m_hasItemInfoAtom = true; + m_itemInfoAtom = itemInfoAtom; +} + +const ItemLocationAtom& MetaAtom::GetItemLocationAtom() const +{ + return m_itemLocationAtom; +} + +const ItemReferenceAtom& MetaAtom::GetItemReferenceAtom() const +{ + return m_itemRefAtom; +} + +const DataInformationAtom& MetaAtom::GetDataInformationAtom() const +{ + return m_dataInfoAtom; +} + +void MetaAtom::AddItemReference(FourCCInt type, const std::uint32_t from_id, const std::uint32_t toId) +{ + m_hasItemRefAtom = true; + m_itemRefAtom.AddItems(type, from_id, toId); +} + +void MetaAtom::AddIloc(const std::uint32_t itemId, + const std::uint32_t offset, + const std::uint32_t length, + const std::uint32_t baseOffset) +{ + m_hasItemLocationAtom = true; + + ExtentParams locationExtent; + locationExtent.m_extentOffset = offset; + locationExtent.m_extentLen = length; + + ItemLocation itemLocation; + itemLocation.SetItemID(itemId); + itemLocation.SetBaseOffset(baseOffset); + itemLocation.AddExtent(locationExtent); + + m_itemLocationAtom.AddLocation(itemLocation); +} + +void MetaAtom::AddItem(const std::uint32_t itemId, FourCCInt type, const std::string& name, const bool hidden) +{ + m_hasItemInfoAtom = true; + + ItemInfoEntry infoAtom; + infoAtom.SetVersion(2); + infoAtom.SetItemType(type); + infoAtom.SetItemID(itemId); + infoAtom.SetItemName(name); + + if (hidden) + { + infoAtom.SetFlags(1); + } + + m_itemInfoAtom.AddItemInfoEntry(infoAtom); +} + +void MetaAtom::AddMdatItem(const std::uint32_t itemId, + FourCCInt type, + const std::string& name, + const std::uint32_t baseOffset) +{ + m_hasItemLocationAtom = true; + + AddItem(itemId, type, name); + + ItemLocation itemLocation; + itemLocation.SetItemID(itemId); + itemLocation.SetBaseOffset(baseOffset); + itemLocation.SetConstructType(ItemLocation::ConstructType::FILE_OFFSET); + m_itemLocationAtom.AddLocation(itemLocation); +} + +void MetaAtom::AddItemExtent(const std::uint32_t itemId, const std::uint32_t offset, const std::uint32_t length) +{ + m_hasItemLocationAtom = true; + + ExtentParams locationExtent; + locationExtent.m_extentOffset = offset; + locationExtent.m_extentLen = length; + m_itemLocationAtom.AddExtent(itemId, locationExtent); +} + +void MetaAtom::AddIdatItem(const std::uint32_t itemId, FourCCInt type, const std::string& name, const std::vector& data) +{ + m_hasItemLocationAtom = true; + + const unsigned int offset = m_itemDataAtom.AddData(data); + AddItem(itemId, type, name); + ExtentParams locationExtent; + locationExtent.m_extentOffset = offset; + locationExtent.m_extentLen = data.size(); + + ItemLocation itemLocation; + itemLocation.SetItemID(itemId); + itemLocation.AddExtent(locationExtent); + itemLocation.SetConstructType(ItemLocation::ConstructType::IDAT_OFFSET); + m_itemLocationAtom.AddLocation(itemLocation); +} + +void MetaAtom::AddItemIdatExtent(const std::uint32_t itemId, const std::vector& data) +{ + m_hasItemLocationAtom = true; + + const unsigned int offset = m_itemDataAtom.AddData(data); + ExtentParams locationExtent; + locationExtent.m_extentOffset = offset; + locationExtent.m_extentLen = data.size(); + m_itemLocationAtom.AddExtent(itemId, locationExtent); +} + +const ItemDataAtom& MetaAtom::GetItemDataAtom() const +{ + return m_itemDataAtom; +} + 
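MetaAtom offers two ways to attach an item payload: `AddMdatItem()` plus `AddItemExtent()` describe data living in the file-level 'mdat' (file-offset construction), while `AddIdatItem()` copies small payloads into the 'idat' atom inside 'meta' itself. A sketch combining both; IDs, types, offsets, and lengths are illustrative, and the payload vector is assumed to be a byte vector as elsewhere in isolib:

```cpp
MetaAtom meta;
meta.SetHandlerType("pict");

// Item 1: coded image stored in 'mdat'; the extent points at a file offset.
meta.AddMdatItem(1, "hvc1", "image", 0 /* baseOffset */);
meta.AddItemExtent(1, 4096 /* offset */, 2048 /* length */);

// Item 2: a small metadata blob embedded directly in 'idat'.
std::vector<std::uint8_t> blob = {0x45, 0x78, 0x69, 0x66, 0x00, 0x00};
meta.AddIdatItem(2, "Exif", "exif", blob);

meta.AddItemReference("cdsc", 2, 1);   // item 2 is a content description of item 1
meta.SetPrimaryItem(1);                // also registers a self-contained DataEntryUrlAtom
```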
+const ProtectionSchemeInfoAtom& MetaAtom::GetProtectionSchemeInfoAtom(std::uint16_t index) const +{ + const ProtectionSchemeInfoAtom& atom = m_itemProtectionAtom.GetEntry(index); + return atom; +} + +const XmlAtom& MetaAtom::GetXmlAtom() const +{ + return m_xmlAtom; +} + +void MetaAtom::SetXmlAtom(const XmlAtom& atom) +{ + m_hasXmlAtom = true; + m_xmlAtom = atom; +} + +void MetaAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + m_handlerAtom.ToStream(str); + if (m_hasPrimaryItemAtom) + { + m_primaryItemAtom.ToStream(str); + } + if (m_hasDataInfoAtom) + { + m_dataInfoAtom.ToStream(str); + } + if (m_hasItemLocationAtom) + { + m_itemLocationAtom.ToStream(str); + } + if (m_hasItemProtectionAtom) + { + m_itemProtectionAtom.ToStream(str); + } + if (m_hasItemInfoAtom) + { + m_itemInfoAtom.ToStream(str); + } + if (m_hasItemRefAtom) + { + m_itemRefAtom.ToStream(str); + } + if (m_hasItemDataAtom) + { + m_itemDataAtom.ToStream(str); + } + if (m_hasXmlAtom) + { + m_xmlAtom.ToStream(str); + } + + UpdateSize(str); +} + +void MetaAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + while (str.BytesRemain() > 0) + { + FourCCInt pAtomType; + Stream subStr = str.ReadSubAtomStream(pAtomType); + + if (pAtomType == "hdlr") + { + m_handlerAtom.FromStream(subStr); + } + else if (pAtomType == "pitm") + { + m_hasPrimaryItemAtom = true; + m_primaryItemAtom.FromStream(subStr); + } + else if (pAtomType == "iloc") + { + m_hasItemLocationAtom = true; + m_itemLocationAtom.FromStream(subStr); + } + else if (pAtomType == "iinf") + { + m_hasItemInfoAtom = true; + m_itemInfoAtom.FromStream(subStr); + } + else if (pAtomType == "iref") + { + m_hasItemRefAtom = true; + m_itemRefAtom.FromStream(subStr); + } + else if (pAtomType == "dinf") + { + m_hasDataInfoAtom = true; + m_dataInfoAtom.FromStream(subStr); + } + else if (pAtomType == "idat") + { + m_hasItemDataAtom = true; + m_itemDataAtom.FromStream(subStr); + } + else if (pAtomType == "ipro") + { + m_hasItemProtectionAtom = true; + m_itemProtectionAtom.FromStream(subStr); + } + else if (pAtomType == "xml ") + { + m_hasXmlAtom = true; + m_xmlAtom.FromStream(subStr); + } + } +} +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/MetaAtom.h b/src/isolib/atoms/MetaAtom.h new file mode 100644 index 00000000..6ebe7476 --- /dev/null +++ b/src/isolib/atoms/MetaAtom.h @@ -0,0 +1,407 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MetaAtom.h +//! \brief: Meta Atom class +//! \detail: 'meta' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _METAATOM_H_ +#define _METAATOM_H_ + +#include "FormAllocator.h" +#include "DataInfoAtom.h" +#include "FullAtom.h" +#include "HandlerAtom.h" +#include "ItemDataAtom.h" +#include "ItemInfoAtom.h" +#include "ItemLocationAtom.h" +#include "ItemProtAtom.h" +#include "ItemRefAtom.h" +#include "PrimaryItemAtom.h" +#include + +VCD_MP4_BEGIN + +class XmlAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + XmlAtom(); + + //! + //! \brief Destructor + //! + virtual ~XmlAtom() = default; + + //! + //! \brief Set and Get function for m_contents member + //! + //! \param [in] const std::string& + //! value to set + //! \param [in] m_contents + //! m_contents member in class + //! \param [in] Contents + //! m_contents name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::string&, m_contents, Contents, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::string m_contents; //!< the XML content +}; + +class MetaAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + MetaAtom(); + + //! + //! \brief Destructor + //! + virtual ~MetaAtom() = default; + + //! + //! \brief Get Handler Type + //! + //! \return FourCCInt + //! Handler Type + //! + FourCCInt GetHandlerType() const; + + //! + //! \brief Set Handler Type + //! + //! \param [in] FourCCInt + //! Handler value + //! + //! \return void + //! + void SetHandlerType(FourCCInt handler); + + //! + //! \brief Get PrimaryItem Atom + //! + //! \return const PrimaryItemAtom& + //! PrimaryItem Atom + //! + const PrimaryItemAtom& GetPrimaryItemAtom() const; + + //! + //! \brief Set PrimaryItem Atom + //! + //! \param [in] std::uint32_t + //! item Id + //! + //! \return void + //! + void SetPrimaryItem(std::uint32_t itemId); + + //! + //! \brief Get ItemInfo Atom + //! + //! \return const ItemInfoAtom& + //! ItemInfo Atom + //! + const ItemInfoAtom& GetItemInfoAtom() const; + + //! + //! \brief Set ItemInfo Atom + //! + //! \param [in] const ItemInfoAtom& + //! item Info Atom + //! + //! \return void + //! + void SetItemInfoAtom(const ItemInfoAtom& itemInfoAtom); + + //! + //! \brief Get ItemLocation Atom + //! + //! \return const ItemLocationAtom& + //! ItemLocation Atom + //! + const ItemLocationAtom& GetItemLocationAtom() const; + + //! + //! \brief Get ItemReference Atom + //! + //! \return const ItemReferenceAtom& + //! ItemReference Atom + //! 
+ const ItemReferenceAtom& GetItemReferenceAtom() const; + + //! + //! \brief Add Property + //! + //! \param [in] std::shared_ptr + //! atom pointer + //! \param [in] const std::vector& + //! item Ids + //! \param [in] bool + //! essential or not + //! + //! \return void + //! + void AddProperty(std::shared_ptr Atom, const std::vector& itemIds, bool essential); + + //! + //! \brief Add Property + //! + //! \param [in] std::uint16_t + //! index + //! \param [in] const std::vector& + //! item Ids + //! \param [in] bool + //! essential or not + //! + //! \return void + //! + void AddProperty(std::uint16_t index, const std::vector& itemIds, bool essential); + + //! + //! \brief Get ProtectionSchemeInfo Atom + //! + //! \param [in] std::uint16_t + //! index + //! \return const ProtectionSchemeInfoAtom& + //! ProtectionSchemeInfo Atom + //! + const ProtectionSchemeInfoAtom& GetProtectionSchemeInfoAtom(std::uint16_t index) const; + + //! + //! \brief Get DataInformation Atom + //! + //! \return const DataInformationAtom& + //! DataInformation Atom + //! + const DataInformationAtom& GetDataInformationAtom() const; + + //! + //! \brief Get ItemData Atom + //! + //! \return const ItemDataAtom& + //! ItemData Atom + //! + const ItemDataAtom& GetItemDataAtom() const; + + //! + //! \brief Add Iloc + //! + //! \param [in] std::uint32_t + //! item Ids + //! \param [in] std::uint32_t + //! offset + //! \param [in] std::uint32_t + //! length + //! \param [in] std::uint32_t + //! baseOffset + //! + //! \return void + //! + void AddIloc(std::uint32_t itemId, std::uint32_t offset, std::uint32_t length, std::uint32_t baseOffset); + + //! + //! \brief Add Item + //! + //! \param [in] std::uint32_t + //! item Ids + //! \param [in] FourCCInt + //! type + //! \param [in] const std::string& + //! name + //! \param [in] bool + //! hidden or not + //! + //! \return void + //! + void AddItem(std::uint32_t itemId, FourCCInt type, const std::string& name, bool hidden = false); + + //! + //! \brief Add Item Reference + //! + //! \param [in] FourCCInt + //! type + //! \param [in] std::uint32_t + //! fromId + //! \param [in] std::uint32_t + //! toId + //! + //! \return void + //! + void AddItemReference(FourCCInt type, std::uint32_t fromId, std::uint32_t toId); + + //! + //! \brief Add Idat Item + //! + //! \param [in] std::uint32_t + //! item Ids + //! \param [in] FourCCInt + //! type + //! \param [in] const std::string& + //! name + //! \param [in] const std::vector& + //! data + //! + //! \return void + //! + void AddIdatItem(std::uint32_t itemId, FourCCInt type, const std::string& name, const std::vector& data); + + //! + //! \brief Add Item Idat Extent + //! + //! \param [in] std::uint32_t + //! item Ids + //! \param [in] const std::vector& + //! data + //! + //! \return void + //! + void AddItemIdatExtent(std::uint32_t itemId, const std::vector& data); + + //! + //! \brief Add Mdat Item + //! + //! \param [in] std::uint32_t + //! item Ids + //! \param [in] FourCCInt + //! type + //! \param [in] const std::string& + //! name + //! \param [in] std::uint32_t + //! base Offset + //! + //! \return void + //! + void AddMdatItem(std::uint32_t itemId, FourCCInt type, const std::string& name, std::uint32_t baseOffset); + + //! + //! \brief Add Item Extent + //! + //! \param [in] std::uint32_t + //! item Ids + //! \param [in] std::uint32 + //! offset + //! \param [in] std::uint32_t + //! length + //! + //! \return void + //! + void AddItemExtent(std::uint32_t itemId, std::uint32_t offset, std::uint32_t length); + + //! + //! 
\brief Get Xml Atom + //! + //! \return const XmlAtom& + //! Xml Atom + //! + const XmlAtom& GetXmlAtom() const; + + //! + //! \brief Set Xml Atom + //! + //! \param [in] const XmlAtom& + //! Xml Atom + //! + //! \return void + //! + void SetXmlAtom(const XmlAtom& xmlAtom); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + HandlerAtom m_handlerAtom; //!< handler atom + bool m_hasPrimaryItemAtom; //!< has PrimaryItemAtom or not + PrimaryItemAtom m_primaryItemAtom; //!< primary Item Atom + bool m_hasDataInfoAtom; //!< has DataInfoAtom or not + DataInformationAtom m_dataInfoAtom; //!< Data Information Atom + bool m_hasItemLocationAtom; //!< has ItemLocationAtom or not + ItemLocationAtom m_itemLocationAtom; //!< Item Location Atom + bool m_hasItemProtectionAtom; //!< has ItemProtectionAtom or not + ItemProtectionAtom m_itemProtectionAtom;//!< Item Protection Atom + bool m_hasItemInfoAtom; //!< has Item Info Atom + ItemInfoAtom m_itemInfoAtom; //!< Item Information Atom + bool m_hasItemRefAtom; //!< has ItemRefAtom or not + ItemReferenceAtom m_itemRefAtom; //!< Item Reference Atom + bool m_hasItemDataAtom; //!< has ItemDataAtom or not + ItemDataAtom m_itemDataAtom; //!< Item Data Atom + bool m_hasXmlAtom; //!< has XmlAtom or not + XmlAtom m_xmlAtom; //!< Xml Atom +}; + +VCD_MP4_END; +#endif /* _METAATOM_H_ */ diff --git a/src/isolib/atoms/MetaDataSampEntryAtom.cpp b/src/isolib/atoms/MetaDataSampEntryAtom.cpp new file mode 100644 index 00000000..6d850648 --- /dev/null +++ b/src/isolib/atoms/MetaDataSampEntryAtom.cpp @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MetaDataSampEntryAtom.cpp +//! \brief: MetaDataSampEntryAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
+ +#include "MetaDataSampEntryAtom.h" + + +VCD_MP4_BEGIN + +MetaDataSampleEntryAtom::MetaDataSampleEntryAtom(FourCCInt codingname) + : SampleEntryAtom(codingname) +{ +} + +void MetaDataSampleEntryAtom::ToStream(Stream& str) +{ + SampleEntryAtom::ToStream(str); + + // Update the size of the movie Atom + UpdateSize(str); +} + +void MetaDataSampleEntryAtom::FromStream(Stream& str) +{ + SampleEntryAtom::FromStream(str); +} + +const Atom* MetaDataSampleEntryAtom::GetConfigurationAtom() const +{ + ISO_LOG(LOG_ERROR, "MetaDataSampleEntryAtom::GetConfigurationAtom() not impelmented \n"); + return nullptr; +} + +const DecoderConfigurationRecord* MetaDataSampleEntryAtom::GetConfigurationRecord() const +{ + ISO_LOG(LOG_ERROR, "MetaDataSampleEntryAtom::GetConfigurationRecord() not impelmented \n"); + return nullptr; +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/MetaDataSampEntryAtom.h b/src/isolib/atoms/MetaDataSampEntryAtom.h new file mode 100644 index 00000000..a3b17c5a --- /dev/null +++ b/src/isolib/atoms/MetaDataSampEntryAtom.h @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MetaDataSampEntryAtom.h +//! \brief: MetaDataSampEntryAtom class. +//! \detail: Meta data sample entry definitions. +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _METADATASAMPLEENTRYATOM_H_ +#define _METADATASAMPLEENTRYATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "SampEntryAtom.h" + +VCD_MP4_BEGIN + +class MetaDataSampleEntryAtom : public SampleEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + MetaDataSampleEntryAtom(FourCCInt codingname); + + //! + //! \brief Destructor + //! + virtual ~MetaDataSampleEntryAtom() = default; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief Get ConfigurationRecord + //! + //! 
\return const DecoderConfigurationRecord* + //! DecoderConfigurationRecord value + //! + virtual const DecoderConfigurationRecord* GetConfigurationRecord() const override; + + //! + //! \brief Get Configuration Atom + //! + //! \return const Atom* + //! Configuration Atom + //! + virtual const Atom* GetConfigurationAtom() const override; + +private: +}; + +VCD_MP4_END; +#endif /* _METADATASAMPLEENTRYATOM_H_ */ diff --git a/src/isolib/atoms/MovieAtom.cpp b/src/isolib/atoms/MovieAtom.cpp new file mode 100644 index 00000000..f8583606 --- /dev/null +++ b/src/isolib/atoms/MovieAtom.cpp @@ -0,0 +1,312 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieAtom.cpp +//! \brief: MovieAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
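`MetaDataSampleEntryAtom` above is a thin base class for timed-metadata sample entries: serialization is forwarded to `SampleEntryAtom`, while the two configuration accessors are stubs that log an error and return `nullptr`. A concrete entry would derive from it and override those accessors. The sketch below is a hypothetical illustration, not part of this patch; the class name and the `urim` coding name are assumptions.

```cpp
// Hypothetical subclass sketch for MetaDataSampleEntryAtom (names are assumptions).
#include "MetaDataSampEntryAtom.h"

VCD_MP4_BEGIN

class ExampleUriMetaSampleEntry : public MetaDataSampleEntryAtom
{
public:
    ExampleUriMetaSampleEntry()
        : MetaDataSampleEntryAtom("urim")   // coding name of this sample entry
    {
    }

    // A real subclass would expose its configuration here instead of inheriting
    // the error-logging nullptr defaults shown above.
    const Atom* GetConfigurationAtom() const override
    {
        return nullptr;                     // e.g. a URI init atom in a full implementation
    }

    const DecoderConfigurationRecord* GetConfigurationRecord() const override
    {
        return nullptr;                     // timed metadata usually has no decoder config record
    }
};

VCD_MP4_END
```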
+ +#include "MovieAtom.h" +#include "Stream.h" +#include + +VCD_MP4_BEGIN + +MovieExtendsHeaderAtom::MovieExtendsHeaderAtom(uint8_t version) + : FullAtom("mehd", version, 0) + , m_fragmentDuration(0) +{ +} + +void MovieExtendsHeaderAtom::SetFragmentDuration(const uint64_t fragmentDuration) +{ + m_fragmentDuration = fragmentDuration; +} + +uint64_t MovieExtendsHeaderAtom::GetFragmentDuration() const +{ + return m_fragmentDuration; +} + +void MovieExtendsHeaderAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + if (GetVersion() == 0) + { + str.Write32(static_cast(m_fragmentDuration)); + } + else if (GetVersion() == 1) + { + str.Write64(m_fragmentDuration); + } + else + { + ISO_LOG(LOG_ERROR, "ToStream() supports only 'mehd' version 0 or 1\n"); + throw Exception(); + } + UpdateSize(str); +} + +void MovieExtendsHeaderAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + if (GetVersion() == 0) + { + m_fragmentDuration = str.Read32(); + } + else if (GetVersion() == 1) + { + m_fragmentDuration = str.Read64(); + } + else + { + ISO_LOG(LOG_ERROR, "FromStream() supports only 'mehd' version 0 or 1\n"); + throw Exception(); + } +} + +MovieExtendsAtom::MovieExtendsAtom() + : Atom("mvex") + , m_movieExtendsHeaderAtomPresent(false) + , m_movieExtendsHeaderAtom() + , m_trackExtends() +{ +} + +void MovieExtendsAtom::AddMovieExtendsHeaderAtom(const MovieExtendsHeaderAtom& movieExtendsHeaderAtom) +{ + m_movieExtendsHeaderAtomPresent = true; + m_movieExtendsHeaderAtom = movieExtendsHeaderAtom; +} + +bool MovieExtendsAtom::IsMovieExtendsHeaderAtomPresent() const +{ + return m_movieExtendsHeaderAtomPresent; +} + +const MovieExtendsHeaderAtom& MovieExtendsAtom::GetMovieExtendsHeaderAtom() const +{ + return m_movieExtendsHeaderAtom; +} + +void MovieExtendsAtom::AddTrackExtendsAtom(UniquePtr trackExtendsAtom) +{ + m_trackExtends.push_back(std::move(trackExtendsAtom)); +} + +const std::vector MovieExtendsAtom::GetTrackExtendsAtoms() const +{ + std::vector trackExtendsAtoms; + for (auto& trackExtends : m_trackExtends) + { + trackExtendsAtoms.push_back(trackExtends.get()); + } + return trackExtendsAtoms; +} + +void MovieExtendsAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + if (IsMovieExtendsHeaderAtomPresent() == true) + { + m_movieExtendsHeaderAtom.ToStream(str); + } + for (auto& trackExtends : m_trackExtends) + { + trackExtends->ToStream(str); + } + UpdateSize(str); +} + +void MovieExtendsAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + bool foundTrex = false; + while (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "mehd") + { + m_movieExtendsHeaderAtomPresent = true; + m_movieExtendsHeaderAtom.FromStream(subBitstr); + } + else if (AtomType == "trex") + { + UniquePtr trackExtendsAtom(new TrackExtendsAtom()); + trackExtendsAtom->FromStream(subBitstr); + m_trackExtends.push_back(std::move(trackExtendsAtom)); + foundTrex = true; + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside MovieExtendsAtom.\n", type); + } + } + + if (!foundTrex) + { + ISO_LOG(LOG_ERROR, "FromStreamAtom cannot find mandatory TrackExtendsAtom Atom\n"); + throw Exception(); + } +} + +MovieAtom::MovieAtom() + : Atom("moov") + , m_movieHeaderAtom() + , m_tracks() + , m_movieExtendsAtom() +{ +} + +void MovieAtom::Clear() +{ + m_movieHeaderAtom = {}; + m_tracks.clear(); + m_movieExtendsAtom = {}; +} + +MovieHeaderAtom& MovieAtom::GetMovieHeaderAtom() +{ + 
return m_movieHeaderAtom; +} + +const MovieHeaderAtom& MovieAtom::GetMovieHeaderAtom() const +{ + return m_movieHeaderAtom; +} + +std::vector MovieAtom::GetTrackAtoms() +{ + std::vector trackAtoms; + for (auto& track : m_tracks) + { + trackAtoms.push_back(track.get()); + } + return trackAtoms; +} + +TrackAtom* MovieAtom::GetTrackAtom(uint32_t trackId) +{ + for (auto& track : m_tracks) + { + if (track.get()->GetTrackHeaderAtom().GetTrackID() == trackId) + { + return track.get(); + } + } + return nullptr; +} + +void MovieAtom::AddTrackAtom(UniquePtr trackAtom) +{ + m_tracks.push_back(std::move(trackAtom)); +} + +bool MovieAtom::IsMovieExtendsAtomPresent() const +{ + return !!m_movieExtendsAtom; +} + +const MovieExtendsAtom* MovieAtom::GetMovieExtendsAtom() const +{ + return m_movieExtendsAtom.get(); +} + +void MovieAtom::AddMovieExtendsAtom(UniquePtr movieExtendsAtom) +{ + m_movieExtendsAtom = std::move(movieExtendsAtom); +} + +// @todo Implement support for MovieAtom-level MetaAtom +bool MovieAtom::IsMetaAtomPresent() const +{ + return false; +} + +void MovieAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + + m_movieHeaderAtom.ToStream(str); + + for (auto& track : m_tracks) + { + track->ToStream(str); + } + + if (m_movieExtendsAtom) + { + m_movieExtendsAtom->ToStream(str); + } + + UpdateSize(str); +} + +void MovieAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + while (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "mvhd") + { + m_movieHeaderAtom.FromStream(subBitstr); + } + else if (AtomType == "trak") + { + UniquePtr trackAtom(new TrackAtom()); + trackAtom->FromStream(subBitstr); + // Ignore Atom if the handler type is not video, audio or metadata + FourCCInt handlerType = trackAtom->GetMediaAtom().GetHandlerAtom().GetHandlerType(); + if (handlerType == "vide" || handlerType == "soun" || handlerType == "meta") + { + m_tracks.push_back(move(trackAtom)); + } + } + else if (AtomType == "mvex") + { + m_movieExtendsAtom = MakeUnique(); + m_movieExtendsAtom->FromStream(subBitstr); + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside movie Atom.\n", type); + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/MovieAtom.h b/src/isolib/atoms/MovieAtom.h new file mode 100644 index 00000000..39c31670 --- /dev/null +++ b/src/isolib/atoms/MovieAtom.h @@ -0,0 +1,310 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieAtom.h +//! \brief: Movie Atom class +//! \detail: 'moov' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _MOVIEATOM_H_ +#define _MOVIEATOM_H_ + +#include "Atom.h" +#include "FormAllocator.h" +#include "MovieHeaderAtom.h" +#include "TrackAtom.h" +#include "FullAtom.h" +#include "Stream.h" +#include "TrackExtAtom.h" + +VCD_MP4_BEGIN + +class MovieExtendsHeaderAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + MovieExtendsHeaderAtom(uint8_t version = 0); + + //! + //! \brief Destructor + //! + virtual ~MovieExtendsHeaderAtom() = default; + + //! + //! \brief Set Fragment Duration + //! + //! \param [in] const uint64_t + //! fragment Duration + //! + //! \return void + //! + void SetFragmentDuration(const uint64_t fragmentDuration); + + //! + //! \brief Get Fragment Duration + //! + //! \return uint64_t + //! fragment Duration + //! + uint64_t GetFragmentDuration() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + uint64_t m_fragmentDuration; //!< fragment duration +}; + +class MovieExtendsAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + MovieExtendsAtom(); + + //! + //! \brief Destructor + //! + virtual ~MovieExtendsAtom() = default; + + //! + //! \brief Add Movie Extends Header Atom + //! + //! \param [in] const MovieExtendsHeaderAtom& + //! Movie Extends Header Atom + //! + //! \return void + //! + void AddMovieExtendsHeaderAtom(const MovieExtendsHeaderAtom& movieExtendsHeaderAtom); + + //! + //! \brief Is Movie Extends Header Atom Present + //! + //! \return bool + //! is present or not + //! + bool IsMovieExtendsHeaderAtomPresent() const; + + //! + //! \brief Get MovieExtendsHeader Atom + //! + //! \return const MovieExtendsHeaderAtom& + //! MovieExtendsHeader Atom + //! + const MovieExtendsHeaderAtom& GetMovieExtendsHeaderAtom() const; + + //! + //! \brief Add TrackExtends Atom + //! + //! \param [in] UniquePtr + //! trackExtendsAtom pointer + //! + //! \return void + //! + void AddTrackExtendsAtom(UniquePtr trackExtendsAtom); + + //! + //! \brief Get TrackExtends Atoms + //! + //! \return const std::vector + //! TrackExtends Atoms pointers + //! + const std::vector GetTrackExtendsAtoms() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void FromStream(Stream& str); + +private: + bool m_movieExtendsHeaderAtomPresent; //!< is movie Extends Header Atom Present + MovieExtendsHeaderAtom m_movieExtendsHeaderAtom; //!< Movie Extends Header Atom + std::vector> m_trackExtends;//!< Contained TrackExtendsAtoms +}; + +class MovieAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + MovieAtom(); + + //! + //! \brief Destructor + //! + virtual ~MovieAtom() = default; + + //! + //! \brief Clear track atom + //! + void Clear(); + + //! + //! \brief Get MovieHeader Atom + //! + //! \return MovieHeaderAtom& + //! MovieHeader Atom + //! + MovieHeaderAtom& GetMovieHeaderAtom(); + + //! + //! \brief Get MovieHeader Atom + //! + //! \return const MovieHeaderAtom& + //! MovieHeader Atom + //! + const MovieHeaderAtom& GetMovieHeaderAtom() const; + + //! + //! \brief Get Track Atoms + //! + //! \return std::vector + //! Track Atoms + //! + std::vector GetTrackAtoms(); + + //! + //! \brief Get Track Atom + //! + //! \param [in] uint32_t + //! track Id + //! \return TrackAtom* + //! Track Atom + //! + TrackAtom* GetTrackAtom(uint32_t trackId); + + //! + //! \brief Get Movie Extends Atom + //! + //! \return const MovieExtendsAtom* + //! Movie Extends Atom + //! + const MovieExtendsAtom* GetMovieExtendsAtom() const; + + //! + //! \brief Add Movie Extends Atom + //! + //! \param [in] UniquePtr + //! movie Extends Atom pointer + //! \return void + //! + void AddMovieExtendsAtom(UniquePtr movieExtendsAtom); + + //! + //! \brief Is Meta Atom Present + //! + //! \return bool + //! is or not + //! + bool IsMetaAtomPresent() const; + + //! + //! \brief Is Movie Extends Atom Present + //! + //! \return bool + //! is or not + //! + bool IsMovieExtendsAtomPresent() const; + + //! + //! \brief Add Track Atom + //! + //! \param [in] UniquePtr + //! track Atom pointer + //! \return void + //! + void AddTrackAtom(UniquePtr trackAtom); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + MovieHeaderAtom m_movieHeaderAtom; //!< The mandatory MovieHeaderAtom + std::vector> m_tracks; //!< Contained TrackAtoms + UniquePtr m_movieExtendsAtom; //!< Optional Movie Extends Atom +}; + +VCD_MP4_END; +#endif /* _MOVIEATOM_H_ */ diff --git a/src/isolib/atoms/MovieFragAtom.cpp b/src/isolib/atoms/MovieFragAtom.cpp new file mode 100644 index 00000000..916b36a7 --- /dev/null +++ b/src/isolib/atoms/MovieFragAtom.cpp @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieFragAtom.cpp +//! \brief: MovieFragAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "MovieFragAtom.h" + +VCD_MP4_BEGIN + +MovieFragmentAtom::MovieFragmentAtom(std::vector& sampleDefaults) + : Atom("moof") + , m_movieFragmentHeaderAtom() + , m_trackFragmentAtoms() + , m_sampleDefaults(sampleDefaults) + , m_firstByteOffset(0) +{ +} + +MovieFragmentHeaderAtom& MovieFragmentAtom::GetMovieFragmentHeaderAtom() +{ + return m_movieFragmentHeaderAtom; +} + +void MovieFragmentAtom::AddTrackFragmentAtom(UniquePtr trackFragmentAtom) +{ + m_trackFragmentAtoms.push_back(std::move(trackFragmentAtom)); +} + +std::vector MovieFragmentAtom::GetTrackFragmentAtoms() +{ + std::vector trackFragmentAtoms; + for (auto& trackFragmentAtom : m_trackFragmentAtoms) + { + trackFragmentAtoms.push_back(trackFragmentAtom.get()); + } + return trackFragmentAtoms; +} + +void MovieFragmentAtom::SetMoofFirstByteOffset(std::uint64_t moofFirstByteOffset) +{ + m_firstByteOffset = moofFirstByteOffset; +} + +std::uint64_t MovieFragmentAtom::GetMoofFirstByteOffset() +{ + return m_firstByteOffset; +} + +void MovieFragmentAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + m_movieFragmentHeaderAtom.ToStream(str); + for (auto& trackFragmentAtom : m_trackFragmentAtoms) + { + trackFragmentAtom->ToStream(str); + } + UpdateSize(str); +} + +void MovieFragmentAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + while (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "mfhd") + { + m_movieFragmentHeaderAtom.FromStream(subBitstr); + } + else if (AtomType == "traf") + { + UniquePtr trackFragmentAtom(new TrackFragmentAtom(m_sampleDefaults)); + trackFragmentAtom->FromStream(subBitstr); + m_trackFragmentAtoms.push_back(std::move(trackFragmentAtom)); + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside MovieFragmentAtom.\n", type); + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/MovieFragAtom.h b/src/isolib/atoms/MovieFragAtom.h new file mode 100644 index 00000000..61d66dd2 --- /dev/null +++ b/src/isolib/atoms/MovieFragAtom.h @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieFragAtom.h +//! \brief: Movie Fragment Atom class +//! \detail: 'moof' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _MOVIEFRAGMENTATOM_H_ +#define _MOVIEFRAGMENTATOM_H_ + +#include +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "MovieFragHeaderAtom.h" +#include "TrackExtAtom.h" +#include "TrackFragAtom.h" + +VCD_MP4_BEGIN + +class MovieFragmentAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + MovieFragmentAtom(std::vector& sampleDefaults); + + //! + //! \brief Destructor + //! + virtual ~MovieFragmentAtom() = default; + + //! + //! \brief Get Movie Fragment Header Atom + //! + //! \return MovieFragmentHeaderAtom& + //! Movie Fragment Header Atom + //! + MovieFragmentHeaderAtom& GetMovieFragmentHeaderAtom(); + + //! + //! \brief Add Track Fragment Atom + //! + //! \param [in] UniquePtr + //! track Fragment Atom pointer + //! + //! \return void + //! + void AddTrackFragmentAtom(UniquePtr trackFragmentAtom); + + //! + //! \brief Get Track Fragment Atoms + //! + //! \return std::vector + //! Track Fragment Atoms + //! + std::vector GetTrackFragmentAtoms(); + + //! + //! \brief Set Moof First Byte Offset + //! + //! \param [in] std::uint64_t + //! Moof First Byte Offset + //! + //! \return void + //! + void SetMoofFirstByteOffset(std::uint64_t moofFirstByteOffset); + + //! + //! \brief Get Moof First Byte Offset + //! + //! \return std::uint64_t + //! Moof First Byte Offset + //! + std::uint64_t GetMoofFirstByteOffset(); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void FromStream(Stream& str); + +private: + MovieFragmentHeaderAtom m_movieFragmentHeaderAtom; //!< Movie Fragment Header Atom + std::vector> m_trackFragmentAtoms; //!< Contained TrackFragmentAtoms + std::vector& m_sampleDefaults; //!< sampleDefaults array + std::uint64_t m_firstByteOffset; //!< Offset of 1st byte of this moof +}; + +VCD_MP4_END; +#endif /* _MOVIEFRAGMENTATOM_H_ */ diff --git a/src/isolib/atoms/MovieFragDataTypes.h b/src/isolib/atoms/MovieFragDataTypes.h new file mode 100644 index 00000000..35a19ed9 --- /dev/null +++ b/src/isolib/atoms/MovieFragDataTypes.h @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieFragDataTypes.h +//! \brief: MovieFragDataTypes class +//! \detail: Movie fragments data types definitions. +//! +//! Created on October 14, 2019, 13:39 PM +//! 
+#ifndef MOVIEFRAGMENTSDATATYPES_H +#define MOVIEFRAGMENTSDATATYPES_H + +#include +#include "Stream.h" +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +struct SampleFlagsType //!< sample flags type +{ + uint32_t reserved : 4, is_leading : 2, sample_depends_on : 2, sample_is_depended_on : 2, + sample_has_redundancy : 2, sample_padding_value : 3, sample_is_non_sync_sample : 1, + sample_degradation_priority : 16; +}; + +union SampleFlags { //!< sample flags + uint32_t flagsAsUInt; + SampleFlagsType flags; + + static SampleFlags Read(Stream& str) + { + SampleFlags r; + r.flags.reserved = str.Read1(4); + r.flags.is_leading = str.Read1(2); + r.flags.sample_depends_on = str.Read1(2); + r.flags.sample_is_depended_on = str.Read1(2); + r.flags.sample_has_redundancy = str.Read1(2); + r.flags.sample_padding_value = str.Read1(3); + r.flags.sample_is_non_sync_sample = str.Read1(1); + r.flags.sample_degradation_priority = str.Read1(16); + return r; + } + + static void Write(Stream& str, const SampleFlags& r) + { + str.Write1(r.flags.reserved, 4); + str.Write1(r.flags.is_leading, 2); + str.Write1(r.flags.sample_depends_on, 2); + str.Write1(r.flags.sample_is_depended_on, 2); + str.Write1(r.flags.sample_has_redundancy, 2); + str.Write1(r.flags.sample_padding_value, 3); + str.Write1(r.flags.sample_is_non_sync_sample, 1); + str.Write1(r.flags.sample_degradation_priority, 16); + } +}; + +struct SampleDefaults //!< sample defaults +{ + std::uint32_t trackId; + std::uint32_t defaultSampleDescriptionIndex; + std::uint32_t defaultSampleDuration; + std::uint32_t defaultSampleSize; + SampleFlags defaultSampleFlags; +}; + +VCD_MP4_END; +#endif /* MOVIEFRAGMENTSDATATYPES_H */ diff --git a/src/isolib/atoms/MovieFragHeaderAtom.cpp b/src/isolib/atoms/MovieFragHeaderAtom.cpp new file mode 100644 index 00000000..2ea63bcb --- /dev/null +++ b/src/isolib/atoms/MovieFragHeaderAtom.cpp @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieFragHeaderAtom.cpp +//! \brief: MovieFragHeaderAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
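The `SampleFlags` union above mirrors the ISOBMFF sample-flags word: the declared bitfield widths (4 + 2 + 2 + 2 + 2 + 3 + 1 + 16) sum to exactly 32 bits, so `Write()` and `Read()` serialize the same word that `flagsAsUInt` exposes. A minimal illustrative helper follows; the function name and chosen values are ours, not part of the patch.

```cpp
// Illustrative only: building a typical "non-sync, dependent" sample-flags word.
#include "MovieFragDataTypes.h"

VCD_MP4_BEGIN

static SampleFlags MakeNonSyncSampleFlags()
{
    SampleFlags f{};                        // zero-initializes the whole 32-bit word
    f.flags.sample_depends_on = 1;          // this sample depends on others
    f.flags.sample_is_non_sync_sample = 1;  // and is not a sync sample
    return f;                               // SampleFlags::Write(str, f) emits it field by field
}

VCD_MP4_END
```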
+ +#include "MovieFragHeaderAtom.h" + +VCD_MP4_BEGIN + +MovieFragmentHeaderAtom::MovieFragmentHeaderAtom() + : FullAtom("mfhd", 0, 0) + , m_sequenceNumber(0) + +{ +} + +void MovieFragmentHeaderAtom::SetSequenceNumber(const uint32_t sequencyNumber) +{ + m_sequenceNumber = sequencyNumber; +} + +uint32_t MovieFragmentHeaderAtom::GetSequenceNumber() const +{ + return m_sequenceNumber; +} + +void MovieFragmentHeaderAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write32(m_sequenceNumber); + UpdateSize(str); +} + +void MovieFragmentHeaderAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_sequenceNumber = str.Read32(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/MovieFragHeaderAtom.h b/src/isolib/atoms/MovieFragHeaderAtom.h new file mode 100644 index 00000000..e9a71c4a --- /dev/null +++ b/src/isolib/atoms/MovieFragHeaderAtom.h @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieFragHeaderAtom.h +//! \brief: Movie Fragment Header Atom class +//! \detail: 'mfhd' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _MOVIEFRAGMENTHEADERATOM_H_ +#define _MOVIEFRAGMENTHEADERATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class MovieFragmentHeaderAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + MovieFragmentHeaderAtom(); + + //! + //! \brief Destructor + //! + virtual ~MovieFragmentHeaderAtom() = default; + + //! + //! \brief Set Sequence Number + //! + //! \param [in] std::uint32_t + //! Sequence Number + //! + //! \return void + //! + void SetSequenceNumber(const uint32_t sequencyNumber); + + //! + //! \brief Get Sequence Number + //! + //! \return std::uint32_t + //! Sequence Number + //! + uint32_t GetSequenceNumber() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! 
\return void + //! + virtual void FromStream(Stream& str); + +private: + uint32_t m_sequenceNumber; //!< Sequence Number +}; + +VCD_MP4_END; +#endif /* _MOVIEFRAGMENTHEADERATOM_H_ */ diff --git a/src/isolib/atoms/MovieHeaderAtom.cpp b/src/isolib/atoms/MovieHeaderAtom.cpp new file mode 100644 index 00000000..8bcfa041 --- /dev/null +++ b/src/isolib/atoms/MovieHeaderAtom.cpp @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieHeaderAtom.cpp +//! \brief: MovieHeaderAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
+ +#include "MovieHeaderAtom.h" +#include + +VCD_MP4_BEGIN + +static const int MATRIX_LENGTH = 9; + +MovieHeaderAtom::MovieHeaderAtom() + : FullAtom("mvhd", 0, 0) + , m_creationTime(0) + , m_modificationTime(0) + , m_timeScale(0) + , m_duration(0) + , m_matrix({0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000}) + , m_nextTrackID(0) +{ +} + +void MovieHeaderAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + if (GetVersion() == 0) + { + str.Write32(static_cast(m_creationTime)); + str.Write32(static_cast(m_modificationTime)); + str.Write32(m_timeScale); + str.Write32(static_cast(m_duration)); + } + else if (GetVersion() == 1) + { + str.Write64(m_creationTime); + str.Write64(m_modificationTime); + str.Write32(m_timeScale); + str.Write64(m_duration); + } + else + { + ISO_LOG(LOG_ERROR, "ToStream() supports only 'mvhd' version 0 and version 1\n"); + throw Exception(); + } + str.Write32(0x00010000); // Rate + str.Write16(0x0100); // Volume + str.Write16(0); // Reserved + + str.Write32(0); // Reserved + str.Write32(0); + + for (unsigned int i = 0; i < MATRIX_LENGTH; ++i) + { + str.Write32(static_cast(m_matrix.at(i))); // Matrix[9] + } + + str.Write32(0); // Predefined[6] + str.Write32(0); + str.Write32(0); + str.Write32(0); + str.Write32(0); + str.Write32(0); + + str.Write32(m_nextTrackID); + + UpdateSize(str); +} + +void MovieHeaderAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + if ((GetVersion() != 0) && (GetVersion() != 1)) + { + ISO_LOG(LOG_ERROR, "FromStream() supports only 'mvhd' version 0 and version 1\n"); + throw Exception(); + } + + if (GetVersion() == 0) + { + m_creationTime = str.Read32(); + m_modificationTime = str.Read32(); + } + else + { + m_creationTime = str.Read64(); + m_modificationTime = str.Read64(); + } + m_timeScale = str.Read32(); + if (GetVersion() == 0) + { + m_duration = str.Read32(); + } + else + { + m_duration = str.Read64(); + } + str.Read32(); // Rate + str.Read16(); // Volume + str.Read16(); // Reserved + + str.Read32(); // Reserved + str.Read32(); + + m_matrix.clear(); + for (int i = 0; i < MATRIX_LENGTH; ++i) + { + m_matrix.push_back(static_cast(str.Read32())); // Matrix[9] + } + + str.Read32(); // Predefined[6] + str.Read32(); + str.Read32(); + str.Read32(); + str.Read32(); + str.Read32(); + + m_nextTrackID = str.Read32(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/MovieHeaderAtom.h b/src/isolib/atoms/MovieHeaderAtom.h new file mode 100644 index 00000000..a1391c24 --- /dev/null +++ b/src/isolib/atoms/MovieHeaderAtom.h @@ -0,0 +1,183 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: MovieHeaderAtom.h +//! \brief: Movie Header Atom class +//! \detail: 'mvhd' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _MOVIEHEADERATOM_H_ +#define _MOVIEHEADERATOM_H_ + +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class MovieHeaderAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + MovieHeaderAtom(); + + //! + //! \brief Destructor + //! + virtual ~MovieHeaderAtom() = default; + + //! + //! \brief Set and Get function for m_creationTime member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_creationTime + //! m_creationTime member in class + //! \param [in] CreationTime + //! m_creationTime name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_creationTime, CreationTime, const); + + //! + //! \brief Set and Get function for m_modificationTime member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_modificationTime + //! m_modificationTime member in class + //! \param [in] ModificationTime + //! m_modificationTime name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_modificationTime, ModificationTime, const); + + //! + //! \brief Set and Get function for m_timeScale member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_timeScale + //! m_timeScale member in class + //! \param [in] TimeScale + //! m_timeScale name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_timeScale, TimeScale, const); + + //! + //! \brief Set and Get function for m_duration member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_duration + //! m_duration member in class + //! \param [in] Duration + //! m_duration name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_duration, Duration, const); + + //! + //! \brief Set and Get function for m_nextTrackID member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_nextTrackID + //! m_nextTrackID member in class + //! \param [in] NextTrackID + //! m_nextTrackID name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_nextTrackID, NextTrackID, const); + + //! + //! \brief Set and Get function for m_matrix member + //! + //! \param [in] const std::vector& + //! value to set + //! \param [in] m_matrix + //! m_matrix member in class + //! \param [in] Matrix + //! m_matrix name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! 
+ MEMBER_SETANDGET_FUNC_WITH_OPTION(const std::vector&, m_matrix, Matrix, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::uint64_t m_creationTime; //!< creation Time + std::uint64_t m_modificationTime; //!< modification Time + std::uint32_t m_timeScale; //!< time Scale + std::uint64_t m_duration; //!< duration + std::vector m_matrix; //!< matrix + std::uint32_t m_nextTrackID; //!< next Track ID +}; + +VCD_MP4_END; +#endif /* _MOVIEHEADERATOM_H_ */ diff --git a/src/isolib/atoms/Mp4AudDecConfigRecord.cpp b/src/isolib/atoms/Mp4AudDecConfigRecord.cpp new file mode 100644 index 00000000..63d3dccd --- /dev/null +++ b/src/isolib/atoms/Mp4AudDecConfigRecord.cpp @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4AudDecConfigRecord.cpp +//! \brief: Mp4AudDecConfigRecord class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
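MovieHeaderAtom::ToStream() above only knows how to emit 'mvhd' version 0 (32-bit creation time, modification time and duration) or version 1 (64-bit fields) and throws for anything else, but nothing in the atom picks the version automatically. Below is a minimal sketch of that decision as a hypothetical helper that is not part of this diff; it only encodes the 32-bit overflow rule implied by the Write32/Write64 branches above.

```cpp
#include <cstdint>
#include <limits>

// Hypothetical helper (not in this diff): choose 'mvhd' version 1 only when one
// of the 64-bit fields no longer fits the 32-bit layout that version 0 serializes.
static std::uint8_t SelectMvhdVersion(std::uint64_t creationTime,
                                      std::uint64_t modificationTime,
                                      std::uint64_t duration)
{
    const std::uint64_t max32 = std::numeric_limits<std::uint32_t>::max();
    return (creationTime > max32 || modificationTime > max32 || duration > max32) ? 1 : 0;
}
```

A muxer would apply the result before serialization, assuming FullAtom exposes a version setter matching the GetVersion() getter used above.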
+ +#include "Mp4AudDecConfigRecord.h" +#include "ElemStreamDescAtom.h" + +VCD_MP4_BEGIN + +MP4AudioDecoderConfigurationRecord::MP4AudioDecoderConfigurationRecord(ElementaryStreamDescriptorAtom& aAtom) + : m_ESDAtom(aAtom) +{ +} + +void MP4AudioDecoderConfigurationRecord::GetConfigurationMap(ConfigurationMap& aMap) const +{ + std::vector decoderSpecInfo; + aMap.clear(); + if (m_ESDAtom.GetOneParameterSet(decoderSpecInfo)) + { + // Only handle AudioSpecificConfig from 1.6.2.1 AudioSpecificConfig of ISO/IEC 14496-3:200X(E) subpart 1 + aMap.insert({DecParam::AudioSpecificConfig, std::move(decoderSpecInfo)}); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/Mp4AudDecConfigRecord.h b/src/isolib/atoms/Mp4AudDecConfigRecord.h new file mode 100644 index 00000000..6a9bfce0 --- /dev/null +++ b/src/isolib/atoms/Mp4AudDecConfigRecord.h @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4AudDecConfigRecord.h +//! \brief: Mp4AudDecConfigRecord class +//! \detail: Mp4 Audio Decode Configuration Record +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _MP4AUDIODECODERCONFIGRECORD_H_ +#define _MP4AUDIODECODERCONFIGRECORD_H_ + +#include "DecConfigRecord.h" + +VCD_MP4_BEGIN + +class ElementaryStreamDescriptorAtom; + +class MP4AudioDecoderConfigurationRecord : public DecoderConfigurationRecord +{ +public: + + //! + //! \brief Constructor + //! + MP4AudioDecoderConfigurationRecord(ElementaryStreamDescriptorAtom& aAtom); + + //! + //! \brief Destructor + //! + virtual ~MP4AudioDecoderConfigurationRecord() = default; + + //! + //! \brief Get Configuration Map + //! + //! \param [in] ConfigurationMap& + //! map + //! \return void + //! 
+ virtual void GetConfigurationMap(ConfigurationMap& aMap) const override; + +protected: + const ElementaryStreamDescriptorAtom& m_ESDAtom; //!< Elementary Stream Descriptor Atom +}; + +VCD_MP4_END; +#endif /* _MP4AUDIODECODERCONFIGRECORD_H_ */ diff --git a/src/isolib/atoms/Mp4AudSampEntryAtom.cpp b/src/isolib/atoms/Mp4AudSampEntryAtom.cpp new file mode 100644 index 00000000..4e2dddba --- /dev/null +++ b/src/isolib/atoms/Mp4AudSampEntryAtom.cpp @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4AudSampEntryAtom.cpp +//! \brief: Mp4AudSampEntryAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
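MP4AudioDecoderConfigurationRecord::GetConfigurationMap() above publishes at most one entry, keyed by DecParam::AudioSpecificConfig and carrying the AudioSpecificConfig bytes taken from the 'esds' atom. The sketch below shows a caller-side lookup; it assumes ConfigurationMap behaves like a std::map from DecParam to a byte vector (which is how the insert() in GetConfigurationMap() uses it) and that the byte type is std::uint8_t. ConfigurationMap and DecParam come from DecConfigRecord.h, whose definitions are outside this diff.

```cpp
#include "Mp4AudDecConfigRecord.h"
#include <cstdint>
#include <vector>

VCD_MP4_BEGIN

// Sketch: fetch the AudioSpecificConfig, if any, from an mp4a decoder
// configuration record. Assumes ConfigurationMap supports std::map-style find();
// returns an empty vector when no 'esds' payload was found.
std::vector<std::uint8_t> ExtractAudioSpecificConfig(const MP4AudioDecoderConfigurationRecord& record)
{
    DecoderConfigurationRecord::ConfigurationMap config;
    record.GetConfigurationMap(config);

    auto it = config.find(DecParam::AudioSpecificConfig);
    if (it == config.end())
    {
        return {};
    }
    return it->second;
}

VCD_MP4_END
```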
+ +#include "Mp4AudSampEntryAtom.h" +#include + +VCD_MP4_BEGIN + +MP4AudioSampleEntryAtom::MP4AudioSampleEntryAtom() + : AudioSampleEntryAtom("mp4a") + , m_ESDAtom() + , m_hasSpatialAudioAtom(false) + , m_spatialAudioAtom() + , m_hasNonDiegeticAudioAtom(false) + , m_nonDiegeticAudioAtom() + , m_record(m_ESDAtom) +{ +} + +ElementaryStreamDescriptorAtom& MP4AudioSampleEntryAtom::GetESDAtom() +{ + return m_ESDAtom; +} + +const ElementaryStreamDescriptorAtom& MP4AudioSampleEntryAtom::GetESDAtom() const +{ + return m_ESDAtom; +} + +bool MP4AudioSampleEntryAtom::HasSpatialAudioAtom() const +{ + return m_hasSpatialAudioAtom; +} + +const SpatialAudioAtom& MP4AudioSampleEntryAtom::GetSpatialAudioAtom() const +{ + return m_spatialAudioAtom; +} + +void MP4AudioSampleEntryAtom::SetSpatialAudioAtom(const SpatialAudioAtom& spatialAudioAtom) +{ + m_hasSpatialAudioAtom = true; + m_spatialAudioAtom = spatialAudioAtom; +} + +bool MP4AudioSampleEntryAtom::HasNonDiegeticAudioAtom() const +{ + return m_hasNonDiegeticAudioAtom; +} + +const NonDiegeticAudioAtom& MP4AudioSampleEntryAtom::GetNonDiegeticAudioAtom() const +{ + return m_nonDiegeticAudioAtom; +} + +void MP4AudioSampleEntryAtom::SetNonDiegeticAudioAtom(const NonDiegeticAudioAtom& nonDiegeticAudioAtom) +{ + m_hasNonDiegeticAudioAtom = true; + m_nonDiegeticAudioAtom = nonDiegeticAudioAtom; +} + +void MP4AudioSampleEntryAtom::ToStream(Stream& str) +{ + AudioSampleEntryAtom::ToStream(str); + m_ESDAtom.ToStream(str); + + if (m_hasSpatialAudioAtom) + { + m_spatialAudioAtom.ToStream(str); + } + + if (m_hasNonDiegeticAudioAtom) + { + m_nonDiegeticAudioAtom.ToStream(str); + } + + UpdateSize(str); +} + +void MP4AudioSampleEntryAtom::FromStream(Stream& str) +{ + AudioSampleEntryAtom::FromStream(str); + + while (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "esds") + { + m_ESDAtom.FromStream(subBitstr); + } + else if (AtomType == "SA3D") + { + m_hasSpatialAudioAtom = true; + m_spatialAudioAtom.FromStream(subBitstr); + } + else if (AtomType == "SAND") + { + m_hasNonDiegeticAudioAtom = true; + m_nonDiegeticAudioAtom.FromStream(subBitstr); + } + } +} + +MP4AudioSampleEntryAtom* MP4AudioSampleEntryAtom::Clone() const +{ + return (new MP4AudioSampleEntryAtom(*this)); +} + +const Atom* MP4AudioSampleEntryAtom::GetConfigurationAtom() const +{ + return &m_ESDAtom; +} + +const DecoderConfigurationRecord* MP4AudioSampleEntryAtom::GetConfigurationRecord() const +{ + return &m_record; +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/Mp4AudSampEntryAtom.h b/src/isolib/atoms/Mp4AudSampEntryAtom.h new file mode 100644 index 00000000..8e72393a --- /dev/null +++ b/src/isolib/atoms/Mp4AudSampEntryAtom.h @@ -0,0 +1,183 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4AudSampEntryAtom.h +//! \brief: Mp4AudSampEntryAtom class. +//! \detail: This Atom contains information related to the mp4 audio samples of the track +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _MP4AUDIOSAMPLEENTRYATOM_H_ +#define _MP4AUDIOSAMPLEENTRYATOM_H_ + +#include "AudSampEntryAtom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "ElemStreamDescAtom.h" +#include "BasicAudAtom.h" +#include "Mp4AudDecConfigRecord.h" + +VCD_MP4_BEGIN + +class MP4AudioSampleEntryAtom : public AudioSampleEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + MP4AudioSampleEntryAtom(); + + //! + //! \brief Destructor + //! + virtual ~MP4AudioSampleEntryAtom() = default; + + //! + //! \brief Get ESD Atom + //! + //! \return ElementaryStreamDescriptorAtom& + //! ESD Atom + //! + ElementaryStreamDescriptorAtom& GetESDAtom(); + + //! + //! \brief Get ESD Atom + //! + //! \return const ElementaryStreamDescriptorAtom& + //! ESD Atom + //! + const ElementaryStreamDescriptorAtom& GetESDAtom() const; + + //! + //! \brief Has Spatial Audio Atom or not + //! + //! \return bool + //! has or not + //! + bool HasSpatialAudioAtom() const; + + //! + //! \brief Get Spatial Audio Atom + //! + //! \return const SpatialAudioAtom& + //! Spatial Audio Atom + //! + const SpatialAudioAtom& GetSpatialAudioAtom() const; + + //! + //! \brief Set Spatial Audio Atom + //! + //! \param [in] const SpatialAudioAtom& + //! Spatial Audio Atom + //! + //! \return void + //! + void SetSpatialAudioAtom(const SpatialAudioAtom&); + + //! + //! \brief Has Non Diegetic Audio Atom or not + //! + //! \return bool + //! has or not + //! + bool HasNonDiegeticAudioAtom() const; + + //! + //! \brief Get Non Diegetic Audio Atom + //! + //! \return const NonDiegeticAudioAtom& + //! Non Diegetic Audio Atom + //! + const NonDiegeticAudioAtom& GetNonDiegeticAudioAtom() const; + + //! + //! \brief Set Non Diegetic Audio Atom + //! + //! \param [in] const NonDiegeticAudioAtom& + //! Non Diegetic Audio Atom + //! + //! \return void + //! + void SetNonDiegeticAudioAtom(const NonDiegeticAudioAtom&); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief Get Copy of MP4AudioSampleEntryAtom + //! + //! \return MP4AudioSampleEntryAtom* + //! MP4AudioSampleEntryAtom Atom + //! + virtual MP4AudioSampleEntryAtom* Clone() const; + + //! 
+ //! \brief Get ConfigurationRecord + //! + //! \return const DecoderConfigurationRecord* + //! DecoderConfigurationRecord value + //! + virtual const DecoderConfigurationRecord* GetConfigurationRecord() const override; + + //! + //! \brief Get Configuration Atom + //! + //! \return const Atom* + //! Configuration Atom + //! + virtual const Atom* GetConfigurationAtom() const override; + +private: + ElementaryStreamDescriptorAtom m_ESDAtom; //!< Elementary Stream Descriptor Atom + bool m_hasSpatialAudioAtom; //!< has Spatial Audio Atom or not + SpatialAudioAtom m_spatialAudioAtom; //!< Spatial Audio Atom + bool m_hasNonDiegeticAudioAtom; //!< has Non Diegetic Audio Atom or not + NonDiegeticAudioAtom m_nonDiegeticAudioAtom;//!< Non Diegetic Audio Atom + MP4AudioDecoderConfigurationRecord m_record;//!< MP4 Audio Decoder Configuration Record +}; + +VCD_MP4_END; +#endif /* _MP4AUDIOSAMPLEENTRYATOM_H_ */ diff --git a/src/isolib/atoms/Mp4VisualSampEntryAtom.cpp b/src/isolib/atoms/Mp4VisualSampEntryAtom.cpp new file mode 100644 index 00000000..205f73df --- /dev/null +++ b/src/isolib/atoms/Mp4VisualSampEntryAtom.cpp @@ -0,0 +1,152 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4VisualSampEntryAtom.cpp +//! \brief: Mp4VisualSampEntryAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
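MP4AudioSampleEntryAtom::FromStream() above scans the children of an 'mp4a' entry and only marks the spatial-audio ('SA3D') and non-diegetic-audio ('SAND') atoms as present when such a child actually occurs, so callers should test the Has...() accessors before touching the returned references. A short sketch of that pattern, using only the accessors declared above:

```cpp
#include "Mp4AudSampEntryAtom.h"

VCD_MP4_BEGIN

// Sketch: parse an 'mp4a' sample entry and probe its optional children.
// Stream setup and error handling are omitted.
void InspectMp4aEntry(Stream& str)
{
    MP4AudioSampleEntryAtom entry;
    entry.FromStream(str);                              // fills 'esds' plus optional 'SA3D' / 'SAND'

    const ElementaryStreamDescriptorAtom& esd = entry.GetESDAtom();
    (void) esd;                                         // always populated for 'mp4a'

    if (entry.HasSpatialAudioAtom())                    // an 'SA3D' child was parsed
    {
        const SpatialAudioAtom& sa3d = entry.GetSpatialAudioAtom();
        (void) sa3d;
    }
    if (entry.HasNonDiegeticAudioAtom())                // a 'SAND' child was parsed
    {
        const NonDiegeticAudioAtom& sand = entry.GetNonDiegeticAudioAtom();
        (void) sand;
    }
}

VCD_MP4_END
```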
+#include "Mp4VisualSampEntryAtom.h" +#include + + +VCD_MP4_BEGIN + +MP4VisualSampleEntryAtom::MP4VisualSampleEntryAtom() + : VisualSampleEntryAtom("mp4v", "MPEG4 Visual Coding") + , m_ESDAtom() + , m_isStereoscopic3DPresent(false) + , m_stereoscopic3DAtom() + , m_isSphericalVideoV2AtomPresent(false) + , m_sphericalVideoV2Atom() +{ +} + +MP4VisualSampleEntryAtom::MP4VisualSampleEntryAtom(const MP4VisualSampleEntryAtom& Atom) + : VisualSampleEntryAtom(Atom) + , m_ESDAtom(Atom.m_ESDAtom) + , m_isStereoscopic3DPresent(Atom.m_isStereoscopic3DPresent) + , m_stereoscopic3DAtom(Atom.m_stereoscopic3DAtom) + , m_isSphericalVideoV2AtomPresent(Atom.m_isSphericalVideoV2AtomPresent) + , m_sphericalVideoV2Atom(Atom.m_sphericalVideoV2Atom) +{ +} + +void MP4VisualSampleEntryAtom::CreateStereoscopic3DAtom() +{ + m_isStereoscopic3DPresent = true; +} + +void MP4VisualSampleEntryAtom::CreateSphericalVideoV2Atom() +{ + m_isSphericalVideoV2AtomPresent = true; +} + +ElementaryStreamDescriptorAtom& MP4VisualSampleEntryAtom::GetESDAtom() +{ + return m_ESDAtom; +} + +const Stereoscopic3D* MP4VisualSampleEntryAtom::GetStereoscopic3DAtom() const +{ + return (m_isStereoscopic3DPresent ? &m_stereoscopic3DAtom : nullptr); +} + +const SphericalVideoV2Atom* MP4VisualSampleEntryAtom::GetSphericalVideoV2Atom() const +{ + return (m_isSphericalVideoV2AtomPresent ? &m_sphericalVideoV2Atom : nullptr); +} + +void MP4VisualSampleEntryAtom::ToStream(Stream& str) +{ + VisualSampleEntryAtom::ToStream(str); + m_ESDAtom.ToStream(str); + + if (m_isStereoscopic3DPresent) + { + m_stereoscopic3DAtom.ToStream(str); + } + + if (m_isSphericalVideoV2AtomPresent) + { + m_sphericalVideoV2Atom.ToStream(str); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void MP4VisualSampleEntryAtom::FromStream(Stream& str) +{ + VisualSampleEntryAtom::FromStream(str); + + while (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "esds") + { + m_ESDAtom.FromStream(subBitstr); + } + else if (AtomType == "st3d") + { + m_stereoscopic3DAtom.FromStream(subBitstr); + m_isStereoscopic3DPresent = true; + } + else if (AtomType == "sv3d") + { + m_sphericalVideoV2Atom.FromStream(subBitstr); + m_isSphericalVideoV2AtomPresent = true; + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside MP4VisualSampleEntryAtom.\n", type); + } + } +} + +MP4VisualSampleEntryAtom* MP4VisualSampleEntryAtom::Clone() const +{ + return (new MP4VisualSampleEntryAtom(*this)); +} + +const Atom* MP4VisualSampleEntryAtom::GetConfigurationAtom() const +{ + ISO_LOG(LOG_ERROR, "MP4VisualSampleEntryAtom::GetConfigurationAtom() not impelmented \n"); + return nullptr; +} + +const DecoderConfigurationRecord* MP4VisualSampleEntryAtom::GetConfigurationRecord() const +{ + ISO_LOG(LOG_ERROR, "MP4VisualSampleEntryAtom::GetConfigurationRecord() not impelmented \n"); + return nullptr; +} + +VCD_MP4_END diff --git a/src/isolib/atoms/Mp4VisualSampEntryAtom.h b/src/isolib/atoms/Mp4VisualSampEntryAtom.h new file mode 100644 index 00000000..8bf64838 --- /dev/null +++ b/src/isolib/atoms/Mp4VisualSampEntryAtom.h @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4VisualSampEntryAtom.h +//! \brief: Mp4VisualSampEntryAtom class. +//! \detail: contains information related to the mp4 visual samples of the track +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _MP4VISUALSAMPLEENTRYATOM_H_ +#define _MP4VISUALSAMPLEENTRYATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "ElemStreamDescAtom.h" +#include "BasicVideoAtom.h" +#include "VisualSampEntryAtom.h" + +VCD_MP4_BEGIN + +/** @brief MP4VIsualSampleEntryAtom class. Extends from VisualSampleEntryAtom. + * @details This Atom contains information related to the mp4 visual samples of the track + * @details as defined in the ISO/IEC FDIS 14496-14 standard. **/ + +class MP4VisualSampleEntryAtom : public VisualSampleEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + MP4VisualSampleEntryAtom(); + MP4VisualSampleEntryAtom(const MP4VisualSampleEntryAtom& Atom); + + MP4VisualSampleEntryAtom& operator=(const MP4VisualSampleEntryAtom&) = default; + //! + //! \brief Destructor + //! + virtual ~MP4VisualSampleEntryAtom() = default; + + //! + //! \brief Create Stereoscopic3D Atom + //! + //! \return void + //! + void CreateStereoscopic3DAtom(); + + //! + //! \brief Create SphericalVideo Atom + //! + //! \return void + //! + void CreateSphericalVideoV2Atom(); + + //! + //! \brief Get ESD Atom + //! + //! \return ElementaryStreamDescriptorAtom& + //! ESD Atom + //! + ElementaryStreamDescriptorAtom& GetESDAtom(); + + //! + //! \brief Get Stereoscopic3D Atom + //! + //! \return const Stereoscopic3D* + //! Stereoscopic3D Atom + //! + virtual const Stereoscopic3D* GetStereoscopic3DAtom() const override; + + //! + //! \brief Get SphericalVideo Atom + //! + //! \return const SphericalVideoV2Atom* + //! Stereoscopic3D Atom + //! + virtual const SphericalVideoV2Atom* GetSphericalVideoV2Atom() const override; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str) override; + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! 
\return void + //! + virtual void FromStream(Stream& str) override; + + //! + //! \brief Get Copy of MP4VisualSampleEntryAtom + //! + //! \return MP4VisualSampleEntryAtom* + //! MP4VisualSampleEntryAtom Atom + //! + virtual MP4VisualSampleEntryAtom* Clone() const override; + + //! + //! \brief Get ConfigurationRecord + //! + //! \return const DecoderConfigurationRecord* + //! DecoderConfigurationRecord value + //! + virtual const DecoderConfigurationRecord* GetConfigurationRecord() const override; + + //! + //! \brief Get Configuration Atom + //! + //! \return const Atom* + //! Configuration Atom + //! + virtual const Atom* GetConfigurationAtom() const override; + +private: + ElementaryStreamDescriptorAtom m_ESDAtom; //!< Elementary Stream Descriptor Atom + bool m_isStereoscopic3DPresent; //!< is Stereo scopic3D Present or not + Stereoscopic3D m_stereoscopic3DAtom; //!< Stereoscopic3D atom + bool m_isSphericalVideoV2AtomPresent; //!< is Spherical VideoV2Atom Present or not + SphericalVideoV2Atom m_sphericalVideoV2Atom;//!< Spherical Video V2 Atom +}; + +VCD_MP4_END; +#endif /* _MP4VISUALSAMPLEENTRYATOM_H_ */ diff --git a/src/isolib/atoms/NalUtil.cpp b/src/isolib/atoms/NalUtil.cpp new file mode 100644 index 00000000..977b7dd8 --- /dev/null +++ b/src/isolib/atoms/NalUtil.cpp @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: NalUtil.cpp +//! \brief: NalUtil class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
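MP4VisualSampleEntryAtom mirrors the audio entry: the Stereoscopic3D ('st3d') and SphericalVideoV2 ('sv3d') children are optional, the Create...() calls only mark them present, and the const getters return nullptr when the child is absent. A writer-side sketch built on the accessors declared above; the child atoms keep their default field values here.

```cpp
#include "Mp4VisualSampEntryAtom.h"

VCD_MP4_BEGIN

// Sketch: serialize an 'mp4v' entry that carries an 'sv3d' child.
// Base sample-entry fields and the 'esds' payload keep their defaults.
void WriteSphericalMp4vEntry(Stream& str)
{
    MP4VisualSampleEntryAtom entry;
    entry.CreateSphericalVideoV2Atom();                 // ToStream() will now also emit 'sv3d'
    // entry.CreateStereoscopic3DAtom();                // add 'st3d' as well for stereoscopic content

    if (entry.GetSphericalVideoV2Atom() != nullptr)     // non-null only after CreateSphericalVideoV2Atom()
    {
        entry.ToStream(str);
    }
}

VCD_MP4_END
```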
+ +#include "NalUtil.h" + +VCD_MP4_BEGIN + +unsigned int FindStartCodeLen(const std::vector& data) +{ + unsigned int pos = 0; + const auto pSize = data.size(); + + while ((pos + 1) < pSize && data[pos] == 0) + { + ++pos; + } + + if (pos > 1 && data[pos] == 1) + { + return pos + 1; + } + else + { + return 0; + } +} + +std::vector TransferStreamToRBSP(const std::vector& pStr) +{ + std::vector dest; + const unsigned int nalBytes = pStr.size(); + + dest.reserve(nalBytes); + + unsigned int i = FindStartCodeLen(pStr); + + static const size_t NAL_HEAD_LEN = 2; + dest.insert(dest.end(), pStr.cbegin() + i, pStr.cbegin() + i + NAL_HEAD_LEN); + i += NAL_HEAD_LEN; + + // copy rest of the data while removing start code emulation prevention bytes + enum class Status + { + DATA_COPY, + SINGLE_ZERO, + TWO_ZEROS + }; + Status status = Status::DATA_COPY; + int pBeginOffset = static_cast(i); + for (; i < nalBytes; ++i) + { + const unsigned int byte = pStr[i]; + switch (status) + { + case Status::DATA_COPY: + if (byte != 0) + status = Status::DATA_COPY; + else + status = Status::SINGLE_ZERO; + break; + + case Status::SINGLE_ZERO: + if (byte != 0) + status = Status::DATA_COPY; + else + status = Status::TWO_ZEROS; + break; + + case Status::TWO_ZEROS: + if (byte == 0x03) + { + dest.insert(dest.end(), pStr.cbegin() + pBeginOffset, pStr.cbegin() + i); + pBeginOffset = static_cast(i) + 1; + status = Status::DATA_COPY; + } + else if (byte == 0) + status = Status::TWO_ZEROS; + else + status = Status::DATA_COPY; + break; + } + } + dest.insert(dest.end(), pStr.cbegin() + pBeginOffset, pStr.cend()); + return dest; +} + +VCD_MP4_END diff --git a/src/isolib/atoms/NalUtil.h b/src/isolib/atoms/NalUtil.h new file mode 100644 index 00000000..94d594ec --- /dev/null +++ b/src/isolib/atoms/NalUtil.h @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: NalUtil.h +//! \brief: NalUtil class +//! \detail: defines the number of bytes in start code +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _NALUTIL_H_ +#define _NALUTIL_H_ + +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +//! +//! \brief Find Start Code Len +//! +//! \param [in] const std::vector +//! 
source data +//! +//! \return unsigned int +//! length +//! +unsigned int FindStartCodeLen(const std::vector &data); + +//! +//! \brief Transfer Stream To RBSP +//! +//! \param [in] const std::vector & +//! byte Stream +//! +//! \return std::vector +//! byte stream output +//! +std::vector TransferStreamToRBSP(const std::vector &byteStr); + +VCD_MP4_END; +#endif /* _NALUTIL_H_ */ diff --git a/src/player/Mesh.cpp b/src/isolib/atoms/NullMediaHeaderAtom.cpp similarity index 73% rename from src/player/Mesh.cpp rename to src/isolib/atoms/NullMediaHeaderAtom.cpp index 08a11931..17d95d35 100644 --- a/src/player/Mesh.cpp +++ b/src/isolib/atoms/NullMediaHeaderAtom.cpp @@ -22,51 +22,32 @@ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. - - * */ //! -//! \file Mesh.cpp -//! \brief Implement class for Mesh. +//! \file: NullMediaHeaderAtom.cpp +//! \brief: NullMediaHeaderAtom class implementation //! +//! Created on October 15, 2019, 13:39 PM +//! +#include "NullMediaHeaderAtom.h" -#include "Mesh.h" - -VCD_NS_BEGIN - -Mesh::Mesh() - : m_vertices(NULL), m_texCoords(NULL), m_indices(NULL), m_vertexNum(0), m_indexNum(0) -{ -} - -Mesh::~Mesh() -{ -} - -uint32_t Mesh::GetVertexNum() -{ - return m_vertexNum; -} - -uint32_t Mesh::GetIndexNum() -{ - return m_indexNum; -} +VCD_MP4_BEGIN -float *Mesh::GetVertices() +NullMediaHeaderAtom::NullMediaHeaderAtom() + : FullAtom("nmhd", 0, 0) { - return m_vertices; } -float *Mesh::GetTexCoords() +void NullMediaHeaderAtom::ToStream(Stream& str) { - return m_texCoords; + WriteFullAtomHeader(str); + UpdateSize(str); } -uint32_t *Mesh::GetIndices() +void NullMediaHeaderAtom::FromStream(Stream& str) { - return m_indices; + ParseFullAtomHeader(str); } -VCD_NS_END \ No newline at end of file +VCD_MP4_END \ No newline at end of file diff --git a/src/OmafDashAccess/OmafDashDownload/OmafDownloaderObserver.h b/src/isolib/atoms/NullMediaHeaderAtom.h similarity index 64% rename from src/OmafDashAccess/OmafDashDownload/OmafDownloaderObserver.h rename to src/isolib/atoms/NullMediaHeaderAtom.h index 4506d705..32c9ad32 100644 --- a/src/OmafDashAccess/OmafDashDownload/OmafDownloaderObserver.h +++ b/src/isolib/atoms/NullMediaHeaderAtom.h @@ -22,60 +22,58 @@ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. - */ //! -//! \file: OmafDownloaderObserver.h -//! \brief: download observer base +//! \file: NullMediaHeaderAtom.h +//! \brief: Null Media Header Atom class +//! \detail: 'nmhd' Atom +//! +//! Created on October 15, 2019, 13:39 PM //! -#ifndef OMAFDOWNLOADEROBSERVER_H -#define OMAFDOWNLOADEROBSERVER_H +#ifndef _NULLMEDIAHEADERATOM_H_ +#define _NULLMEDIAHEADERATOM_H_ -#include "../OmafDashParser/Common.h" +#include "FormAllocator.h" +#include "FullAtom.h" -VCD_OMAF_BEGIN +VCD_MP4_BEGIN -//! -//! \class OmafDownloaderObserver -//! \brief download observer abstract class -//! -class OmafDownloaderObserver +class NullMediaHeaderAtom : public FullAtom { public: //! //! \brief Constructor //! - OmafDownloaderObserver(){}; + NullMediaHeaderAtom(); //! //! \brief Destructor //! - virtual ~OmafDownloaderObserver(){}; + virtual ~NullMediaHeaderAtom() = default; //! - //! \brief Will be notified by downloader there is new downloaded data + //! \brief Write atom information to stream //! - //! \param [in] downloadedDataLength - //! 
length of downloaded data + //! \param [in,out] Stream& + //! bitstream that contains atom information //! //! \return void //! - virtual void DownloadDataNotify(uint64_t downloadedDataLength) = 0; + virtual void ToStream(Stream& str); //! - //! \brief Will be notified by downloader the status has changed + //! \brief Parse atom information from stream //! - //! \param [in] status - //! the changed status + //! \param [in,out] Stream& + //! bitstream that contains atom information //! //! \return void //! - virtual void DownloadStatusNotify(DownloaderStatus status) = 0; + virtual void FromStream(Stream& str); }; -VCD_OMAF_END; - -#endif //OMAFDOWNLOADEROBSERVER_H \ No newline at end of file +VCD_MP4_END; +#endif /* _NULLMEDIAHEADERATOM_H_ */ diff --git a/src/isolib/atoms/PrimaryItemAtom.cpp b/src/isolib/atoms/PrimaryItemAtom.cpp new file mode 100644 index 00000000..178d87b8 --- /dev/null +++ b/src/isolib/atoms/PrimaryItemAtom.cpp @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: PrimaryItemAtom.cpp +//! \brief: PrimaryItemAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "PrimaryItemAtom.h" + +VCD_MP4_BEGIN + +PrimaryItemAtom::PrimaryItemAtom() + : FullAtom("pitm", 0, 0) + , m_itemId(0) +{ +} + +void PrimaryItemAtom::SetItemId(uint32_t itemId) +{ + m_itemId = itemId; +} + +uint32_t PrimaryItemAtom::GetItemId() const +{ + return m_itemId; +} + +void PrimaryItemAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + if (GetVersion() == 0) + { + str.Write16(static_cast(m_itemId)); + } + else + { + str.Write32(m_itemId); + } + UpdateSize(str); +} + +void PrimaryItemAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + if (GetVersion() == 0) + { + m_itemId = str.Read16(); + } + else + { + m_itemId = str.Read32(); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/PrimaryItemAtom.h b/src/isolib/atoms/PrimaryItemAtom.h new file mode 100644 index 00000000..11160ab4 --- /dev/null +++ b/src/isolib/atoms/PrimaryItemAtom.h @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: PrimaryItemAtom.h +//! \brief: Primary Item Atom class +//! \detail: 'pitm' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _PRIMARYITEMATOM_H_ +#define _PRIMARYITEMATOM_H_ + +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class PrimaryItemAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + PrimaryItemAtom(); + + //! + //! \brief Destructor + //! + virtual ~PrimaryItemAtom() = default; + + //! + //! \brief Set Item Id + //! + //! \param [in] std::uint32_t + //! Item Id value + //! + //! \return void + //! + void SetItemId(std::uint32_t itemId); + + //! + //! \brief Get Item Id + //! + //! \return std::uint32_t + //! Item Id + //! + std::uint32_t GetItemId() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::uint32_t m_itemId; //!< The identifier of the primary item +}; + +VCD_MP4_END; +#endif /* _PRIMARYITEMATOM_H_ */ diff --git a/src/isolib/atoms/ProjRelatedAtom.cpp b/src/isolib/atoms/ProjRelatedAtom.cpp new file mode 100644 index 00000000..b5326e3b --- /dev/null +++ b/src/isolib/atoms/ProjRelatedAtom.cpp @@ -0,0 +1,447 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ProjRelatedAtom.cpp +//! \brief: ProjRelatedAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include + +#include "Stream.h" +#include "ProjRelatedAtom.h" + +VCD_MP4_BEGIN + +const uint8_t PROJTYPEMASK = 0x1f; + +ProjectionFormatAtom::ProjectionFormatAtom() + : FullAtom("prfr", 0, 0) + , m_projectionFormat(0) +{ +} + +ProjectionFormatAtom::ProjectFormat ProjectionFormatAtom::GetProjectFormat() const +{ + ProjectFormat ret = (ProjectFormat) m_projectionFormat; + return ret; +} + +void ProjectionFormatAtom::SetProjectFormat(ProjectFormat projectionFormat) +{ + m_projectionFormat = (uint8_t) projectionFormat & PROJTYPEMASK; +} + +void ProjectionFormatAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write8(m_projectionFormat & PROJTYPEMASK); + UpdateSize(str); +} + +void ProjectionFormatAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_projectionFormat = str.Read8() & PROJTYPEMASK; +} + +CoverageInformationAtom::CoverageInformationAtom() + : FullAtom("covi", 0, 0) + , m_viewIdcPresenceFlag(false) + , m_defaultViewIdc(ViewMode::INVALID) + , m_sphereRegions() +{ + m_coverageShapeMode = CoverageShapeMode::TWO_AZIMUTH_ELEVATION_CIRCLES; + +} + +CoverageInformationAtom::CoverageInformationAtom(const CoverageInformationAtom& Atom) + : FullAtom(Atom) + , m_coverageShapeMode(Atom.m_coverageShapeMode) + , m_viewIdcPresenceFlag(Atom.m_viewIdcPresenceFlag) + , m_defaultViewIdc(Atom.m_defaultViewIdc) + , m_sphereRegions() +{ + for (auto& region : Atom.m_sphereRegions) + { + m_sphereRegions.push_back(MakeUnique(*region)); + } +} + +std::vector CoverageInformationAtom::GetSphereRegions() const +{ + std::vector regions; + for (auto& region : m_sphereRegions) + { + regions.push_back(region.get()); + } + return regions; +} + +void CoverageInformationAtom::AddSphereRegion(UniquePtr region) +{ + m_sphereRegions.push_back(std::move(region)); +} + +void CoverageInformationAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write8((uint8_t) m_coverageShapeMode); + str.Write8((uint8_t) m_sphereRegions.size()); + + str.Write8(m_viewIdcPresenceFlag ? 0b10000000 : (((uint8_t) m_defaultViewIdc & 0b11) << 5)); + + for (auto& reg : m_sphereRegions) + { + if (m_viewIdcPresenceFlag) + { + // viewIdc is in first 2 bits 0bXX000000 + str.Write8(((uint8_t) reg->viewIdc & 0b0011) << 6); + } + + str.Write32(reg->region.centreAzimuth); + str.Write32(reg->region.centreElevation); + str.Write32(reg->region.centreTilt); + str.Write32(reg->region.azimuthRange); + str.Write32(reg->region.elevationRange); + + str.Write8(reg->region.interpolate ? 
0b10000000 : 0x0); + } + + UpdateSize(str); +} + +void CoverageInformationAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + m_coverageShapeMode = (CoverageShapeMode) str.Read8(); + std::uint8_t numRegions = str.Read8(); + std::uint8_t packed8Bits = str.Read8(); + + m_viewIdcPresenceFlag = (packed8Bits >> 7) == 0x01; + + // if not used set all bits to init it with impossible value + m_defaultViewIdc = (ViewMode)(m_viewIdcPresenceFlag ? 0xff : ((packed8Bits >> 5) & 0b00000011)); + + for (int i = 0; i < numRegions; ++i) + { + auto reg = MakeUnique(); + + if (m_viewIdcPresenceFlag) + { + packed8Bits = str.Read8(); + reg->viewIdc = (ViewMode)((packed8Bits >> 6) & 0b00000011); + } + else + { + reg->viewIdc = ViewMode::INVALID; + } + + reg->region.centreAzimuth = str.Read32(); + reg->region.centreElevation = str.Read32(); + reg->region.centreTilt = str.Read32(); + reg->region.azimuthRange = str.Read32(); + reg->region.elevationRange = str.Read32(); + + reg->region.interpolate = (str.Read8() >> 7) == 0x01; + + m_sphereRegions.push_back(std::move(reg)); + } +} + +void CoverageInformationAtom::Dump() const +{ + ISO_LOG(LOG_INFO, "---------------------------------- COVI ------------------------------\n"); + ISO_LOG(LOG_INFO, "m_coverageShapeMode: %d\n", (std::uint32_t) m_coverageShapeMode); + ISO_LOG(LOG_INFO, "m_viewIdcPresenceFlag: %d\n", (std::uint32_t) m_viewIdcPresenceFlag); + ISO_LOG(LOG_INFO, "m_defaultViewIdc: %d\n", (std::uint32_t) m_defaultViewIdc); + ISO_LOG(LOG_INFO, "m_numRegions: %d\n",(std::uint32_t) m_sphereRegions.size()); + + int pCnt = 0; + for (auto& pReg : m_sphereRegions) + { + pCnt++; + + ISO_LOG(LOG_INFO, "---------- Region - %d\n", pCnt); + ISO_LOG(LOG_INFO, "viewIdc: %d\n", (std::uint32_t) pReg->viewIdc); + ISO_LOG(LOG_INFO, "centreAzimuth: %d\n", pReg->region.centreAzimuth); + ISO_LOG(LOG_INFO, "centreElevation: %d\n", pReg->region.centreElevation); + ISO_LOG(LOG_INFO, "centreTilt: %d\n", pReg->region.centreTilt); + ISO_LOG(LOG_INFO, "azimuthRange: %d\n", pReg->region.azimuthRange); + ISO_LOG(LOG_INFO, "elevationRange: %d\n", pReg->region.elevationRange); + ISO_LOG(LOG_INFO, "interpolate: %d\n", pReg->region.interpolate); + } + ISO_LOG(LOG_INFO, "-============================ End Of COVI ===========================-\n"); +} + +RegionWisePackingAtom::RegionWisePackingAtom() + : FullAtom("rwpk", 0, 0) + , m_constituentPictureMatchingFlag(false) + , m_projPictureWidth(0) + , m_projPictureHeight(0) + , m_packedPictureWidth(0) + , m_packedPictureHeight(0) + , m_regions() +{ +} + +RegionWisePackingAtom::RegionWisePackingAtom(const RegionWisePackingAtom& Atom) + : FullAtom(Atom) + , m_constituentPictureMatchingFlag(Atom.m_constituentPictureMatchingFlag) + , m_projPictureWidth(Atom.m_projPictureWidth) + , m_projPictureHeight(Atom.m_projPictureHeight) + , m_packedPictureWidth(Atom.m_packedPictureWidth) + , m_packedPictureHeight(Atom.m_packedPictureHeight) + , m_regions() +{ + for (auto& region : Atom.m_regions) + { + m_regions.push_back(MakeUnique(*region)); + } +} + +std::vector RegionWisePackingAtom::GetRegions() const +{ + std::vector regions; + for (auto& region : m_regions) + { + regions.push_back(region.get()); + } + return regions; +} + + +void RegionWisePackingAtom::AddRegion(UniquePtr region) +{ + m_regions.push_back(std::move(region)); +} + +void RegionWisePackingAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write8(m_constituentPictureMatchingFlag ? 
0b10000000 : 0x0); + str.Write8(m_regions.size()); + + str.Write32(m_projPictureWidth); + str.Write32(m_projPictureHeight); + str.Write16(m_packedPictureWidth); + str.Write16(m_packedPictureHeight); + + for (auto& pReg : m_regions) + { + // 4th bit is guardband flag and last 4 bits are packing type + str.Write8((pReg->guardBandFlag ? 0b00010000 : 0x0) | ((uint8_t) pReg->packingType & 0x0f)); + + if (pReg->packingType == PackingType::RECTANGULAR) + { + auto& packing = pReg->rectangularPacking; + str.Write32(packing->projRegWidth); + str.Write32(packing->projRegHeight); + str.Write32(packing->projRegTop); + str.Write32(packing->projRegLeft); + // type in bits 0bXXX00000 + str.Write8(packing->transformType << 5); + str.Write16(packing->packedRegWidth); + str.Write16(packing->packedRegHeight); + str.Write16(packing->packedRegTop); + str.Write16(packing->packedRegLeft); + + if (pReg->guardBandFlag) + { + str.Write8(packing->leftGbWidth); + str.Write8(packing->rightGbWidth); + str.Write8(packing->topGbHeight); + str.Write8(packing->bottomGbHeight); + + std::uint16_t packed16Bits = packing->gbNotUsedForPredFlag ? (0x1 << 15) : 0x0; + packed16Bits |= (((std::uint16_t)(packing->gbType0) & 0b111) << 12) | (((std::uint16_t)(packing->gbType1) & 0b111) << 9) | + (((std::uint16_t)(packing->gbType2) & 0b111) << 6) | (((std::uint16_t)(packing->gbType3) & 0b111) << 3); + str.Write16(packed16Bits); + } + } + } + + UpdateSize(str); +} + +void RegionWisePackingAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + // read region wise packing struct + m_constituentPictureMatchingFlag = (str.Read8() >> 7) & 0x1; + std::uint8_t numRegions = str.Read8(); + m_projPictureWidth = str.Read32(); + m_projPictureHeight = str.Read32(); + m_packedPictureWidth = str.Read16(); + m_packedPictureHeight = str.Read16(); + + for (int i = 0; i < numRegions; ++i) + { + auto pReg = MakeUnique(); + + std::uint8_t packed8Bits = str.Read8(); + pReg->guardBandFlag = (packed8Bits >> 4) & 0x01; + pReg->packingType = (PackingType)(packed8Bits & 0x0f); + + if (pReg->packingType == PackingType::RECTANGULAR) + { + auto pPacking = MakeUnique(); + + // read RectRegionPacking + pPacking->projRegWidth = str.Read32(); + pPacking->projRegHeight = str.Read32(); + pPacking->projRegTop = str.Read32(); + pPacking->projRegLeft = str.Read32(); + pPacking->transformType = str.Read8() >> 5; + pPacking->packedRegWidth = str.Read16(); + pPacking->packedRegHeight = str.Read16(); + pPacking->packedRegTop = str.Read16(); + pPacking->packedRegLeft = str.Read16(); + + if (pReg->guardBandFlag) + { + // read GuardBand + pPacking->leftGbWidth = str.Read8(); + pPacking->rightGbWidth = str.Read8(); + pPacking->topGbHeight = str.Read8(); + pPacking->bottomGbHeight = str.Read8(); + + std::uint16_t packed16Bits = str.Read16(); + pPacking->gbNotUsedForPredFlag = packed16Bits >> 15 == 1; + pPacking->gbType0 = (packed16Bits >> 12) & 0x07; + pPacking->gbType1 = (packed16Bits >> 9) & 0x07; + pPacking->gbType2 = (packed16Bits >> 6) & 0x07; + pPacking->gbType3 = (packed16Bits >> 3) & 0x07; + } + + pReg->rectangularPacking = std::move(pPacking); + } + + m_regions.push_back(std::move(pReg)); + } +} + +void RegionWisePackingAtom::Dump() const +{ + ISO_LOG(LOG_INFO, "---------------------------------- RWPK ------------------------------\n"); + ISO_LOG(LOG_INFO, "ConstituentPictureMatchingFlag is : %d\n", (std::uint32_t) m_constituentPictureMatchingFlag); + ISO_LOG(LOG_INFO, "Projection Picture Width is : %d\n", (std::uint32_t) m_projPictureWidth); + ISO_LOG(LOG_INFO, 
"Projection Picture Height is : %d\n", (std::uint32_t) m_projPictureHeight); + ISO_LOG(LOG_INFO, "Packed Picture Width is : %d\n", (std::uint32_t) m_packedPictureWidth); + ISO_LOG(LOG_INFO, "Packed Picture Height is : %d\n", (std::uint32_t) m_packedPictureHeight); + ISO_LOG(LOG_INFO, "Num of Regions is : %d\n", (std::uint32_t) m_regions.size()); + int pCnt = 0; + for (auto& pReg : m_regions) + { + pCnt++; + + ISO_LOG(LOG_INFO, "- Region - %d\n", pCnt); + ISO_LOG(LOG_INFO, "GuarBandFlag is : %d\n", (std::uint32_t) pReg->guardBandFlag); + ISO_LOG(LOG_INFO, "PackingType is : %d\n", (std::uint32_t) pReg->packingType); + + if (pReg->packingType == PackingType::RECTANGULAR) + { + ISO_LOG(LOG_INFO, "transformType is : %d\n", (std::uint32_t) pReg->rectangularPacking->transformType); + ISO_LOG(LOG_INFO, "projection Region Width is: %d\n", (std::uint32_t) pReg->rectangularPacking->projRegWidth); + ISO_LOG(LOG_INFO, "projection Region Height is : %d\n", (std::uint32_t) pReg->rectangularPacking->projRegHeight); + ISO_LOG(LOG_INFO, "projection Region Top is : %d\n", (std::uint32_t) pReg->rectangularPacking->projRegTop); + ISO_LOG(LOG_INFO, "projection Region Left is : %d\n", (std::uint32_t) pReg->rectangularPacking->projRegLeft); + ISO_LOG(LOG_INFO, "packed Region Width is : %d\n", (std::uint32_t) pReg->rectangularPacking->packedRegWidth); + ISO_LOG(LOG_INFO, "packed Region Height is : %d\n", (std::uint32_t) pReg->rectangularPacking->packedRegHeight); + ISO_LOG(LOG_INFO, "packed Region Top is : %d\n", (std::uint32_t) pReg->rectangularPacking->packedRegTop); + ISO_LOG(LOG_INFO, "packed Region Left is : %d\n", (std::uint32_t) pReg->rectangularPacking->packedRegLeft); + + if (!pReg->guardBandFlag) + { + ISO_LOG(LOG_INFO, "No guard band flag !!\n"); + } + else + { + ISO_LOG(LOG_INFO, "left Gb Width is : %d\n", (std::uint32_t) pReg->rectangularPacking->leftGbWidth); + ISO_LOG(LOG_INFO, "right Gb Width is : %d\n", (std::uint32_t) pReg->rectangularPacking->rightGbWidth); + ISO_LOG(LOG_INFO, "top Gb Height is : %d\n", (std::uint32_t) pReg->rectangularPacking->topGbHeight); + ISO_LOG(LOG_INFO, "bottom Gb Height is : %d\n", (std::uint32_t) pReg->rectangularPacking->bottomGbHeight); + ISO_LOG(LOG_INFO, "gbNotUsedForPredFlag is : %d\n", (std::uint32_t) pReg->rectangularPacking->gbNotUsedForPredFlag); + ISO_LOG(LOG_INFO, "gbType0 is : %d\n", (std::uint32_t) pReg->rectangularPacking->gbType0); + ISO_LOG(LOG_INFO, "gbType1 is : %d\n", (std::uint32_t) pReg->rectangularPacking->gbType1); + ISO_LOG(LOG_INFO, "gbType2 is : %d\n", (std::uint32_t) pReg->rectangularPacking->gbType2); + ISO_LOG(LOG_INFO, "gbType3 is : %d\n", (std::uint32_t) pReg->rectangularPacking->gbType3); + } + } + } +} + +RegionWisePackingAtom::Region::Region(const Region& region) + : guardBandFlag(region.guardBandFlag) + , packingType(region.packingType) + , rectangularPacking(nullptr) +{ + if (packingType == PackingType::RECTANGULAR) + { + UniquePtr box = MakeUnique(*region.rectangularPacking); + rectangularPacking = move(box); + } +} + +RotationAtom::RotationAtom() + : FullAtom("rotn", 0, 0) + , m_rotation({}) +{ +} + +RotationAtom::RotationAtom(const RotationAtom& Atom) + : FullAtom(Atom) + , m_rotation(Atom.m_rotation) +{ +} + +void RotationAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write32(m_rotation.yaw); + str.Write32(m_rotation.pitch); + str.Write32(m_rotation.roll); + + UpdateSize(str); +} + +void RotationAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + m_rotation.yaw = str.Read32(); + m_rotation.pitch = 
str.Read32(); + m_rotation.roll = str.Read32(); +} + +VCD_MP4_END diff --git a/src/isolib/atoms/ProjRelatedAtom.h b/src/isolib/atoms/ProjRelatedAtom.h new file mode 100644 index 00000000..61415ce9 --- /dev/null +++ b/src/isolib/atoms/ProjRelatedAtom.h @@ -0,0 +1,494 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ProjRelatedAtom.h +//! \brief: ProjRelatedAtom class +//! \detail: defines Projection Related Atoms +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _PROJECTIONFORMATATOM_H_ +#define _PROJECTIONFORMATATOM_H_ + +#include +#include "FullAtom.h" +#include +#include "CommonTypes.h" + +VCD_MP4_BEGIN + +using namespace std; + +class ProjectionFormatAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + ProjectionFormatAtom(); + + //! + //! \brief Destructor + //! + virtual ~ProjectionFormatAtom() = default; + + enum class ProjectFormat : uint8_t //!< Projection Format + { + ERP = 0, + CUBEMAP + }; + + //! + //! \brief Get Projection Format + //! + //! \return ProjectFormat + //! Projection Format + //! + ProjectFormat GetProjectFormat() const; + + //! + //! \brief Set Projection Format + //! + //! \param [in] ProjectFormat + //! Projection Format value + //! + //! \return void + //! + void SetProjectFormat(ProjectFormat); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + uint8_t m_projectionFormat; //!< projection format +}; + +class CoverageInformationAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + CoverageInformationAtom(); + CoverageInformationAtom(const CoverageInformationAtom&); + + CoverageInformationAtom& operator=(const CoverageInformationAtom&) = default; + + //! + //! \brief Destructor + //! 
+ virtual ~CoverageInformationAtom() = default; + + enum class CoverageShapeMode : uint8_t //!< Coverage Shape Mode + { + FOUR_GCIRCLES = 0, + TWO_AZIMUTH_ELEVATION_CIRCLES + }; + + struct CoverageSphereRegion //!< Coverage Sphere Region + { + ViewMode viewIdc; + SphereRegion region; + }; + + //! + //! \brief Set and Get function for m_coverageShapeMode member + //! + //! \param [in] CoverageShapeMode + //! value to set + //! \param [in] m_coverageShapeMode + //! m_coverageShapeMode member in class + //! \param [in] CoverageShapeMode + //! m_coverageShapeMode name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(CoverageShapeMode, m_coverageShapeMode, CoverageShapeMode, const); + + //! + //! \brief Set and Get function for m_viewIdcPresenceFlag member + //! + //! \param [in] bool + //! value to set + //! \param [in] m_viewIdcPresenceFlag + //! m_viewIdcPresenceFlag member in class + //! \param [in] ViewIdcPresenceFlag + //! m_viewIdcPresenceFlag name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(bool, m_viewIdcPresenceFlag, ViewIdcPresenceFlag, const); + + //! + //! \brief Set and Get function for m_defaultViewIdc member + //! + //! \param [in] ViewMode + //! value to set + //! \param [in] m_defaultViewIdc + //! m_defaultViewIdc member in class + //! \param [in] DefaultViewIdc + //! m_defaultViewIdc name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(ViewMode, m_defaultViewIdc, DefaultViewIdc, const); + + //! + //! \brief Get SphereRegions + //! + //! \return vector + //! SphereRegions + //! + vector GetSphereRegions() const; + + //! + //! \brief Add Sphere Region + //! + //! \param [in] UniquePtr + //! CoverageSphereRegion pointer + //! + //! \return void + //! + void AddSphereRegion(UniquePtr); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief dump information + //! + void Dump() const; + +private: + CoverageShapeMode m_coverageShapeMode; //!< Coverage Shape Mode + bool m_viewIdcPresenceFlag; //!< view Idc Presence Flag + ViewMode m_defaultViewIdc; //!< default View Idc + vector> m_sphereRegions;//!< sphere Regions +}; + +class RegionWisePackingAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + RegionWisePackingAtom(); + RegionWisePackingAtom(const RegionWisePackingAtom&); + + RegionWisePackingAtom& operator=(const RegionWisePackingAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~RegionWisePackingAtom() = default; + + struct RectangularRegionWisePacking //!< Rectangular Region Wise Packing + { + //projection parameters + uint32_t projRegWidth; + uint32_t projRegHeight; + uint32_t projRegTop; + uint32_t projRegLeft; + //transform type + uint8_t transformType; + //packed parameters + uint16_t packedRegWidth; + uint16_t packedRegHeight; + uint16_t packedRegTop; + uint16_t packedRegLeft; + //Flowing parameters take effect when guardBandFlag is true. 
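        // (Editorial sketch, not part of the original patch.) Example values for a region
        // whose owning Region has guardBandFlag == false, so the gb* fields below stay
        // unused (RegionWisePackingAtom::Dump() in the .cpp skips their printout in that
        // case); the example assumes the left half of a 3840x1920 ERP picture is copied
        // unchanged into a 1920x1920 packed picture:
        //   projRegWidth   = 1920; projRegHeight   = 1920; projRegTop   = 0; projRegLeft   = 0;
        //   transformType  = 0;    // assumed here to mean "no rotation or mirroring"
        //   packedRegWidth = 1920; packedRegHeight = 1920; packedRegTop = 0; packedRegLeft = 0;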
+ bool gbNotUsedForPredFlag; + uint8_t leftGbWidth; + uint8_t rightGbWidth; + uint8_t topGbHeight; + uint8_t bottomGbHeight; + uint8_t gbType0; + uint8_t gbType1; + uint8_t gbType2; + uint8_t gbType3; + }; + + //! + //! \brief Set and Get function for m_constituentPictureMatchingFlag member + //! + //! \param [in] bool + //! value to set + //! \param [in] m_constituentPictureMatchingFlag + //! m_constituentPictureMatchingFlag member in class + //! \param [in] ConstituentPictureMatchingFlag + //! m_constituentPictureMatchingFlag name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(bool, m_constituentPictureMatchingFlag, ConstituentPictureMatchingFlag, const); + + //! + //! \brief Set and Get function for m_projPictureWidth member + //! + //! \param [in] uint32_t + //! value to set + //! \param [in] m_projPictureWidth + //! m_projPictureWidth member in class + //! \param [in] ProjPictureWidth + //! m_projPictureWidth name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_projPictureWidth, ProjPictureWidth, const); + + //! + //! \brief Set and Get function for m_projPictureHeight member + //! + //! \param [in] uint32_t + //! value to set + //! \param [in] m_projPictureHeight + //! m_projPictureHeight member in class + //! \param [in] ProjPictureHeight + //! m_projPictureHeight name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_projPictureHeight, ProjPictureHeight, const); + + //! + //! \brief Set and Get function for m_packedPictureWidth member + //! + //! \param [in] uint16_t + //! value to set + //! \param [in] m_packedPictureWidth + //! m_packedPictureWidth member in class + //! \param [in] PackedPictureWidth + //! m_packedPictureWidth name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint16_t, m_packedPictureWidth, PackedPictureWidth, const); + + //! + //! \brief Set and Get function for m_packedPictureHeight member + //! + //! \param [in] uint16_t + //! value to set + //! \param [in] m_packedPictureHeight + //! m_packedPictureHeight member in class + //! \param [in] PackedPictureHeight + //! m_packedPictureHeight name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint16_t, m_packedPictureHeight, PackedPictureHeight, const); + + enum class PackingType : uint8_t //!< packing type + { + RECTANGULAR = 0 + }; + + struct Region //!< region information + { + Region() + { + guardBandFlag = 0; + packingType = PackingType::RECTANGULAR; + }; + Region(const Region&); + Region& operator=(const Region&) = default; + bool guardBandFlag; + PackingType packingType; + UniquePtr rectangularPacking; + }; + + //! + //! \brief Get regions + //! + //! \return vector + //! regions + //! + vector GetRegions() const; + + //! + //! \brief add regions + //! + //! \param [in] UniquePtr + //! regions value + //! + //! \return void + //! + void AddRegion(UniquePtr); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! 
\param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief dump information + //! + void Dump() const; + +private: + bool m_constituentPictureMatchingFlag; //!< constituent Picture Matching Flag + uint32_t m_projPictureWidth; //!< projection Picture Width + uint32_t m_projPictureHeight; //!< projection Picture Height + uint16_t m_packedPictureWidth; //!< packed Picture Width + uint16_t m_packedPictureHeight; //!< packed Picture Height + vector> m_regions; //!< regions +}; + +class RotationAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + RotationAtom(); + RotationAtom(const RotationAtom&); + + RotationAtom& operator=(const RotationAtom&) = default; + //! + //! \brief Destructor + //! + virtual ~RotationAtom() = default; + + struct Rotation //!< rotation + { + int32_t yaw; + int32_t pitch; + int32_t roll; + }; + + //! + //! \brief Set and Get function for m_rotation member + //! + //! \param [in] Rotation + //! value to set + //! \param [in] m_rotation + //! m_rotation member in class + //! \param [in] Rotation + //! m_rotation name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(Rotation, m_rotation, Rotation, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + Rotation m_rotation; //!< rotation +}; + +VCD_MP4_END; +#endif /* _PROJECTIONFORMATATOM_H_ */ diff --git a/src/isolib/atoms/RestSchemeInfoAtom.cpp b/src/isolib/atoms/RestSchemeInfoAtom.cpp new file mode 100644 index 00000000..8f828512 --- /dev/null +++ b/src/isolib/atoms/RestSchemeInfoAtom.cpp @@ -0,0 +1,588 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: RestSchemeInfoAtom.cpp +//! 
\brief: RestSchemeInfoAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include + +#include "Stream.h" +#include "RestSchemeInfoAtom.h" + +VCD_MP4_BEGIN + +OriginalFormatAtom::OriginalFormatAtom() + : Atom("frma") +{ +} + +void OriginalFormatAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + str.Write32(m_originalFormat.GetUInt32()); + UpdateSize(str); +} + +void OriginalFormatAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + m_originalFormat = str.Read32(); +} + +ProjectedOmniVideoAtom::ProjectedOmniVideoAtom() + : Atom("povd") + , m_projectionFormatAtom() + , m_regionWisePackingAtom() + , m_coverageInformationAtom() +{ +} + +ProjectedOmniVideoAtom::ProjectedOmniVideoAtom(const ProjectedOmniVideoAtom& atom) + : Atom(atom) + , m_projectionFormatAtom(atom.m_projectionFormatAtom) + , m_regionWisePackingAtom(atom.m_regionWisePackingAtom ? MakeUnique( + *atom.m_regionWisePackingAtom) + : UniquePtr()) + , m_coverageInformationAtom( + atom.m_coverageInformationAtom + ? MakeUnique(*atom.m_coverageInformationAtom) + : UniquePtr()) + , m_rotationAtom(atom.m_rotationAtom ? MakeUnique(*atom.m_rotationAtom) + : UniquePtr()) +{ +} + +ProjectionFormatAtom& ProjectedOmniVideoAtom::GetProjectionFormatAtom() +{ + return m_projectionFormatAtom; +} + +RegionWisePackingAtom& ProjectedOmniVideoAtom::GetRegionWisePackingAtom() +{ + return *m_regionWisePackingAtom; +} + +void ProjectedOmniVideoAtom::SetRegionWisePackingAtom(UniquePtr rwpkAtom) +{ + m_regionWisePackingAtom = std::move(rwpkAtom); +} + +bool ProjectedOmniVideoAtom::HasRegionWisePackingAtom() const +{ + return !!m_regionWisePackingAtom; +} + +CoverageInformationAtom& ProjectedOmniVideoAtom::GetCoverageInformationAtom() +{ + return *m_coverageInformationAtom; +} + +void ProjectedOmniVideoAtom::SetCoverageInformationAtom(UniquePtr coviAtom) +{ + m_coverageInformationAtom = std::move(coviAtom); +} + +bool ProjectedOmniVideoAtom::HasCoverageInformationAtom() const +{ + return !!m_coverageInformationAtom; +} + +RotationAtom& ProjectedOmniVideoAtom::GetRotationAtom() +{ + return *m_rotationAtom; +} + +void ProjectedOmniVideoAtom::SetRotationAtom(UniquePtr rotnAtom) +{ + m_rotationAtom = std::move(rotnAtom); +} + +bool ProjectedOmniVideoAtom::HasRotationAtom() const +{ + return !!m_rotationAtom; +} + +void ProjectedOmniVideoAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + m_projectionFormatAtom.ToStream(str); + if (m_regionWisePackingAtom) + { + m_regionWisePackingAtom->ToStream(str); + } + if (m_coverageInformationAtom) + { + m_coverageInformationAtom->ToStream(str); + } + if (m_rotationAtom) + { + m_rotationAtom->ToStream(str); + } + UpdateSize(str); +} + +void ProjectedOmniVideoAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + m_projectionFormatAtom.FromStream(str); + if (m_projectionFormatAtom.GetType() != "prfr") + { + ISO_LOG(LOG_ERROR, "POVD Atom must start with prfr Atom\n"); + throw Exception(); + } + + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + Stream subStream = str.ReadSubAtomStream(AtomType); + + if (AtomType == "rwpk") + { + m_regionWisePackingAtom = MakeUnique(); + m_regionWisePackingAtom->FromStream(subStream); + } + else if (AtomType == "covi") + { + m_coverageInformationAtom = MakeUnique(); + m_coverageInformationAtom->FromStream(subStream); + } + else if (AtomType == "rotn") + { + m_rotationAtom = MakeUnique(); + m_rotationAtom->FromStream(subStream); + } + else + { + char type[4]; + 
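            // (Editorial note, not part of the original patch.) 'type' receives the four
            // FourCC characters for the warning below but is not null-terminated, so the
            // "%s" format can read past the buffer. A safer sketch would be:
            //   char type[5] = {0};
            //   AtomType.GetString().copy(type, 4, 0);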
AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Ignoring unknown AtomType found from povd Atom: %s\n", type); + } + } +} + +void ProjectedOmniVideoAtom::dump() const +{ + ISO_LOG(LOG_INFO, "---------------------------------- POVD ------------------------------\n"); + ISO_LOG(LOG_INFO, "m_projectionFormatAtom.GetProjectFormat: %d\n", (std::uint32_t) m_projectionFormatAtom.GetProjectFormat()); + + if (m_regionWisePackingAtom) + { + m_regionWisePackingAtom->Dump(); + } + + if (m_coverageInformationAtom) + { + m_coverageInformationAtom->Dump(); + } + + if (m_rotationAtom) + { + ISO_LOG(LOG_INFO, "Also rotation Atom is present\n"); + } + + ISO_LOG(LOG_INFO, "-============================ End Of POVD ===========================-\n"); +} + +SchemeTypeAtom::SchemeTypeAtom() + : FullAtom("schm", 0, 0) + , m_schemeType(0) + , m_schemeVersion(0) + , m_schemeUri("") +{ +} + +void SchemeTypeAtom::SetSchemeUri(const std::string& uri) +{ + std::uint32_t hasUriFieldFlags = uri.empty() ? GetFlags() & (~0x1) : GetFlags() | 0x1; + SetFlags(hasUriFieldFlags); + m_schemeUri = uri; +} + +const std::string& SchemeTypeAtom::GetSchemeUri() const +{ + return m_schemeUri; +} + +void SchemeTypeAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write32(m_schemeType.GetUInt32()); + str.Write32(m_schemeVersion); + if (GetFlags() & 0x1) + { + str.WriteZeroEndString(m_schemeUri); + } + UpdateSize(str); +} + +void SchemeTypeAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_schemeType = str.Read32(); + m_schemeVersion = str.Read32(); + if (GetFlags() & 0x1) + { + str.ReadZeroEndString(m_schemeUri); + } +} + +CompatibleSchemeTypeAtom::CompatibleSchemeTypeAtom() + : SchemeTypeAtom() +{ + SetType("csch"); +} + +RestrictedSchemeInfoAtom::RestrictedSchemeInfoAtom() + : Atom("rinf") + , m_projectedOmniVideoAtom() + , m_stereoVideoAtom() +{ +} + +RestrictedSchemeInfoAtom::RestrictedSchemeInfoAtom(const RestrictedSchemeInfoAtom& atom) + : Atom(atom) + , m_originalFormatAtom(atom.m_originalFormatAtom ? std::move(MakeUnique( + *atom.m_originalFormatAtom)) + : nullptr) + , m_schemeTypeAtom(atom.m_schemeTypeAtom ? std::move(MakeUnique(*atom.m_schemeTypeAtom)) + : nullptr) + , m_projectedOmniVideoAtom( + atom.m_projectedOmniVideoAtom + ? std::move(MakeUnique(*atom.m_projectedOmniVideoAtom)) + : nullptr) + , m_stereoVideoAtom(atom.m_stereoVideoAtom + ? 
std::move(MakeUnique(*atom.m_stereoVideoAtom)) + : nullptr) +{ + for (auto& schemeTypeAtom : atom.m_compatibleSchemeTypes) + { + m_compatibleSchemeTypes.push_back( + MakeUnique(*schemeTypeAtom)); + } +} + + +void RestrictedSchemeInfoAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + if (m_originalFormatAtom) + { + Stream subStream; + m_originalFormatAtom->ToStream(str); + str.WriteStream(subStream); + } + if (m_schemeTypeAtom) + { + Stream subStream; + m_schemeTypeAtom->ToStream(str); + str.WriteStream(subStream); + } + for (auto& compatibleScheme : m_compatibleSchemeTypes) + { + Stream subStream; + compatibleScheme->ToStream(str); + str.WriteStream(subStream); + } + + if (m_schemeTypeAtom && (m_schemeTypeAtom->GetSchemeType() == "podv")) + { + Stream povdStream; + m_projectedOmniVideoAtom->ToStream(povdStream); + + Stream stviStream; + if (m_stereoVideoAtom) + { + m_stereoVideoAtom->ToStream(stviStream); + } + + // write schi + povd + Stream schiStream; + schiStream.WriteHeaders("schi", povdStream.GetSize() + stviStream.GetSize()); + schiStream.WriteStream(povdStream); + schiStream.WriteStream(stviStream); + str.WriteStream(schiStream); + } + + UpdateSize(str); +} + +void RestrictedSchemeInfoAtom::FromStream(Stream& str) +{ + // rinf header + ParseAtomHeader(str); + + // if there a data available in the file + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "frma") + { + m_originalFormatAtom = std::move(MakeUnique()); + m_originalFormatAtom->FromStream(subBitstr); + } + else if (AtomType == "schm") + { + m_schemeTypeAtom = std::move(MakeUnique()); + m_schemeTypeAtom->FromStream(subBitstr); + } + else if (AtomType == "csch") + { + auto compatibeSchemeTypeAtom = MakeUnique(); + compatibeSchemeTypeAtom->FromStream(subBitstr); + m_compatibleSchemeTypes.push_back(std::move(compatibeSchemeTypeAtom)); + } + else if (AtomType == "schi") + { + if (!m_schemeTypeAtom) + { + ISO_LOG(LOG_ERROR, "Scheme type Atom was not found, before scheme info Atom\n"); + throw Exception(); + } + + // skip schi Atom headers + subBitstr.ReadAtomHeaders(AtomType); + + FourCCInt subSchiAtomType; + + // try to parse internals only if there is enough bytes to contain Atom inside + + while (subBitstr.BytesRemain() > 16) + { + Stream subSchiBitstr = subBitstr.ReadSubAtomStream(subSchiAtomType); + + auto schemeType = m_schemeTypeAtom->GetSchemeType().GetString(); + + if (schemeType == "podv") + { + if (subSchiAtomType == "povd") + { + m_projectedOmniVideoAtom = + std::move(MakeUnique()); + m_projectedOmniVideoAtom->FromStream(subSchiBitstr); + } + + if (subSchiAtomType == "stvi") + { + m_stereoVideoAtom = std::move(MakeUnique()); + m_stereoVideoAtom->FromStream(subSchiBitstr); + } + } + else + { + ISO_LOG(LOG_WARNING, "Skipping unsupported scheme type '%s'\n", schemeType.c_str()); + break; + } + } + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping unsupported Atom in rinf '%s'\n", type); + } + } +} + +FourCCInt RestrictedSchemeInfoAtom::GetOriginalFormat() const +{ + if (!m_originalFormatAtom) + { + ISO_LOG(LOG_ERROR, "Frma Atom was not found\n"); + throw Exception(); + } + FourCCInt ret = m_originalFormatAtom->GetOriginalFormat(); + return ret; +} + +void RestrictedSchemeInfoAtom::SetOriginalFormat(FourCCInt origFormat) +{ + if (!m_originalFormatAtom) + { + m_originalFormatAtom = move(MakeUnique()); + } + + 
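    // (Editorial note, not part of the original patch.) The 'frma' atom is created
    // lazily above on first use; the original, pre-restriction coding format
    // (e.g. "hvc1" or "avc1") is then recorded on it here.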
m_originalFormatAtom->SetOriginalFormat(origFormat); +} + +FourCCInt RestrictedSchemeInfoAtom::GetSchemeType() const +{ + if (!m_schemeTypeAtom) + { + ISO_LOG(LOG_ERROR, "Schm Atom was not found\n"); + throw Exception(); + } + FourCCInt ret = m_schemeTypeAtom->GetSchemeType(); + return ret; +} + +SchemeTypeAtom& RestrictedSchemeInfoAtom::GetSchemeTypeAtom() const +{ + return *m_schemeTypeAtom; +} + +void RestrictedSchemeInfoAtom::AddSchemeTypeAtom(UniquePtr schemeTypeAtom) +{ + m_schemeTypeAtom = std::move(schemeTypeAtom); +} + +bool RestrictedSchemeInfoAtom::HasSchemeTypeAtom() const +{ + return !!m_schemeTypeAtom; +} + +ProjectedOmniVideoAtom& RestrictedSchemeInfoAtom::GetProjectedOmniVideoAtom() const +{ + if (!m_projectedOmniVideoAtom) + { + ISO_LOG(LOG_ERROR, "POVD Atom was not found\n"); + throw Exception(); + } + return *m_projectedOmniVideoAtom; +} + +void RestrictedSchemeInfoAtom::AddProjectedOmniVideoAtom(UniquePtr povdAtom) +{ + m_projectedOmniVideoAtom = std::move(povdAtom); +} + +StereoVideoAtom& RestrictedSchemeInfoAtom::GetStereoVideoAtom() const +{ + if (!m_stereoVideoAtom) + { + ISO_LOG(LOG_ERROR, "Stvi Atom was not found\n"); + throw Exception(); + } + return *m_stereoVideoAtom; +} + +void RestrictedSchemeInfoAtom::AddStereoVideoAtom(UniquePtr stviAtom) +{ + m_stereoVideoAtom = std::move(stviAtom); +} + +bool RestrictedSchemeInfoAtom::HasStereoVideoAtom() const +{ + return !!m_stereoVideoAtom; +} + +std::vector RestrictedSchemeInfoAtom::GetCompatibleSchemeTypes() const +{ + std::vector schemeTypes; + for (auto& schemeType : m_compatibleSchemeTypes) + { + schemeTypes.push_back(schemeType.get()); + } + return schemeTypes; +} + +void RestrictedSchemeInfoAtom::AddCompatibleSchemeTypeAtom(UniquePtr compatibleSchemeType) +{ + m_compatibleSchemeTypes.push_back(std::move(compatibleSchemeType)); +} + +const uint32_t SINGLEVIEWMASK = 0x3; + +StereoVideoAtom::StereoVideoAtom() + : FullAtom("stvi", 0, 0) + , m_singleViewAllowed(StereoVideoAtom::SingleViewMode::NONE_MODE) + , m_stereoScheme(StereoVideoAtom::SchemeSpec::SPEC14496) +{ +} + +void StereoVideoAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write32((uint32_t) m_singleViewAllowed & SINGLEVIEWMASK); + str.Write32((uint32_t) m_stereoScheme); + + switch (m_stereoScheme) + { + case SchemeSpec::SPEC13818: + case SchemeSpec::SPEC14496: + str.Write32(4); // length 4 + str.Write32((uint32_t) m_stereoIndType.valAsUint32); + break; + case SchemeSpec::SPEC23000: + str.Write32(2); + str.Write8((uint8_t) m_stereoIndType.type23000.compositionType); + str.Write8(m_stereoIndType.type23000.isLeftFirst ? 0x01 : 0x00); + break; + case SchemeSpec::POVD: + str.Write32(2); + str.Write8((uint8_t) m_stereoIndType.typePOVD.compositionType); + str.Write8(m_stereoIndType.typePOVD.useQuincunxSampling ? 
0x01 : 0x00); + break; + } + + UpdateSize(str); +} + +void StereoVideoAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + m_singleViewAllowed = (SingleViewMode)(str.Read32() & SINGLEVIEWMASK); + m_stereoScheme = (SchemeSpec)(str.Read32()); + uint32_t pLength = str.Read32(); + + auto checkLen = [&](uint32_t len) { + if (pLength != len) + { + ISO_LOG(LOG_INFO, "Invalid length ( %d, ) for stvi stereo_indication_type data.\n", pLength); + ISO_LOG(LOG_INFO, "For scheme_type %d, expected length is %d\n", (uint32_t) m_stereoScheme, len); + } + }; + + switch (m_stereoScheme) + { + case SchemeSpec::SPEC13818: + case SchemeSpec::SPEC14496: + m_stereoIndType.valAsUint32 = str.Read32(); + checkLen(4); + break; + case SchemeSpec::SPEC23000: + m_stereoIndType.type23000.compositionType = (ISO23000StereoCompType) str.Read8(); + m_stereoIndType.type23000.isLeftFirst = str.Read8() & 0x01; + checkLen(2); + break; + case SchemeSpec::POVD: + m_stereoIndType.typePOVD.compositionType = (POVDFrameCompType) str.Read8(); + m_stereoIndType.typePOVD.useQuincunxSampling = str.Read8() & 0x01; + checkLen(2); + break; + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/RestSchemeInfoAtom.h b/src/isolib/atoms/RestSchemeInfoAtom.h new file mode 100644 index 00000000..c2062c54 --- /dev/null +++ b/src/isolib/atoms/RestSchemeInfoAtom.h @@ -0,0 +1,660 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: RestSchemeInfoAtom.h +//! \brief: RestSchemeInfoAtom Atom class. +//! \detail: Restrict Scheme Information Atom +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _RESTRICTEDSCHEMEINFOATOM_H_ +#define _RESTRICTEDSCHEMEINFOATOM_H_ + +#include +#include +#include "Atom.h" +#include "FormAllocator.h" +#include "FullAtom.h" +#include "ProjRelatedAtom.h" + +VCD_MP4_BEGIN + +class OriginalFormatAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + OriginalFormatAtom(); + + //! + //! \brief Destructor + //! + virtual ~OriginalFormatAtom() = default; + + //! + //! \brief Set and Get function for m_originalFormat member + //! + //! \param [in] FourCCInt + //! value to set + //! \param [in] m_originalFormat + //! m_originalFormat member in class + //! 
\param [in] OriginalFormat + //! m_originalFormat name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(FourCCInt, m_originalFormat, OriginalFormat, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + FourCCInt m_originalFormat; //!< original format +}; + +class ProjectedOmniVideoAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + ProjectedOmniVideoAtom(); + ProjectedOmniVideoAtom(const ProjectedOmniVideoAtom&); + + ProjectedOmniVideoAtom& operator=(const ProjectedOmniVideoAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~ProjectedOmniVideoAtom() = default; + + //! + //! \brief Get ProjectionFormat Atom + //! + //! \return ProjectionFormatAtom& + //! ProjectionFormat Atom + //! + ProjectionFormatAtom& GetProjectionFormatAtom(); + + //! + //! \brief Get RegionWisePacking Atom + //! + //! \return RegionWisePackingAtom& + //! RegionWisePacking Atom + //! + RegionWisePackingAtom& GetRegionWisePackingAtom(); + + //! + //! \brief Set RegionWisePacking Atom + //! + //! \param [in] UniquePtr + //! RegionWisePacking Atom + //! + //! \return void + //! + void SetRegionWisePackingAtom(UniquePtr); + + //! + //! \brief Has RegionWisePacking Atom or not + //! + //! \return bool + //! has or not + //! + bool HasRegionWisePackingAtom() const; + + //! + //! \brief Get CoverageInformation Atom + //! + //! \return CoverageInformationAtom& + //! CoverageInformation Atom + //! + CoverageInformationAtom& GetCoverageInformationAtom(); + + //! + //! \brief Set CoverageInformation Atom + //! + //! \param [in] UniquePtr + //! CoverageInformation Atom + //! + //! \return void + //! + void SetCoverageInformationAtom(UniquePtr); + + //! + //! \brief Has CoverageInformatio Atom or not + //! + //! \return bool + //! has or not + //! + bool HasCoverageInformationAtom() const; + + //! + //! \brief Get Rotation Atom + //! + //! \return RotationAtom& + //! Rotation Atom + //! + RotationAtom& GetRotationAtom(); + + //! + //! \brief Set Rotation Atom + //! + //! \param [in] UniquePtr + //! Rotation Atom + //! + //! \return void + //! + void SetRotationAtom(UniquePtr); + + //! + //! \brief Has Rotation Atom or not + //! + //! \return bool + //! has or not + //! + bool HasRotationAtom() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief dump information + //! + void dump() const; + +private: + ProjectionFormatAtom m_projectionFormatAtom; //!< Projection Format Atom + UniquePtr m_regionWisePackingAtom; //!< region Wise Packing Atom + UniquePtr m_coverageInformationAtom; //!< Coverage Information Atom + UniquePtr m_rotationAtom; //!< Rotation Atom +}; + +class SchemeTypeAtom : public FullAtom +{ +public: + + //! + //! 
\brief Constructor + //! + SchemeTypeAtom(); + + //! + //! \brief Destructor + //! + virtual ~SchemeTypeAtom() = default; + + //! + //! \brief Set and Get function for m_schemeType member + //! + //! \param [in] FourCCInt + //! value to set + //! \param [in] m_schemeType + //! m_schemeType member in class + //! \param [in] SchemeType + //! m_schemeType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(FourCCInt, m_schemeType, SchemeType, const); + + //! + //! \brief Set and Get function for m_schemeVersion member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_schemeVersion + //! m_schemeVersion member in class + //! \param [in] SchemeVersion + //! m_schemeVersion name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_schemeVersion, SchemeVersion, const); + + //! + //! \brief Set Scheme Uri + //! + //! \param [in] const std::string& + //! Scheme Uri value + //! + //! \return void + //! + void SetSchemeUri(const std::string& uri); + + //! + //! \brief Get Scheme Uri + //! + //! \return const std::string& + //! Scheme Uri + //! + const std::string& GetSchemeUri() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + FourCCInt m_schemeType; //!< scheme Type + uint32_t m_schemeVersion; //!< scheme Version + std::string m_schemeUri; //!< scheme Uri +}; + +class CompatibleSchemeTypeAtom : public SchemeTypeAtom +{ +public: + + //! + //! \brief Constructor + //! + CompatibleSchemeTypeAtom(); + + //! + //! \brief Destructor + //! + virtual ~CompatibleSchemeTypeAtom() = default; +}; + +class StereoVideoAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + StereoVideoAtom(); + + //! + //! \brief Destructor + //! 
+ virtual ~StereoVideoAtom() = default; + + enum class SingleViewMode : std::uint8_t //!< Single View Mode + { + NONE_MODE = 0, + RIGHT_MODE = 1, + LEFT_MODE = 2, + INVALID = 3 + }; + + enum class SchemeSpec : std::uint8_t //!< Scheme Spec + { + SPEC14496 = 1, + SPEC13818 = 2, + SPEC23000 = 3, + POVD = 4 + + }; + + enum class ISO14496FPArrType : std::uint32_t //!< ISO14496 FP Arr Type + { + INTERBOARD = 0, + INTERCOLUMN = 1, + INTERROW = 2, + SBSPACK = 3, + TPPACK = 4, + INTERTEMPER = 5 + }; + + enum class ISO13818ArrType : std::uint32_t //!< ISO13818 Arr Type + { + SBSSTEREO = 0b0000000000000011, + TBSTEREO = 0b0000000000000100, + TWODVIDEO = 0b0000000000001000 + }; + + enum class ISO23000StereoCompType : std::uint8_t //!< ISO23000 Stereo Comp Type + { + SBSHALF = 0x00, + INTERVERTICAL = 0x01, + FRAMESEQ = 0x02, + LRSEQ = 0x03, + TBHALF = 0x04, + SBSFULL = 0x05, + TBFULL = 0x06 + }; + + struct ISO23000ArrType //!< ISO23000 Arr Type + { + ISO23000StereoCompType compositionType; + bool isLeftFirst; + }; + + enum class POVDFrameCompType : std::uint8_t //!< POVD Frame Comp Type + { + TBPACK = 3, + SBSPACK = 4, + INTERTEMPER = 5 + }; + + struct POVDArrangeType //!< POVD Arrange Type + { + POVDFrameCompType compositionType; + bool useQuincunxSampling; + }; + + union StereoIndicationType { //!< Stereo Indication Type + uint32_t valAsUint32; + ISO14496FPArrType type14496; + ISO13818ArrType type13818; + ISO23000ArrType type23000; + POVDArrangeType typePOVD; + }; + + //! + //! \brief Set and Get function for m_singleViewAllowed member + //! + //! \param [in] SingleViewMode + //! value to set + //! \param [in] m_singleViewAllowed + //! m_singleViewAllowed member in class + //! \param [in] SingleViewAllowed + //! m_singleViewAllowed name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(SingleViewMode, m_singleViewAllowed, SingleViewAllowed, const); + + //! + //! \brief Set and Get function for m_stereoScheme member + //! + //! \param [in] SchemeSpec + //! value to set + //! \param [in] m_stereoScheme + //! m_stereoScheme member in class + //! \param [in] StereoScheme + //! m_stereoScheme name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(SchemeSpec, m_stereoScheme, StereoScheme, const); + + //! + //! \brief Set and Get function for m_stereoIndType member + //! + //! \param [in] StereoIndicationType + //! value to set + //! \param [in] m_stereoIndType + //! m_stereoIndType member in class + //! \param [in] StereoIndicationType + //! m_stereoIndType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(StereoIndicationType, m_stereoIndType, StereoIndicationType, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + SingleViewMode m_singleViewAllowed; //!< Single View Mode + SchemeSpec m_stereoScheme; //!< Scheme Spec + StereoIndicationType m_stereoIndType; //!< Stereo Indication Type +}; + +class RestrictedSchemeInfoAtom : public Atom +{ +public: + + //! 
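    //! (Editorial note, not part of the original patch.) This 'rinf' container bundles
    //! the original format ('frma'), the scheme type ('schm') plus any compatible
    //! scheme types ('csch'), and, for the "podv" scheme, a 'schi' holding the 'povd'
    //! atom and an optional 'stvi' atom, mirroring the ToStream()/FromStream() logic
    //! in RestSchemeInfoAtom.cpp.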
+ //! \brief Constructor + //! + RestrictedSchemeInfoAtom(); + RestrictedSchemeInfoAtom(const RestrictedSchemeInfoAtom&); + + RestrictedSchemeInfoAtom& operator=(const RestrictedSchemeInfoAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~RestrictedSchemeInfoAtom() = default; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream&); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream&); + + //! + //! \brief Get OriginalFormat + //! + //! \return FourCCInt + //! OriginalFormat + //! + virtual FourCCInt GetOriginalFormat() const; + + //! + //! \brief Set OriginalFormat + //! + //! \param [in] FourCCInt + //! OriginalFormat value + //! + //! \return void + //! + virtual void SetOriginalFormat(FourCCInt); + + //! + //! \brief Get SchemeType + //! + //! \return FourCCInt + //! SchemeType + //! + virtual FourCCInt GetSchemeType() const; + + //! + //! \brief Get SchemeType Atom + //! + //! \return SchemeTypeAtom& + //! SchemeType Atom + //! + SchemeTypeAtom& GetSchemeTypeAtom() const; + + //! + //! \brief Add Scheme Type Atom + //! + //! \param [in] UniquePtr + //! Scheme Type Atom pointer + //! + //! \return void + //! + void AddSchemeTypeAtom(UniquePtr); + + //! + //! \brief has Scheme Type Atom + //! + //! \return bool + //! has or not + //! + bool HasSchemeTypeAtom() const; + + //! + //! \brief Get ProjectedOmniVideoAtom Atom + //! + //! \return ProjectedOmniVideoAtom& + //! ProjectedOmniVideoAtom Atom + //! + ProjectedOmniVideoAtom& GetProjectedOmniVideoAtom() const; + + //! + //! \brief add ProjectedOmniVideoAtom Atom + //! + //! \param [in] UniquePtr + //! ProjectedOmniVideoAtom Atom pointer + //! + //! \return void + //! + void AddProjectedOmniVideoAtom(UniquePtr); + + //! + //! \brief Get StereoVideoAtom Atom + //! + //! \return StereoVideoAtom& + //! StereoVideoAtom Atom + //! + StereoVideoAtom& GetStereoVideoAtom() const; + + //! + //! \brief add StereoVideoAtom Atom + //! + //! \param [in] UniquePtr + //! StereoVideoAtom Atom pointer + //! + //! \return void + //! + void AddStereoVideoAtom(UniquePtr); + + //! + //! \brief has StereoVideo Atom + //! + //! \return bool + //! has or not + //! + bool HasStereoVideoAtom() const; + + //! + //! \brief Get CompatibleScheme Types + //! + //! \return std::vector + //! CompatibleScheme Types + //! + std::vector GetCompatibleSchemeTypes() const; + + //! + //! \brief add CompatibleScheme Type atom + //! + //! \param [in] UniquePtr + //! CompatibleSchemeType Atom pointer + //! \return void + //! + void AddCompatibleSchemeTypeAtom(UniquePtr); + +private: + UniquePtr m_originalFormatAtom; //!< Original Format Atom + UniquePtr m_schemeTypeAtom; //!< Scheme Type Atom + UniquePtr m_projectedOmniVideoAtom; //!< Projected Omni Video Atom + UniquePtr m_stereoVideoAtom; //!< Stereo Video Atom + std::vector> m_compatibleSchemeTypes; //!< Compatible Scheme Type Atom +}; + +VCD_MP4_END; +#endif /* _RESTRICTEDSCHEMEINFOATOM_H_ */ diff --git a/src/isolib/atoms/SampDescAtom.cpp b/src/isolib/atoms/SampDescAtom.cpp new file mode 100644 index 00000000..41ea0e69 --- /dev/null +++ b/src/isolib/atoms/SampDescAtom.cpp @@ -0,0 +1,200 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampDescAtom.cpp +//! \brief: SampDescAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "SampDescAtom.h" + +#include "AvcSampEntry.h" +#include "HevcSampEntry.h" +#include "InitViewOrientationSampEntry.h" + +#include "Mp4AudSampEntryAtom.h" +#include "Mp4VisualSampEntryAtom.h" +#include "RestSchemeInfoAtom.h" +#include "UriMetaSampEntryAtom.h" + +VCD_MP4_BEGIN + +SampleDescriptionAtom::SampleDescriptionAtom() + : FullAtom("stsd", 0, 0) +{ +} + +void SampleDescriptionAtom::AddSampleEntry(UniquePtr sampleEntry) +{ + m_index.push_back(std::move(sampleEntry)); +} + +void SampleDescriptionAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write32(static_cast(m_index.size())); + + for (auto& entry : m_index) + { + if (!entry) + { + ISO_LOG(LOG_ERROR, "ToStreamAtom can not write file\n"); + throw Exception(); + } + Stream entryBitStr; + entry->ToStream(entryBitStr); + str.WriteStream(entryBitStr); + } + UpdateSize(str); +} + +void SampleDescriptionAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + const unsigned int entryCount = str.Read32(); + + UniquePtr entrysResvAtom; + + for (unsigned int i = 0; i < entryCount; ++i) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + uint64_t entryStart = str.GetPos(); + Stream entryStream = str.ReadSubAtomStream(AtomType); + + // Add new sample entry types based on handler when necessary + if (AtomType == "hvc1" || AtomType == "hev1" || AtomType == "hvc2") + { + UniquePtr hevcSampleEntry(new HevcSampleEntry()); + hevcSampleEntry->FromStream(entryStream); + + m_index.push_back(std::move(hevcSampleEntry)); + } + else if (AtomType == "avc1" || AtomType == "avc3") + { + UniquePtr avcSampleEntry(new AvcSampleEntry()); + avcSampleEntry->FromStream(entryStream); + m_index.push_back(std::move(avcSampleEntry)); + } + else if (AtomType == "mp4a") + { + UniquePtr mp4AudioSampleEntry(new MP4AudioSampleEntryAtom()); + mp4AudioSampleEntry->FromStream(entryStream); + m_index.push_back(std::move(mp4AudioSampleEntry)); + } + else if (AtomType == "urim") + { + UniquePtr uriMetaSampleEntry(new UriMetaSampleEntryAtom()); + uriMetaSampleEntry->FromStream(entryStream); + 
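            // (Editorial note, not part of the original patch.) Entries are appended in
            // stsd order; GetSampleEntry() in SampDescAtom.h treats the index as 1-based
            // (the sample_description_index convention), hence its m_index.at(index - 1)
            // lookup.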
m_index.push_back(std::move(uriMetaSampleEntry)); + } + else if (AtomType == "invo") + { + UniquePtr invoSampleEntry(new InitViewOrient()); + invoSampleEntry->FromStream(entryStream); + m_index.push_back(std::move(invoSampleEntry)); + } + else if (AtomType == "mp4v") + { + UniquePtr mp4VisualSampleEntry(new MP4VisualSampleEntryAtom()); + mp4VisualSampleEntry->FromStream(entryStream); + m_index.push_back(std::move(mp4VisualSampleEntry)); + } + else if (AtomType == "resv") + { + // resv is a special case. It is normal video sample entry, with an additional rinf Atom, which describes how + // sample entry is encoded etc. First we read info from rinf Atom, rewrite correct sample entry fromat to + // to the stream replacing "resv" with original format e.g. "avc1". After rewrite stream is rewinded to + // entry start position and additional rinf Atom is stored along next read sample entry. + + // seek restricted video until rinf Atom to find out original format etc. + MP4VisualSampleEntryAtom visualSampleEntryHeaderParser; + visualSampleEntryHeaderParser.VisualSampleEntryAtom::FromStream(entryStream); + Stream rinfAtomSubBitstream; + while (entryStream.BytesRemain() > 0) + { + FourCCInt resvAtomType; + rinfAtomSubBitstream = entryStream.ReadSubAtomStream(resvAtomType); + if (resvAtomType == "rinf") + { + break; + } + } + + if (rinfAtomSubBitstream.GetSize() == 0) + { + ISO_LOG(LOG_ERROR, "There must be rinf Atom inside resv\n"); + throw Exception(); + } + + entrysResvAtom = MakeUnique(); + entrysResvAtom->FromStream(rinfAtomSubBitstream); + + // rewind & rewrite the sample entry Atom type + if (entrysResvAtom->GetOriginalFormat() == "resv") + { + ISO_LOG(LOG_ERROR, "OriginalFormat cannot be resv\n"); + throw Exception(); + } + + std::uint32_t originalFormat = entrysResvAtom->GetOriginalFormat().GetUInt32(); + str.SetPos(entryStart); + str.SetByte(entryStart + 4, (uint8_t)(originalFormat >> 24)); + str.SetByte(entryStart + 5, (uint8_t)(originalFormat >> 16)); + str.SetByte(entryStart + 6, (uint8_t)(originalFormat >> 8)); + str.SetByte(entryStart + 7, (uint8_t) originalFormat); + + i--; + continue; + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping unsupported SampleDescriptionAtom entry of type '%s'\n", type); + // Push nullptr to keep indexing correct, in case it will still be possible to operate with the file. + m_index.push_back(nullptr); + } + + // added entry was transformed from revs Atom, add resv info to the entry + if (entrysResvAtom) + { + if (m_index[i]) + { + m_index[i]->AddRestrictedSchemeInfoAtom(std::move(entrysResvAtom)); + } + else + { + entrysResvAtom.release(); + } + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/SampDescAtom.h b/src/isolib/atoms/SampDescAtom.h new file mode 100644 index 00000000..5d40501b --- /dev/null +++ b/src/isolib/atoms/SampDescAtom.h @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampDescAtom.h +//! \brief: SampDescAtom class. +//! \detail: support different sample entry types +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _SAMPLEDESCRIPTIONATOM_H_ +#define _SAMPLEDESCRIPTIONATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" +#include "SampEntryAtom.h" + +VCD_MP4_BEGIN + +class SampleDescriptionAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SampleDescriptionAtom(); + + //! + //! \brief Destructor + //! + ~SampleDescriptionAtom() = default; + + //! + //! \brief Add Sample Entry + //! + //! \param [in] UniquePtr + //! sample Entry value + //! + //! \return void + //! + void AddSampleEntry(UniquePtr sampleEntry); + + //! + //! \brief Get Sample Entries template + //! + //! \return std::vector + //! entries + //! + template + std::vector GetSampleEntries() const; + + //! + //! \brief Get Sample Entry template + //! + //! \param [in] unsigned int + //! index value + //! + //! \return T* + //! entry pointer + //! + template + T* GetSampleEntry(unsigned int index) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + +private: + std::vector> m_index; //!< std::vector of sample entries +}; + +template +std::vector SampleDescriptionAtom::GetSampleEntries() const +{ + std::vector ret; + for (auto& entry : m_index) + { + T* p = dynamic_cast(entry.get()); + if (p) + { + ret.push_back(p); + } + } + return ret; +} + +template +T* SampleDescriptionAtom::GetSampleEntry(const unsigned int index) const +{ + if (m_index.size() < index || index == 0) + { + ISO_LOG(LOG_ERROR, "SampleDescriptionAtom::GetSampleEntry invalid sample entry index.\n"); + throw Exception(); + } + + T* pPtr = dynamic_cast(m_index.at(index - 1).get()); + return pPtr; +} + +VCD_MP4_END; +#endif /* _SAMPLEDESCRIPTIONATOM_H_ */ diff --git a/src/isolib/atoms/SampEntryAtom.cpp b/src/isolib/atoms/SampEntryAtom.cpp new file mode 100644 index 00000000..50617f33 --- /dev/null +++ b/src/isolib/atoms/SampEntryAtom.cpp @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampEntryAtom.cpp +//! \brief: SampEntryAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! +#include "SampEntryAtom.h" +#include "Stream.h" + + +VCD_MP4_BEGIN + +static const int RESERVED_BYTES = 6; + +SampleEntryAtom::SampleEntryAtom(FourCCInt codingname) + : Atom(codingname) + , m_dataReferenceIndex(0) + , m_restrictedSchemeInfoAtom(nullptr) +{ +} + +SampleEntryAtom::SampleEntryAtom(const SampleEntryAtom& atom) + : Atom(atom.GetType()) + , m_dataReferenceIndex(atom.m_dataReferenceIndex) + , m_restrictedSchemeInfoAtom(nullptr) +{ + if (atom.m_restrictedSchemeInfoAtom) + { + UniquePtr box = MakeUnique(*atom.m_restrictedSchemeInfoAtom); + m_restrictedSchemeInfoAtom = move(box); + } +} + +std::uint16_t SampleEntryAtom::GetDataReferenceIndex() const +{ + return m_dataReferenceIndex; +} + +void SampleEntryAtom::SetDataReferenceIndex(std::uint16_t dataReferenceIndex) +{ + m_dataReferenceIndex = dataReferenceIndex; +} + +void SampleEntryAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + + for (int i = 0; i < RESERVED_BYTES; ++i) + { + str.Write8(0); // reserved = 0 + } + + str.Write16(m_dataReferenceIndex); + + // Update the size of the movie Atom + UpdateSize(str); +} + +void SampleEntryAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + for (int i = 0; i < RESERVED_BYTES; ++i) + { + str.Read8(); // reserved + } + + m_dataReferenceIndex = str.Read16(); +} + +void SampleEntryAtom::AddRestrictedSchemeInfoAtom(UniquePtr restrictedSchemeInfoAtom) +{ + m_restrictedSchemeInfoAtom = std::move(restrictedSchemeInfoAtom); +} + +RestrictedSchemeInfoAtom* SampleEntryAtom::GetRestrictedSchemeInfoAtom() const +{ + return m_restrictedSchemeInfoAtom.get(); +} + +bool SampleEntryAtom::IsVisual() const +{ + return false; +} + +VCD_MP4_END diff --git a/src/isolib/atoms/SampEntryAtom.h b/src/isolib/atoms/SampEntryAtom.h new file mode 100644 index 00000000..e60e1a37 --- /dev/null +++ b/src/isolib/atoms/SampEntryAtom.h @@ -0,0 +1,158 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampEntryAtom.h +//! \brief: Sample Entry Atom class. +//! \detail: Defines Sample Entry data structure +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _SAMPLEENTRYATOM_H_ +#define _SAMPLEENTRYATOM_H_ + +#include +#include "Atom.h" +#include "FormAllocator.h" +#include "DecConfigRecord.h" +#include "RestSchemeInfoAtom.h" + +VCD_MP4_BEGIN + +class SampleEntryAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + SampleEntryAtom(FourCCInt codingname); + SampleEntryAtom(const SampleEntryAtom& Atom); + + SampleEntryAtom& operator=(const SampleEntryAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~SampleEntryAtom() = default; + + //! + //! \brief Get Data Reference Index + //! + //! \return std::uint16_t + //! Data Reference Index + //! + std::uint16_t GetDataReferenceIndex() const; + + //! + //! \brief Set Data Reference Index + //! + //! \param [in] std::uint16_t + //! data Reference Index + //! + //! \return void + //! + void SetDataReferenceIndex(std::uint16_t dataReferenceIndex); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief Get Copy of AudioSampleEntryAtom + //! + //! \return AudioSampleEntryAtom* + //! AudioSampleEntry Atom + //! + virtual SampleEntryAtom* Clone() const = 0; + + //! + //! \brief Add Restricted Scheme Info Atom + //! + //! \param [in] UniquePtr + //! Restricted Scheme Info Atom pointer + //! + //! \return void + //! + void AddRestrictedSchemeInfoAtom(UniquePtr); + + //! + //! \brief Get Restricted Scheme Info Atom + //! + //! \return RestrictedSchemeInfoAtom* + //! Restricted Scheme Info Atom + //! + RestrictedSchemeInfoAtom* GetRestrictedSchemeInfoAtom() const; + + //! + //! \brief Get ConfigurationRecord + //! + //! \return const DecoderConfigurationRecord* + //! DecoderConfigurationRecord value + //! 
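    //! (Editorial note, not part of the original patch.) Expected to be implemented by
    //! the codec-specific entries such as HevcSampleEntry and AvcSampleEntry referenced
    //! from SampDescAtom.cpp, which presumably return their hvcC / avcC decoder
    //! configuration records.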
+ virtual const DecoderConfigurationRecord* GetConfigurationRecord() const = 0; + + //! + //! \brief Get Configuration Atom + //! + //! \return const Atom* + //! Configuration Atom + //! + virtual const Atom* GetConfigurationAtom() const = 0; + + //! + //! \brief Is Visual or not + //! + //! \return bool + //! check is visual or not + //! + virtual bool IsVisual() const; + +private: + std::uint16_t m_dataReferenceIndex; //!< data reference index value + UniquePtr + m_restrictedSchemeInfoAtom; //!< resv info +}; + +VCD_MP4_END; +#endif /* _SAMPLEENTRYATOM_H_ */ diff --git a/src/isolib/atoms/SampGroupDescAtom.cpp b/src/isolib/atoms/SampGroupDescAtom.cpp new file mode 100644 index 00000000..98933828 --- /dev/null +++ b/src/isolib/atoms/SampGroupDescAtom.cpp @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampGroupDescAtom.cpp +//! \brief: SampGroupDescAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
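// (Editorial note, not part of the original patch.) The parser below only materialises
// entries for the "refs" grouping type (DirectReferenceSampleListEntry); entries of other
// grouping types are skipped with a warning. For version 1 atoms, m_defaultLength is read
// and, when it is zero, a per-entry description length is read instead, as handled in
// FromStream().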
+ +#include "SampGroupDescAtom.h" + +#include "Stream.h" + +#include + +VCD_MP4_BEGIN + +SampleGroupDescriptionAtom::SampleGroupDescriptionAtom() + : FullAtom("sgpd", 0, 0) + , m_groupType() + , m_defaultLength(0) + , m_sampleGroupEntry() +{ +} + +void SampleGroupDescriptionAtom::AddEntry(UniquePtr sampleGroupEntry) +{ + m_sampleGroupEntry.push_back(std::move(sampleGroupEntry)); +} + +const SampleGroupEntry* SampleGroupDescriptionAtom::GetEntry(std::uint32_t index) const +{ + const SampleGroupEntry* ret = m_sampleGroupEntry.at(index - 1).get(); + return ret; +} + +std::uint32_t SampleGroupDescriptionAtom::GetEntryIndexOfSampleId(const std::uint32_t sampleId) const +{ + uint32_t index = 1; + for (const auto& entry : m_sampleGroupEntry) + { + DirectReferenceSampleListEntry* drsle = dynamic_cast(entry.get()); + if ((drsle != nullptr) && (drsle->GetSampleId() == sampleId)) + { + return index; + } + ++index; + } + ISO_LOG(LOG_ERROR, "SampleGroupDescriptionAtom::GetEntryIndexOfSampleId: no entry for sampleId found.\n"); + throw Exception(); +} + +void SampleGroupDescriptionAtom::ToStream(Stream& str) +{ + if (m_sampleGroupEntry.size() == 0) + { + ISO_LOG(LOG_ERROR, "SampleGroupDescriptionAtom::ToStreamAtom: not writing an invalid Atom without entries\n"); + throw Exception(); + } + + // Write Atom headers + WriteFullAtomHeader(str); + + str.Write32(m_groupType.GetUInt32()); + uint8_t pVersion = GetVersion(); + if (pVersion == 1) + { + str.Write32(m_defaultLength); + } + + str.Write32(static_cast(m_sampleGroupEntry.size())); + + for (auto& entry : m_sampleGroupEntry) + { + if (pVersion == 1 && m_defaultLength == 0) + { + str.Write32(entry->GetSize()); + } + entry->WriteEntry(str); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void SampleGroupDescriptionAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseFullAtomHeader(str); + + m_groupType = str.Read32(); + + if (GetVersion() == 1) + { + m_defaultLength = str.Read32(); + } + + const uint32_t entryCount = str.Read32(); + + for (unsigned int i = 0; i < entryCount; ++i) + { + uint32_t desLen = m_defaultLength; + if (GetVersion() == 1 && m_defaultLength == 0) + { + desLen = str.Read32(); + } + + Stream subStr; + str.Extract(str.GetPos(), str.GetPos() + desLen, + subStr); // extract "sub-bitstream" for entry + str.SkipBytes(desLen); + + if (m_groupType == "refs") + { + UniquePtr directReferenceSampleListEntry(new DirectReferenceSampleListEntry()); + directReferenceSampleListEntry->ParseEntry(subStr); + m_sampleGroupEntry.push_back(std::move(directReferenceSampleListEntry)); + } + else + { + ISO_LOG(LOG_WARNING, "Skipping an entry of SampleGroupDescriptionAtom of an unknown grouping type '%s'\n", m_groupType.GetString().c_str()); + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/SampGroupDescAtom.h b/src/isolib/atoms/SampGroupDescAtom.h new file mode 100644 index 00000000..5d540010 --- /dev/null +++ b/src/isolib/atoms/SampGroupDescAtom.h @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampGroupDescAtom.h +//! \brief: Sample Group Description Atom class. +//! \detail: Contains Sample Group Description Atom data structure +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _SAMPLEGROUPDESCRIPTIONATOM_H_ +#define _SAMPLEGROUPDESCRIPTIONATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" +#include "SampGroupEntry.h" + +VCD_MP4_BEGIN + +class SampleGroupDescriptionAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SampleGroupDescriptionAtom(); + + //! + //! \brief Destructor + //! + virtual ~SampleGroupDescriptionAtom() = default; + + //! + //! \brief Set version + //! + //! \param [in] std::uint8_t + //! version value + //! + //! \return void + //! + void SetVersion(std::uint8_t version); + + //! + //! \brief Set and Get function for m_groupType member + //! + //! \param [in] FourCCInt + //! value to set + //! \param [in] m_groupType + //! m_groupType member in class + //! \param [in] GroupingType + //! m_groupType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(FourCCInt, m_groupType, GroupingType, const); + + //! + //! \brief Get Entry Index Of Sample Id + //! + //! \param [in] std::uint32_t + //! sample Id value + //! + //! \return std::uint32_t + //! return entry index + //! + std::uint32_t GetEntryIndexOfSampleId(std::uint32_t sampleId) const; + + //! + //! \brief Set and Get function for m_defaultLength member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_defaultLength + //! m_defaultLength member in class + //! \param [in] DefaultLength + //! m_defaultLength name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_defaultLength, DefaultLength, const); + + //! + //! \brief add entry + //! + //! \param [in] UniquePtr + //! sample Group Entry pointer + //! + //! \return void + //! + void AddEntry(UniquePtr sampleGroupEntry); + + //! + //! \brief Get entry + //! + //! \param [in] std::uint32_t + //! index + //! + //! \return const SampleGroupEntry* + //! SampleGroupEntry pointer + //! + const SampleGroupEntry* GetEntry(std::uint32_t index) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + FourCCInt m_groupType; //!< Grouping type + std::uint32_t m_defaultLength; //!< Default byte size of the description + std::vector> m_sampleGroupEntry; //!< std::vector of sample group entries +}; + +VCD_MP4_END; +#endif /* _SAMPLEGROUPDESCRIPTIONBOX_H_ */ diff --git a/src/isolib/atoms/SampGroupEntry.cpp b/src/isolib/atoms/SampGroupEntry.cpp new file mode 100644 index 00000000..486119f6 --- /dev/null +++ b/src/isolib/atoms/SampGroupEntry.cpp @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampGroupEntry.cpp +//! \brief: SampGroupEntry class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
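+//!
+//! \note  DirectReferenceSampleListEntry ('refs' grouping) is serialized as
+//!        a 32-bit sample id, an 8-bit reference count and one 32-bit id per
+//!        referenced sample. For example, an entry with two referenced ids
+//!        occupies 4 + 1 + 2 * 4 = 13 bytes, which is the value GetSize()
+//!        reports and WriteEntry() emits.
+//!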
+ +#include "SampGroupEntry.h" + +#include + +VCD_MP4_BEGIN + +DirectReferenceSampleListEntry::DirectReferenceSampleListEntry() + : m_sampleId(0) +{ +} + +void DirectReferenceSampleListEntry::SetDirectRefSampleIds(const std::vector& referenceSampleIds) +{ + if (referenceSampleIds.size() > 255) + { + ISO_LOG(LOG_ERROR, "Too many entries in referenceSampleIds\n"); + throw Exception(); + } + + m_directRefSampIds = referenceSampleIds; +} + +std::vector DirectReferenceSampleListEntry::GetDirectRefSampleIds() const +{ + return m_directRefSampIds; +} + +std::uint32_t DirectReferenceSampleListEntry::GetSize() const +{ + const uint32_t size = static_cast(sizeof(m_sampleId) + sizeof(uint8_t) + + (sizeof(uint32_t) * m_directRefSampIds.size())); + return size; +} + +void DirectReferenceSampleListEntry::WriteEntry(Stream& str) +{ + str.Write32(m_sampleId); + + str.Write8(static_cast(m_directRefSampIds.size())); + for (auto id : m_directRefSampIds) + { + str.Write32(id); + } +} + +void DirectReferenceSampleListEntry::ParseEntry(Stream& str) +{ + m_sampleId = str.Read32(); + const uint8_t numberOfReferencedSamples = str.Read8(); + for (unsigned int i = 0; i < numberOfReferencedSamples; ++i) + { + m_directRefSampIds.push_back(str.Read32()); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/SampGroupEntry.h b/src/isolib/atoms/SampGroupEntry.h new file mode 100644 index 00000000..f4d076af --- /dev/null +++ b/src/isolib/atoms/SampGroupEntry.h @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampGroupEntry.h +//! \brief: SampGroupEntry class. +//! \detail: Provides abstract methods to generate a sample group entry. +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _SAMPLEGROUPENTRY_H_ +#define _SAMPLEGROUPENTRY_H_ + +#include "Stream.h" +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +class SampleGroupEntry +{ +public: + + //! + //! \brief Constructor + //! + SampleGroupEntry() = default; + + //! + //! \brief Destructor + //! + virtual ~SampleGroupEntry() = default; + + //! + //! \brief Get size + //! + //! \return std::uint32_t + //! size + //! + virtual std::uint32_t GetSize() const = 0; + + //! + //! 
\brief Write entry content + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + virtual void WriteEntry(Stream& str) = 0; + + //! + //! \brief Parse entry content + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + virtual void ParseEntry(Stream& str) = 0; +}; + +class DirectReferenceSampleListEntry : public SampleGroupEntry +{ +public: + + //! + //! \brief Constructor + //! + DirectReferenceSampleListEntry(); + + //! + //! \brief Constructor + //! + virtual ~DirectReferenceSampleListEntry() = default; + + //! + //! \brief Set and Get function for m_sampleId member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_sampleId + //! m_sampleId member in class + //! \param [in] SampleId + //! m_sampleId name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_sampleId, SampleId, const); + + void SetDirectRefSampleIds(const std::vector& refSampleId); + + //! + //! \brief Get Direct Ref Sample Ids + //! + //! \return std::vector + //! return Direct Ref Sample Ids + //! + std::vector GetDirectRefSampleIds() const; + + //! + //! \brief Get size + //! + //! \return std::uint32_t + //! size + //! + virtual std::uint32_t GetSize() const; + + //! + //! \brief Write entry content + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + virtual void WriteEntry(Stream& str); + + //! + //! \brief Parse entry content + //! + //! \param [in] Stream& + //! bitstream + //! + //! \return void + //! + virtual void ParseEntry(Stream& str); + +private: + std::uint32_t m_sampleId; //!< Sample Id whose referenced sample Id will be listed + std::vector m_directRefSampIds; //!< std::vector of direct reference sample Ids +}; + +VCD_MP4_END; +#endif /* _SAMPLEGROUPENTRY_H_ */ diff --git a/src/isolib/atoms/SampRateAtom.cpp b/src/isolib/atoms/SampRateAtom.cpp new file mode 100644 index 00000000..d3921b38 --- /dev/null +++ b/src/isolib/atoms/SampRateAtom.cpp @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampRateAtom.cpp +//! 
\brief: SampRateAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "SampRateAtom.h" + +VCD_MP4_BEGIN + +SamplingRateAtom::SamplingRateAtom() + : FullAtom("srat", 0, 0) + , m_samplingRate(0) +{ +} + +SamplingRateAtom::SamplingRateAtom(const SamplingRateAtom& atom) + : FullAtom(atom.GetType(), 0, 0) + , m_samplingRate(atom.m_samplingRate) +{ +} + +std::uint32_t SamplingRateAtom::GetSamplingRate() const +{ + return m_samplingRate; +} + +void SamplingRateAtom::SetSamplingRate(std::uint32_t samplingRate) +{ + m_samplingRate = samplingRate; +} + +void SamplingRateAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write32(m_samplingRate); + + UpdateSize(str); +} + +void SamplingRateAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_samplingRate = str.Read32(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/SampRateAtom.h b/src/isolib/atoms/SampRateAtom.h new file mode 100644 index 00000000..82bb2b36 --- /dev/null +++ b/src/isolib/atoms/SampRateAtom.h @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampRateAtom.h +//! \brief: Sampling Rate Atom class +//! \detail: 'srat' Atom +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _SAMPLINGRATEATOM_H_ +#define _SAMPLINGRATEATOM_H_ + +#include +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class SamplingRateAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SamplingRateAtom(); + SamplingRateAtom(const SamplingRateAtom& Atom); + + SamplingRateAtom& operator=(const SamplingRateAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~SamplingRateAtom() = default; + + //! + //! \brief Get Sampling Rate + //! + //! \return std::uint32_t + //! SampleRate + //! + std::uint32_t GetSamplingRate() const; + + //! + //! \brief Set Sample Rate + //! + //! \param [in] std::uint32_t + //! SampleRate value + //! + //! \return void + //! + void SetSamplingRate(std::uint32_t samplingRate); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! 
\return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::uint32_t m_samplingRate; //!< sampling rate +}; + +VCD_MP4_END; +#endif /* _SAMPLINGRATEATOM_H_ */ diff --git a/src/isolib/atoms/SampSizeAtom.cpp b/src/isolib/atoms/SampSizeAtom.cpp new file mode 100644 index 00000000..286af904 --- /dev/null +++ b/src/isolib/atoms/SampSizeAtom.cpp @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampSizeAtom.cpp +//! \brief: SampSizeAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
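+//!
+//! \note  In 'stsz' a non-zero sample size means all samples share that
+//!        constant size and FromStream() reads no per-sample table; the
+//!        table is expanded lazily by GetEntrySize(), e.g. sample size 1200
+//!        with 3 samples becomes {1200, 1200, 1200} on first use. A zero
+//!        sample size means one 32-bit size is read per sample.
+//!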
+ +#include "SampSizeAtom.h" + + +VCD_MP4_BEGIN + +SampleSizeAtom::SampleSizeAtom() + : FullAtom("stsz", 0, 0) + , m_sampleSize(0) + , m_sampleNum(0) + , m_entrySize() +{ +} + +void SampleSizeAtom::SetEntrySize(std::vector sample_sizes) +{ + m_entrySize = sample_sizes; +} + +std::vector SampleSizeAtom::GetEntrySize() const +{ + // Fill the entry size in a lazy fashion to avoid doing too much work + // if the Atom ends up being discarded due to invalid data determined + // from othere sources + if (m_entrySize.size() == 0 && m_sampleSize != 0) + { + for (uint32_t i = 0; i < m_sampleNum; i++) + { + m_entrySize.push_back(m_sampleSize); + } + } + return m_entrySize; +} + +void SampleSizeAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteFullAtomHeader(str); + str.Write32(m_sampleSize); + str.Write32(m_sampleNum); // number of samples in the track + for (uint32_t i = 0; i < m_sampleNum; i++) + { + str.Write32(m_entrySize.at(i)); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void SampleSizeAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseFullAtomHeader(str); + + m_sampleSize = str.Read32(); + m_sampleNum = str.Read32(); + + if (m_sampleSize == 0) + { + for (uint32_t i = 0; i < m_sampleNum; i++) + { + if (m_sampleSize == 0) + { + m_entrySize.push_back(str.Read32()); + } + } + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/SampSizeAtom.h b/src/isolib/atoms/SampSizeAtom.h new file mode 100644 index 00000000..205eaaf3 --- /dev/null +++ b/src/isolib/atoms/SampSizeAtom.h @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampSizeAtom.h +//! \brief: SampSize Atom. +//! \detail: 'stsz' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _SAMPLESIZEATOM_H_ +#define _SAMPLESIZEATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +#include +#include + +VCD_MP4_BEGIN + +class SampleSizeAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SampleSizeAtom(); + + //! + //! \brief Destructor + //! + virtual ~SampleSizeAtom() = default; + + //! + //! 
\brief Set and Get function for m_sampleSize member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_sampleSize + //! m_sampleSize member in class + //! \param [in] SampleSize + //! m_sampleSize name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_sampleSize, SampleSize, ); + + //! + //! \brief Set and Get function for m_sampleNum member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_sampleNum + //! m_sampleNum member in class + //! \param [in] SampleNum + //! m_sampleNum name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_sampleNum, SampleNum, const); + + //! + //! \brief Set Entry Size + //! + //! \param [in] std::vector + //! sample sizes + //! + //! \return void + //! + void SetEntrySize(std::vector sample_sizes); + + //! + //! \brief Get Entry Size + //! + //! \return std::vector + //! Entry Size + //! + std::vector GetEntrySize() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::uint32_t m_sampleSize; //!< Default sample size. + std::uint32_t m_sampleNum; //!< Number of samples to be listed + mutable std::vector m_entrySize; //!< Sample sizes of each sample. +}; + +VCD_MP4_END; +#endif /* _SAMPLESIZEATOM_H_ */ diff --git a/src/isolib/atoms/SampTableAtom.cpp b/src/isolib/atoms/SampTableAtom.cpp new file mode 100644 index 00000000..720fdcfa --- /dev/null +++ b/src/isolib/atoms/SampTableAtom.cpp @@ -0,0 +1,417 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampTableAtom.cpp +//! \brief: SampTableAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! 
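+//!
+//! \note  FromStream() below cross-checks the sample counts reported by the
+//!        'stsz', 'stts', 'ctts' and 'sbgp' children against each other and
+//!        against the ABMAX_SAMP_CNT safety cap, rejecting a malformed
+//!        'stbl' early instead of allocating oversized tables later.
+//!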
+ +#include "SampTableAtom.h" + +using namespace std; + +VCD_MP4_BEGIN + +SampleTableAtom::SampleTableAtom() + : Atom("stbl") + , m_sampleDescrAtom() + , m_timeToSampAtom() + , m_sampToChunkAtom() + , m_chunkOffsetAtom() + , m_sampSizeAtom() + , m_compToDecodeAtom(nullptr) + , m_sampGroupDescrAtom(nullptr) + , m_hasSyncSampleAtom(false) +{ +} + +const SampleDescriptionAtom& SampleTableAtom::GetSampleDescriptionAtom() const +{ + return m_sampleDescrAtom; +} + +SampleDescriptionAtom& SampleTableAtom::GetSampleDescriptionAtom() +{ + return m_sampleDescrAtom; +} + +const TimeToSampleAtom& SampleTableAtom::GetTimeToSampleAtom() const +{ + return m_timeToSampAtom; +} + +TimeToSampleAtom& SampleTableAtom::GetTimeToSampleAtom() +{ + return m_timeToSampAtom; +} + +void SampleTableAtom::SetCompositionOffsetAtom(const CompositionOffsetAtom& compositionOffsetAtom) +{ + if (m_compOffsetAtom == nullptr) + { + m_compOffsetAtom = MakeShared(compositionOffsetAtom); + } + else + { + *m_compOffsetAtom = compositionOffsetAtom; + } +} + +std::shared_ptr SampleTableAtom::GetCompositionOffsetAtom() const +{ + return m_compOffsetAtom; +} + +void SampleTableAtom::SetCompositionToDecodeAtom(const CompositionToDecodeAtom& compositionToDecodeAtom) +{ + if (m_compToDecodeAtom == nullptr) + { + m_compToDecodeAtom = MakeShared(compositionToDecodeAtom); + } + else + { + *m_compToDecodeAtom = compositionToDecodeAtom; + } +} + +std::shared_ptr SampleTableAtom::GetCompositionToDecodeAtom() const +{ + return m_compToDecodeAtom; +} + +const SampleToChunkAtom& SampleTableAtom::GetSampleToChunkAtom() const +{ + return m_sampToChunkAtom; +} + +SampleToChunkAtom& SampleTableAtom::GetSampleToChunkAtom() +{ + return m_sampToChunkAtom; +} + +const ChunkOffsetAtom& SampleTableAtom::GetChunkOffsetAtom() const +{ + return m_chunkOffsetAtom; +} + +ChunkOffsetAtom& SampleTableAtom::GetChunkOffsetAtom() +{ + return m_chunkOffsetAtom; +} + +const SampleSizeAtom& SampleTableAtom::GetSampleSizeAtom() const +{ + return m_sampSizeAtom; +} + +SampleSizeAtom& SampleTableAtom::GetSampleSizeAtom() +{ + return m_sampSizeAtom; +} + +void SampleTableAtom::SetSampleGroupDescriptionAtom(UniquePtr sgpd) +{ + m_sampGroupDescrAtom = std::move(sgpd); +} + +SampleToGroupAtom& SampleTableAtom::GetSampleToGroupAtom() +{ + m_sampToGroupAtom.resize(m_sampToGroupAtom.size() + 1); + return m_sampToGroupAtom.back(); +} + +const std::vector& SampleTableAtom::GetSampleToGroupAtoms() const +{ + return m_sampToGroupAtom; +} + +const SampleGroupDescriptionAtom* SampleTableAtom::GetSampleGroupDescriptionAtom(FourCCInt groupingType) const +{ + if (m_sampGroupDescrAtom) + { + if (m_sampGroupDescrAtom->GetGroupingType() == groupingType) + { + return m_sampGroupDescrAtom.get(); + } + } + ISO_LOG(LOG_ERROR, "SampleGroupDescriptionAtom NOT found!\n"); + throw Exception(); +} + +void SampleTableAtom::SetSyncSampleAtom(const SyncSampleAtom& syncSampleAtom) +{ + if (m_syncSampAtom == nullptr) + { + m_syncSampAtom = MakeShared(syncSampleAtom); + } + else + { + *m_syncSampAtom = syncSampleAtom; + } + m_hasSyncSampleAtom = true; +} + +bool SampleTableAtom::HasSyncSampleAtom() +{ + return m_hasSyncSampleAtom; +} + +std::shared_ptr SampleTableAtom::GetSyncSampleAtom() const +{ + return m_syncSampAtom; +} + +void SampleTableAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteAtomHeader(str); + + // Write other Atoms contained in the movie Atom + m_sampleDescrAtom.ToStream(str); + m_timeToSampAtom.ToStream(str); + m_sampToChunkAtom.ToStream(str); + 
m_chunkOffsetAtom.ToStream(str); + m_sampSizeAtom.ToStream(str); + + if (m_syncSampAtom != nullptr) + { + m_syncSampAtom->ToStream(str); + } + + if (m_compOffsetAtom != nullptr) + { + m_compOffsetAtom->ToStream(str); + } + + if (m_compToDecodeAtom != nullptr) + { + m_compToDecodeAtom->ToStream(str); + } + + if (m_sampGroupDescrAtom) + { + m_sampGroupDescrAtom->ToStream(str); + } + + if (m_sampToGroupAtom.empty() == false) + { + for (auto sbgp : m_sampToGroupAtom) + { + sbgp.ToStream(str); + } + } + + // Update the size + UpdateSize(str); +} + +void SampleTableAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseAtomHeader(str); + + int64_t pMaxNum = -1; + int64_t pMaxAbNum = + ABMAX_SAMP_CNT; // 4 194 304 (more than day worth of 48hz samples) + + // if there a data available in the file + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt pAtomType; + Stream subStr = str.ReadSubAtomStream(pAtomType); + + // Handle this Atom based on the type + if (pAtomType == "stsd") + { + m_sampleDescrAtom.FromStream(subStr); + } + else if (pAtomType == "stco" || pAtomType == "co64") // 'co64' is the 64-bit version + { + m_chunkOffsetAtom.FromStream(subStr); + } + else if (pAtomType == "stsz") + { + m_sampSizeAtom.FromStream(subStr); + uint32_t pSampNum = m_sampSizeAtom.GetSampleNum(); + if (pMaxNum == -1) + { + if (pSampNum > pMaxAbNum) + { + ISO_LOG(LOG_ERROR, "Over max sample counts from stsz to rest of sample table\n"); + throw Exception(); + } + pMaxNum = static_cast(pSampNum); + } + else if (pSampNum != pMaxNum) + { + ISO_LOG(LOG_ERROR, "Non-matching sample counts from stsz to rest of sample table\n"); + throw Exception(); + } + } + else if (pAtomType == "stts") + { + m_timeToSampAtom.FromStream(subStr); + uint32_t pSampNum = static_cast(m_timeToSampAtom.GetSampleNum()); + if (pMaxNum == -1) + { + if (pSampNum > pMaxAbNum) + { + ISO_LOG(LOG_ERROR, "Over max sample counts from stts to rest of sample table\n"); + throw Exception(); + } + pMaxNum = static_cast(pSampNum); + } + else if (pSampNum != pMaxNum) + { + ISO_LOG(LOG_ERROR, "Non-matching sample counts from stts to rest of sample table\n"); + throw Exception(); + } + } + else if (pAtomType == "stsc") + { + if (pMaxNum != -1) + { + m_sampToChunkAtom.SetSampleNumMaxSafety(pMaxNum); + } + m_sampToChunkAtom.FromStream(subStr); + } + else if (pAtomType == "stss") + { + m_syncSampAtom = MakeShared(); + if (pMaxNum != -1) + { + m_syncSampAtom->SetSampleNumMaxSafety(pMaxNum); + } + m_syncSampAtom->FromStream(subStr); + m_hasSyncSampleAtom = true; + } + else if (pAtomType == "sgpd") + { + auto sgdb = new SampleGroupDescriptionAtom(); + sgdb->FromStream(subStr); + m_sampGroupDescrAtom.reset(sgdb); + } + else if (pAtomType == "sbgp") + { + SampleToGroupAtom sampleToGroupAtom; + sampleToGroupAtom.FromStream(subStr); + uint32_t pSampNum = static_cast(sampleToGroupAtom.GetNumberOfSamples()); + if (pMaxNum == -1) + { + if (pSampNum > pMaxAbNum) + { + ISO_LOG(LOG_ERROR, "Over max sample counts from sbgp to rest of sample table\n"); + throw Exception(); + } + // we can't update pMaxNum here as sbgp can have less samples than total. 
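+                // The later 'else if' only requires sbgp's count not to exceed pMaxNum.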
+ } + else if (pSampNum > pMaxNum) + { + ISO_LOG(LOG_ERROR, "Non-matching sample counts from sbgp to rest of sample table\n"); + throw Exception(); + } + m_sampToGroupAtom.push_back(move(sampleToGroupAtom)); + } + else if (pAtomType == "cslg") + { + m_compToDecodeAtom = MakeShared(); + m_compToDecodeAtom->FromStream(subStr); + } + else if (pAtomType == "ctts") + { + m_compOffsetAtom = MakeShared(); + m_compOffsetAtom->FromStream(subStr); + uint32_t pSampNum = static_cast(m_compOffsetAtom->GetSampleNum()); + if (pMaxNum == -1) + { + if (pSampNum > pMaxAbNum) + { + ISO_LOG(LOG_ERROR, "Over max sample counts from ctts to rest of sample table\n"); + throw Exception(); + } + pMaxNum = static_cast(pSampNum); + } + else if (pSampNum != pMaxNum) + { + ISO_LOG(LOG_ERROR, "Non-matching sample counts from ctts to rest of sample table\n"); + throw Exception(); + } + } + else + { + char type[4]; + pAtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside SampleTableAtom.\n", type); + } + } + + if (pMaxNum == -1) + { + ISO_LOG(LOG_ERROR, "SampleToTableAtom does not determine number of samples\n"); + throw Exception(); + } + else + { + std::vector sizes; + sizes.push_back(m_timeToSampAtom.GetSampleNum()); + sizes.push_back(m_sampSizeAtom.GetSampleNum()); + auto lowerBound = m_sampToChunkAtom.GetSampleNumLowerBound( + static_cast(m_chunkOffsetAtom.GetChunkOffsets().size())); + auto referenceSize = sizes[0]; + for (size_t c = 0; c < sizes.size(); ++c) + { + if (sizes[c] != referenceSize || sizes[c] < lowerBound) + { + ISO_LOG(LOG_ERROR, "SampleToTableAtom contains Atoms with mismatching sample counts\n"); + throw Exception(); + } + } + + // reset it here in case the order of Atoms didn't allow it to + // be set on time for m_sampToGroupAtom parsing + m_sampToChunkAtom.SetSampleNumMaxSafety(pMaxNum); + + // we need to update stsc decoded presentation of chunk entries. + m_sampToChunkAtom.DecodeEntries(static_cast(m_chunkOffsetAtom.GetChunkOffsets().size())); + } +} + +void SampleTableAtom::ResetSamples() +{ + m_timeToSampAtom = TimeToSampleAtom(); + m_sampToChunkAtom = SampleToChunkAtom(); + m_chunkOffsetAtom = ChunkOffsetAtom(); + m_sampSizeAtom = SampleSizeAtom(); + m_syncSampAtom.reset(); + m_compOffsetAtom.reset(); + m_compToDecodeAtom.reset(); + m_sampToGroupAtom.clear(); +} + +VCD_MP4_END diff --git a/src/isolib/atoms/SampTableAtom.h b/src/isolib/atoms/SampTableAtom.h new file mode 100644 index 00000000..fc8520bd --- /dev/null +++ b/src/isolib/atoms/SampTableAtom.h @@ -0,0 +1,291 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampTableAtom.h +//! \brief: SampTableAtom class +//! \detail: "stbl" atom. +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _SAMPLETABLEATOM_H_ +#define _SAMPLETABLEATOM_H_ + +#include "Atom.h" +#include "Stream.h" +#include "ChunkOffsetAtom.h" +#include "CompOffsetAtom.h" +#include "CompToDecAtom.h" +#include "FormAllocator.h" +#include "SampDescAtom.h" +#include "SampGroupDescAtom.h" +#include "SampSizeAtom.h" +#include "SampToChunkAtom.h" +#include "SampToGroupAtom.h" +#include "SyncSampAtom.h" +#include "TimeToSampAtom.h" + +VCD_MP4_BEGIN + +class SampleTableAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + SampleTableAtom(); + SampleTableAtom(const SampleTableAtom& obj) = delete; + + SampleTableAtom& operator=(const SampleTableAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~SampleTableAtom() = default; + + //! + //! \brief Get SampleDescription Atom + //! + //! \return SampleDescriptionAtom& + //! SampleDescription Atom + //! + SampleDescriptionAtom& GetSampleDescriptionAtom(); + + //! + //! \brief Get SampleDescription Atom + //! + //! \return const SampleDescriptionAtom& + //! SampleDescription Atom + //! + const SampleDescriptionAtom& GetSampleDescriptionAtom() const; + + //! + //! \brief Get TimeToSample Atom + //! + //! \return TimeToSampleAtom& + //! TimeToSample Atom + //! + TimeToSampleAtom& GetTimeToSampleAtom(); + + //! + //! \brief Get TimeToSample Atom + //! + //! \return const TimeToSampleAtom& + //! TimeToSample Atom + //! + const TimeToSampleAtom& GetTimeToSampleAtom() const; + + //! + //! \brief Set CompositionOffset Atom + //! + //! \param [in] const CompositionOffsetAtom& + //! CompositionOffset Atom + //! + //! \return void + //! + void SetCompositionOffsetAtom(const CompositionOffsetAtom& compositionOffsetAtom); + + //! + //! \brief Get CompositionOffset Atom + //! + //! \return std::shared_ptr + //! CompositionOffset Atom + //! + std::shared_ptr GetCompositionOffsetAtom() const; + + //! + //! \brief Set CompositionToDecode Atom + //! + //! \param [in] const CompositionToDecodeAtom& + //! CompositionToDecode Atom + //! + //! \return void + //! + void SetCompositionToDecodeAtom(const CompositionToDecodeAtom& compositionToDecodeAtom); + + //! + //! \brief Get CompositionToDecode Atom + //! + //! \return std::shared_ptr + //! CompositionToDecode Atom + //! + std::shared_ptr GetCompositionToDecodeAtom() const; + + //! + //! \brief Set SyncSample Atom + //! + //! \param [in] const SyncSampleAtom& + //! SyncSample Atom + //! + //! \return void + //! + void SetSyncSampleAtom(const SyncSampleAtom& syncSampleAtom); + + //! + //! \brief Has SyncSample Atom or not + //! + //! \return bool + //! has or not + //! + bool HasSyncSampleAtom(); + + //! + //! \brief Get SyncSample Atom + //! + //! \return std::shared_ptr + //! SyncSample Atom + //! + std::shared_ptr GetSyncSampleAtom() const; + + //! + //! \brief Get SampleToChunk Atom + //! + //! 
\return SampleToChunkAtom& + //! SampleToChunk Atom + //! + SampleToChunkAtom& GetSampleToChunkAtom(); + + //! + //! \brief Get SampleToChunk Atom + //! + //! \return const SampleToChunkAtom& + //! SampleToChunk Atom + //! + const SampleToChunkAtom& GetSampleToChunkAtom() const; + + //! + //! \brief Get ChunkOffset Atom + //! + //! \return ChunkOffsetAtom& + //! ChunkOffset Atom + //! + ChunkOffsetAtom& GetChunkOffsetAtom(); + + //! + //! \brief Get ChunkOffset Atom + //! + //! \return const ChunkOffsetAtom& + //! ChunkOffset Atom + //! + const ChunkOffsetAtom& GetChunkOffsetAtom() const; + + //! + //! \brief Get SampleSize Atom + //! + //! \return SampleSizeAtom& + //! SampleSize Atom + //! + SampleSizeAtom& GetSampleSizeAtom(); + + //! + //! \brief Get SampleSize Atom + //! + //! \return const SampleSizeAtom& + //! SampleSize Atom + //! + const SampleSizeAtom& GetSampleSizeAtom() const; + + //! + //! \brief Set SampleGroupDescription Atom + //! + //! \param [in] UniquePtr + //! SampleGroupDescription Atom + //! + //! \return void + //! + void SetSampleGroupDescriptionAtom(UniquePtr sgpd); + + //! + //! \brief Get SampleToGroup Atom + //! + //! \return SampleToGroupAtom& + //! SampleToGroup Atom + //! + SampleToGroupAtom& GetSampleToGroupAtom(); + + //! + //! \brief Get SampleToGroup Atoms + //! + //! \return const std::vector& + //! SampleToGroup Atoms + //! + const std::vector& GetSampleToGroupAtoms() const; + + //! + //! \brief Get SampleGroupDescription Atoms + //! + //! \param [in] FourCCInt + //! grouping Type + //! + //! \return const SampleGroupDescriptionAtom* + //! SampleGroupDescription Atoms + //! + const SampleGroupDescriptionAtom* GetSampleGroupDescriptionAtom(FourCCInt groupingType) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + + //! + //! \brief Reset Samples + //! + void ResetSamples(); + +private: + + SampleDescriptionAtom m_sampleDescrAtom; //!< Sample description Atom (mandatory) + TimeToSampleAtom m_timeToSampAtom; //!< Time-to-sample Atom (mandatory) + SampleToChunkAtom m_sampToChunkAtom; //!< Sample-to-chunk Atom (mandatory) + ChunkOffsetAtom m_chunkOffsetAtom; //!< Chunk offset Atom (mandatory) + SampleSizeAtom m_sampSizeAtom; //!< Sample size Atom (mandatory) + std::shared_ptr m_syncSampAtom; //!< Sync sample Atom (optional) + std::shared_ptr m_compOffsetAtom; //!< Composition offset Atom (optional) + std::shared_ptr m_compToDecodeAtom;//!< Composition to decode Atom (optional) + UniquePtr m_sampGroupDescrAtom; //!< Pointer to the sample group description Atom + std::vector m_sampToGroupAtom; //!< Vectory of sample-to-group Atoms + bool m_hasSyncSampleAtom; //!< has Sync Sample Atom +}; + +VCD_MP4_END; +#endif /* _SAMPLETABLEATOM_H_ */ diff --git a/src/isolib/atoms/SampToChunkAtom.cpp b/src/isolib/atoms/SampToChunkAtom.cpp new file mode 100644 index 00000000..79e0a0a6 --- /dev/null +++ b/src/isolib/atoms/SampToChunkAtom.cpp @@ -0,0 +1,230 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampToChunkAtom.cpp +//! \brief: SampToChunkAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "SampToChunkAtom.h" + + +#include +#include + +VCD_MP4_BEGIN + +SampleToChunkAtom::SampleToChunkAtom() + : FullAtom("stsc", 0, 0) + , m_runOfChunks() + , m_maxSampleNum(-1) +{ +} + +bool SampleToChunkAtom::GetSampleDescrIndex(std::uint32_t sampleIndex, std::uint32_t& sampleDescriptionIdx) const +{ + if (sampleIndex >= m_decodedEntries.size()) + { + return false; + } + + sampleDescriptionIdx = m_decodedEntries.at(sampleIndex).sampleDescrIndex; + return true; +} + +bool SampleToChunkAtom::GetSampleChunkIndex(std::uint32_t sampleIndex, std::uint32_t& chunkIdx) const +{ + if (sampleIndex >= m_decodedEntries.size()) + { + return false; + } + + chunkIdx = m_decodedEntries.at(sampleIndex).chunkIndex; + return true; +} + +void SampleToChunkAtom::SetSampleNumMaxSafety(int64_t maxSampleNum) +{ + m_maxSampleNum = maxSampleNum; +} + +void SampleToChunkAtom::AddChunkEntry(const ChunkEntry& chunkEntry) +{ + m_runOfChunks.push_back(chunkEntry); +} + +void SampleToChunkAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write32(static_cast(m_runOfChunks.size())); + for (const auto& run : m_runOfChunks) + { + str.Write32(run.firstChunk); + str.Write32(run.oneChunkSamples); + str.Write32(run.sampleDescrIndex); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void SampleToChunkAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + const uint32_t entryCount = str.Read32(); + for (uint32_t i = 0; i < entryCount; ++i) + { + ChunkEntry chunkEntry; + chunkEntry.firstChunk = str.Read32(); + chunkEntry.oneChunkSamples = str.Read32(); + + if (m_maxSampleNum != -1 && (chunkEntry.oneChunkSamples > m_maxSampleNum)) + { + ISO_LOG(LOG_ERROR, "SampleToChunkAtom::FromStreamAtom sampChunk is larger than total number of samples\n"); + throw Exception(); + } + + chunkEntry.sampleDescrIndex = str.Read32(); + m_runOfChunks.push_back(chunkEntry); + } +} + +uint32_t SampleToChunkAtom::GetSampleNumLowerBound(uint32_t pCount) const +{ + if (pCount == 0) + { + // nothing to do. 
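+        // With zero chunks no samples can be referenced, so the lower bound is 0.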
+ return 0; + } + + uint64_t sampleNum = 0; + + if (m_runOfChunks.at(0).firstChunk != 1) + { + ISO_LOG(LOG_ERROR, "SampleToChunkAtom first entry first_chunk != 1\n"); + throw Exception(); + } + + for (unsigned int pIndex = 0; pIndex < m_runOfChunks.size(); ++pIndex) + { + const uint32_t firstChunk = m_runOfChunks.at(pIndex).firstChunk; + const uint32_t sampChunk = m_runOfChunks.at(pIndex).oneChunkSamples; + + uint32_t pChunkRep = 1; + if ((pIndex + 1) < m_runOfChunks.size()) + { + if (m_runOfChunks.at(pIndex + 1).firstChunk <= firstChunk) + { + ISO_LOG(LOG_ERROR, "Invalid first_chunk value in SampleToChunkAtom entry. Must be greater than previous\n"); + throw Exception(); + } + + pChunkRep = m_runOfChunks.at(pIndex + 1).firstChunk - firstChunk; + } + else if (pIndex == m_runOfChunks.size() - 1) + { + // handle last entry. + pChunkRep = pCount - m_runOfChunks.at(pIndex).firstChunk + 1; + } + + sampleNum += uint64_t(pChunkRep) * sampChunk; + } + + if (sampleNum <= std::numeric_limits::max()) + { + return static_cast(sampleNum); + } + else + { + ISO_LOG(LOG_ERROR, "SampleToChunkAtom has >= 2^32 samples\n"); + throw Exception(); + } +} + +void SampleToChunkAtom::DecodeEntries(std::uint32_t pCount) +{ + m_decodedEntries.clear(); + + if (m_runOfChunks.size() == 0 || pCount == 0) + { + // nothing to do. + return; + } + + if (m_runOfChunks.at(0).firstChunk != 1) + { + ISO_LOG(LOG_ERROR, "SampleToChunkAtom first entry first_chunk != 1\n"); + throw Exception(); + } + + for (unsigned int pIndex = 0; pIndex < m_runOfChunks.size(); ++pIndex) + { + const uint32_t firstChunk = m_runOfChunks.at(pIndex).firstChunk; + const uint32_t sampChunk = m_runOfChunks.at(pIndex).oneChunkSamples; + const uint32_t sampDescrIndex = m_runOfChunks.at(pIndex).sampleDescrIndex; + + uint32_t pChunkRep = 1; + if ((pIndex + 1) < m_runOfChunks.size()) + { + if (m_runOfChunks.at(pIndex + 1).firstChunk <= firstChunk) + { + ISO_LOG(LOG_ERROR, "Invalid first_chunk value in SampleToChunkAtom entry. Must be greater than previous\n"); + throw Exception(); + } + + pChunkRep = m_runOfChunks.at(pIndex + 1).firstChunk - firstChunk; + } + else if (pIndex == m_runOfChunks.size() - 1) + { + // handle last entry. + pChunkRep = pCount - m_runOfChunks.at(pIndex).firstChunk + 1; + } + + if (m_maxSampleNum != -1 && std::uint64_t(sampChunk) * pChunkRep > std::uint64_t(m_maxSampleNum)) + { + ISO_LOG(LOG_ERROR, "SampleToChunkAtom::FromStreamAtom sampChunk is larger than total number of samples\n"); + throw Exception(); + } + + DecEntry entry; + entry.oneChunkSamples = sampChunk; + entry.sampleDescrIndex = sampDescrIndex; + for (unsigned int i = 0; i < pChunkRep; ++i) + { + entry.chunkIndex = firstChunk + i; + for (unsigned int k = 0; k < sampChunk; ++k) + { + m_decodedEntries.push_back(entry); + } + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/SampToChunkAtom.h b/src/isolib/atoms/SampToChunkAtom.h new file mode 100644 index 00000000..4f669333 --- /dev/null +++ b/src/isolib/atoms/SampToChunkAtom.h @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampToChunkAtom.h +//! \brief: SampToChunkAtom class. +//! \detail: 'stsc' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _SAMPLETOCHUNKATOM_H_ +#define _SAMPLETOCHUNKATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class SampleToChunkAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SampleToChunkAtom(); + + //! + //! \brief Destructor + //! + ~SampleToChunkAtom() = default; + + //! + //! \brief Get Sample Descr Index + //! + //! \param [in] std::uint32_t + //! samp Idx + //! \param [in] std::uint32_t& + //! sample Descr Idx + //! + //! \return bool + //! success or not + //! + bool GetSampleDescrIndex(std::uint32_t sampIdx, std::uint32_t& sampleDescrIdx) const; + + //! + //! \brief Get Sample Chunk Index + //! + //! \param [in] std::uint32_t + //! samp Idx + //! \param [in] std::uint32_t& + //! chunk Idx + //! + //! \return bool + //! success or not + //! + bool GetSampleChunkIndex(std::uint32_t sampIdx, std::uint32_t& chunkIdx) const; + + //! + //! \brief Set Sample Num Max Safety + //! + //! \param [in] int64_t + //! num value + //! + //! \return void + //! + void SetSampleNumMaxSafety(int64_t num); + + struct ChunkEntry //!< chunk entry + { + std::uint32_t firstChunk; + std::uint32_t oneChunkSamples; + std::uint32_t sampleDescrIndex; + }; + + //! + //! \brief Add Chunk Entry + //! + //! \param [in] const ChunkEntry& + //! Chunk Entry value + //! + //! \return void + //! + void AddChunkEntry(const ChunkEntry& chunkEntry); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + + //! + //! \brief Get Sample Num Lower Bound + //! + //! \param [in] uint32_t + //! chunk Entry Count + //! + //! \return uint32_t + //! Sample Num Lower Bound + //! + uint32_t GetSampleNumLowerBound(uint32_t chunkEntryCount) const; + + //! + //! \brief Decode Entries + //! + //! \param [in] uint32_t + //! chunk Entry Count + //! + //! \return void + //! 
+ void DecodeEntries(std::uint32_t chunkEntryCount); + +private: + std::vector m_runOfChunks; //!< std::vector that contains the chunk entries + + struct DecEntry //!< decode entry parameter + { + std::uint32_t chunkIndex; + std::uint32_t oneChunkSamples; + std::uint32_t sampleDescrIndex; + }; + + std::vector m_decodedEntries; //!< decoded Entry array + int64_t m_maxSampleNum; //!< max Sample Num +}; + +VCD_MP4_END; +#endif /* _SAMPLETOCHUNKATOM_H_ */ diff --git a/src/isolib/atoms/SampToGroupAtom.cpp b/src/isolib/atoms/SampToGroupAtom.cpp new file mode 100644 index 00000000..38be3719 --- /dev/null +++ b/src/isolib/atoms/SampToGroupAtom.cpp @@ -0,0 +1,169 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampToGroupAtom.cpp +//! \brief: SampToGroupAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
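+//!
+//! \note  'sbgp' maps runs of samples to group description indices.
+//!        UpdateInternalIndex() flattens the runs into one index per sample,
+//!        e.g. runs {5 samples -> index 1, 3 samples -> index 0} become the
+//!        per-sample table [1,1,1,1,1,0,0,0], so
+//!        GetSampleGroupDescriptionIndex(6) returns 0.
+//!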
+ +#include "SampToGroupAtom.h" +#include +#include + +VCD_MP4_BEGIN + +SampleToGroupAtom::SampleToGroupAtom() + : FullAtom("sbgp", 0, 0) + , m_entryCount(0) + , m_groupTypeParameter(0) + , m_runOfSamples() +{ +} + +void SampleToGroupAtom::SetEntryCount(std::uint32_t entryCount) +{ + m_entryCount = entryCount; +} + +void SampleToGroupAtom::AddSampleRun(std::uint32_t sampleNum, std::uint32_t groupDescriptionIndex) +{ + SampleRun sampleRun; + sampleRun.sampleNum = sampleNum; + sampleRun.groupDescriptionIndex = groupDescriptionIndex; + + m_runOfSamples.push_back(sampleRun); + + SetEntryCount(static_cast(m_runOfSamples.size())); + UpdateInternalIndex(); +} + +std::uint32_t SampleToGroupAtom::GetSampleGroupDescriptionIndex(const std::uint32_t sampleIndex) const +{ + if (sampleIndex >= m_sampleToGroupIndex.size()) + { + return 0; + } + std::uint32_t ret = m_sampleToGroupIndex.at(sampleIndex); + return ret; +} + +std::uint32_t SampleToGroupAtom::GetSampleId(std::uint32_t groupDescriptionIndex) const +{ + for (unsigned int i = 0; i < m_sampleToGroupIndex.size(); ++i) + { + if (groupDescriptionIndex == m_sampleToGroupIndex.at(i)) + { + return i; + } + } + ISO_LOG(LOG_ERROR, "SampleToGroupAtom::GetSampleId: no entry for requested sample id\n"); + throw Exception(); +} + +unsigned int SampleToGroupAtom::GetNumberOfSamples() const +{ + unsigned int ret = static_cast(m_sampleToGroupIndex.size()); + return ret; +} + +void SampleToGroupAtom::UpdateInternalIndex() +{ + m_sampleToGroupIndex.clear(); + for (const auto& sampleRun : m_runOfSamples) + { + m_sampleToGroupIndex.insert(m_sampleToGroupIndex.end(), sampleRun.sampleNum, sampleRun.groupDescriptionIndex); + } +} + +void SampleToGroupAtom::ToStream(Stream& str) +{ + if (m_runOfSamples.size() == 0) + { + ISO_LOG(LOG_ERROR, "SampleToGroupAtom::ToStreamAtom: not writing an invalid Atom without entries\n"); + throw Exception(); + } + + // Write Atom headers + WriteFullAtomHeader(str); + + str.Write32(m_groupType.GetUInt32()); + + if (GetVersion() == 1) + { + str.Write32(m_groupTypeParameter); + } + + str.Write32(m_entryCount); + + for (auto entry : m_runOfSamples) + { + str.Write32(entry.sampleNum); + str.Write32(entry.groupDescriptionIndex); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void SampleToGroupAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + m_groupType = str.Read32(); + + if (GetVersion() == 1) + { + m_groupTypeParameter = str.Read32(); + } + + m_entryCount = str.Read32(); + if (m_entryCount == 0) + { + ISO_LOG(LOG_ERROR, "Read an empty SampleToGroupAtom without entries.\n"); + throw Exception(); + } + + uint64_t sampleNum = 0; + for (unsigned int i = 0; i < m_entryCount; ++i) + { + SampleRun sampleRun; + sampleRun.sampleNum = str.Read32(); + sampleNum += sampleRun.sampleNum; + if (sampleNum > std::numeric_limits::max()) + { + ISO_LOG(LOG_ERROR, "SampleToGroupAtom sampleNum >= 2^32\n"); + throw Exception(); + } + sampleRun.groupDescriptionIndex = str.Read32(); + m_runOfSamples.push_back(sampleRun); + } + + UpdateInternalIndex(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/SampToGroupAtom.h b/src/isolib/atoms/SampToGroupAtom.h new file mode 100644 index 00000000..49848392 --- /dev/null +++ b/src/isolib/atoms/SampToGroupAtom.h @@ -0,0 +1,189 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SampToGroupAtom.h +//! \brief: SampToGroupAtom class. +//! \detail: 'sbgp' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _SAMPLETOGROUPATOM_H_ +#define _SAMPLETOGROUPATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class SampleToGroupAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SampleToGroupAtom(); + + //! + //! \brief Destructor + //! + virtual ~SampleToGroupAtom() = default; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief Set and Get function for m_groupType member + //! + //! \param [in] FourCCInt + //! value to set + //! \param [in] m_groupType + //! m_groupType member in class + //! \param [in] GroupingType + //! m_groupType name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(FourCCInt, m_groupType, GroupingType, const); + + //! + //! \brief Set and Get function for m_groupTypeParameter member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_groupTypeParameter + //! m_groupTypeParameter member in class + //! \param [in] GroupingTypeParameter + //! m_groupTypeParameter name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_groupTypeParameter, GroupingTypeParameter, const); + + //! + //! \brief Set Entry Count + //! + //! \param [in] std::uint32_t + //! Entry Count value + //! + //! \return void + //! + void SetEntryCount(std::uint32_t count); + + //! + //! \brief Get Entry Count + //! + //! \return std::uint32_t + //! Entry Count + //! + std::uint32_t GetEntryCount() const; + + //! + //! \brief Add Sample Run + //! + //! \param [in] std::uint32_t + //! 
sampleNum + //! \param [in] std::uint32_t + //! index value + //! + //! \return void + //! + void AddSampleRun(std::uint32_t sampleNum, std::uint32_t idx); + + //! + //! \brief Get Sample Group Description Index + //! + //! \param [in] std::uint32_t + //! index value + //! + //! \return std::uint32_t + //! Sample Group Description Index + //! + std::uint32_t GetSampleGroupDescriptionIndex(std::uint32_t idx) const; + + //! + //! \brief Get Sample id + //! + //! \param [in] std::uint32_t + //! index value + //! + //! \return std::uint32_t + //! Sample id + //! + std::uint32_t GetSampleId(std::uint32_t idx) const; + + //! + //! \brief Get Number Of Samples + //! + //! \return unsigned int + //! Number Of Samples + //! + unsigned int GetNumberOfSamples() const; + +private: + FourCCInt m_groupType; //!< group Type + std::uint32_t m_entryCount; //!< entry Count + std::uint32_t m_groupTypeParameter; //!< group Type Parameter + struct SampleRun //!< Sample Run + { + std::uint32_t sampleNum; + std::uint32_t groupDescriptionIndex; + }; + std::vector m_runOfSamples; //!< run Of Samples + std::vector m_sampleToGroupIndex; //!< sample To Group Index + + //! + //! \brief Update Internal Index + //! + void UpdateInternalIndex(); +}; + +VCD_MP4_END; +#endif /* _SAMPLETOGROUPATOM_H_ */ diff --git a/src/isolib/atoms/SegIndexAtom.cpp b/src/isolib/atoms/SegIndexAtom.cpp new file mode 100644 index 00000000..25583cf9 --- /dev/null +++ b/src/isolib/atoms/SegIndexAtom.cpp @@ -0,0 +1,159 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SegIndexAtom.cpp +//! \brief: SegIndexAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
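> Note: a minimal sketch of how the run-based 'sbgp' mapping implemented in SampToGroupAtom.cpp above resolves per-sample lookups; the function name and run values are invented. Serialization is skipped here because ToStream() first needs the grouping type, which is set through the accessor generated by MEMBER_SETANDGET_FUNC_WITH_OPTION and whose exact name depends on that macro.

```cpp
#include "SampToGroupAtom.h"
#include <cstdint>

VCD_MP4_BEGIN

// Map the first three samples to group description 1 and the next two samples
// to description 2, then query the per-sample table that UpdateInternalIndex()
// rebuilds after every AddSampleRun() call.
void DemoSbgpMapping()
{
    SampleToGroupAtom sbgp;
    sbgp.AddSampleRun(3, 1);
    sbgp.AddSampleRun(2, 2);

    std::uint32_t total = sbgp.GetNumberOfSamples();               // 5
    std::uint32_t first = sbgp.GetSampleGroupDescriptionIndex(0);  // 1
    std::uint32_t last  = sbgp.GetSampleGroupDescriptionIndex(4);  // 2
    (void) total; (void) first; (void) last;
}

VCD_MP4_END
```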
+ +#include "SegIndexAtom.h" +#include +#include +#include + +VCD_MP4_BEGIN + +SegmentIndexAtom::SegmentIndexAtom(uint8_t version) + : FullAtom("sidx", version, 0) + , m_referenceID(0) + , m_timescale(0) + , m_earliestPresentationTime(0) + , m_firstOffset(0) + , m_references() + , m_reserveTotal(0) +{ +} + +void SegmentIndexAtom::SetSpaceReserve(size_t reserveTotal) +{ + m_reserveTotal = reserveTotal; +} + +void SegmentIndexAtom::AddReference(const SegmentIndexAtom::Reference& reference) +{ + m_references.push_back(reference); + assert(m_reserveTotal == 0 || m_references.size() < m_reserveTotal); +} + +std::vector SegmentIndexAtom::GetReferences() const +{ + return m_references; +} + +void SegmentIndexAtom::ToStream(Stream& str) +{ + const uint32_t referenceSize = 3 * 4; + const uint32_t reserveBytes = static_cast((m_reserveTotal - m_references.size()) * referenceSize); + + WriteFullAtomHeader(str); + str.Write32(m_referenceID); + str.Write32(m_timescale); + if (GetVersion() == 0) + { + str.Write32(static_cast(m_earliestPresentationTime)); + str.Write32(static_cast(m_firstOffset + reserveBytes)); + } + else if (GetVersion() == 1) + { + str.Write64(m_earliestPresentationTime); + str.Write64(m_firstOffset + reserveBytes); + } + else + { + ISO_LOG(LOG_ERROR, "SegmentIndexAtom::ToStream() supports only 'sidx' version 0 or 1\n"); + throw Exception(); + } + str.Write16(0); // reserved = 0 + str.Write16(static_cast(m_references.size())); // reference_count + + for (uint32_t i = 0; i < m_references.size(); i++) + { + str.Write1(m_references.at(i).referenceType ? uint64_t(1) : uint64_t(0), 1); // bit (1) reference_type + str.Write1(uint64_t(0) | m_references.at(i).referencedSize, 31); // unsigned int(31) referenced_size + str.Write32(m_references.at(i).subsegmentDuration); // unsigned int(32) subsegment_duration + str.Write1(m_references.at(i).startsWithSAP ? 
uint64_t(1) : uint64_t(0), 1); // bit (1) starts_with_SAP + str.Write1(uint64_t(0) | m_references.at(i).sapType, 3); // unsigned int(3) SAP_type + str.Write1(uint64_t(0) | m_references.at(i).sapDeltaTime, 28); // unsigned int(28) SAP_delta_time + } + + UpdateSize(str); + + if (m_reserveTotal != 0) + { + str.Write32(reserveBytes); + str.Write32(FourCCInt("free").GetUInt32()); + str.Write32(0); + + for (uint32_t i = 1; i < m_reserveTotal - m_references.size(); i++) + { + str.Write32(0); + str.Write32(0); + str.Write32(0); + } + } +} + +void SegmentIndexAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_referenceID = str.Read32(); + m_timescale = str.Read32(); + if (GetVersion() == 0) + { + m_earliestPresentationTime = str.Read32(); + m_firstOffset = str.Read32(); + } + else if (GetVersion() == 1) + { + m_earliestPresentationTime = str.Read64(); + m_firstOffset = str.Read64(); + } + else + { + ISO_LOG(LOG_ERROR, "SegmentIndexAtom::FromStream() supports only 'sidx' version 0 or 1\n"); + throw Exception(); + } + + str.Read16(); // reserved = 0 + uint16_t referenceCount = str.Read16(); // reference_count + m_references.clear(); + m_references.reserve(referenceCount); + for (uint16_t i = 0; i < referenceCount; i++) + { + Reference ref; + ref.referenceType = (str.Read1(1) != 0); // bit (1) reference_type + ref.referencedSize = str.Read1(31); // unsigned int(31) referenced_size + ref.subsegmentDuration = str.Read32(); // unsigned int(32) subsegment_duration + ref.startsWithSAP = (str.Read1(1) != 0); // bit (1) starts_with_SAP + ref.sapType = static_cast(str.Read1(3)); // unsigned int(3) SAP_type + ref.sapDeltaTime = str.Read1(28); // unsigned int(28) SAP_delta_time + m_references.push_back(ref); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/SegIndexAtom.h b/src/isolib/atoms/SegIndexAtom.h new file mode 100644 index 00000000..019414c2 --- /dev/null +++ b/src/isolib/atoms/SegIndexAtom.h @@ -0,0 +1,190 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SegIndexAtom.h +//! \brief: Segment Index Atom class +//! \detail: 'sidx' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! 
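> Note: the round-trip sketch below exercises the SegmentIndexAtom code above: one reference is added with space reserved for a second, the 'sidx' atom is written, and it is parsed back. Illustrative only; the helper name and field values are invented, the reference ID and timescale are left at their constructor defaults (setting them goes through the macro-generated accessors), and it assumes ParseFullAtomHeader() restores the version read from the stream.

```cpp
#include "SegIndexAtom.h"
#include "Stream.h"

VCD_MP4_BEGIN

// Write a one-entry 'sidx' (with room kept for one more entry as a trailing
// 'free' box) and parse it back from the same Stream.
bool RoundTripSidxSketch()
{
    SegmentIndexAtom sidx(0);      // version 0: 32-bit presentation time and offset
    sidx.SetSpaceReserve(2);       // total slots; the unused slot becomes 'free' space

    SegmentIndexAtom::Reference ref{};
    ref.referenceType      = false;   // references media, not another 'sidx'
    ref.referencedSize     = 4096;
    ref.subsegmentDuration = 90000;
    ref.startsWithSAP      = true;
    ref.sapType            = 1;
    ref.sapDeltaTime       = 0;
    sidx.AddReference(ref);

    Stream str;
    sidx.ToStream(str);

    SegmentIndexAtom parsed;
    parsed.FromStream(str);
    return parsed.GetReferences().size() == 1;
}

VCD_MP4_END
```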
+ +#ifndef _SEGMENTINDEXATOM_H_ +#define _SEGMENTINDEXATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class SegmentIndexAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SegmentIndexAtom(uint8_t version = 0); + + //! + //! \brief Destructor + //! + virtual ~SegmentIndexAtom() = default; + + struct Reference //!< reference + { + bool referenceType; + uint32_t referencedSize; + uint32_t subsegmentDuration; + bool startsWithSAP; + uint8_t sapType; + uint32_t sapDeltaTime; + }; + + //! + //! \brief Set Space Reserve + //! + //! \param [in] size_t + //! reserve Total + //! + //! \return void + //! + void SetSpaceReserve(size_t reserveTotal); + + //! + //! \brief Set and Get function for m_referenceID member + //! + //! \param [in] uint32_t + //! value to set + //! \param [in] m_referenceID + //! m_referenceID member in class + //! \param [in] ReferenceId + //! m_referenceID name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_referenceID, ReferenceId, const); + + //! + //! \brief Set and Get function for m_timescale member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_timescale + //! m_timescale member in class + //! \param [in] Timescale + //! m_timescale name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_timescale, Timescale, const); + + //! + //! \brief Set and Get function for m_earliestPresentationTime member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_earliestPresentationTime + //! m_earliestPresentationTime member in class + //! \param [in] EarliestPresentationTime + //! m_earliestPresentationTime name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint64_t, m_earliestPresentationTime, EarliestPresentationTime, const); + + //! + //! \brief Set and Get function for m_firstOffset member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_firstOffset + //! m_firstOffset member in class + //! \param [in] FirstOffset + //! m_firstOffset name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint64_t, m_firstOffset, FirstOffset, const); + + //! + //! \brief Add Reference + //! + //! \param [in] const Reference& + //! reference + //! + //! \return void + //! + void AddReference(const Reference& reference); + + //! + //! \brief Get References + //! + //! \return std::vector + //! reference array + //! + std::vector GetReferences() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! 
+ virtual void FromStream(Stream& str); + +private: + uint32_t m_referenceID; //!< reference ID + uint32_t m_timescale; //!< time scale + uint64_t m_earliestPresentationTime;//!< earliest Presentation Time + uint64_t m_firstOffset; //!< first Offset + std::vector m_references;//!< Reference array + size_t m_reserveTotal; //!< reserve Total +}; + +VCD_MP4_END; +#endif /* end of include guard: SEGMENTINDEXATOM_HPP */ diff --git a/src/isolib/atoms/SoundMediaHeaderAtom.cpp b/src/isolib/atoms/SoundMediaHeaderAtom.cpp new file mode 100644 index 00000000..4c35811a --- /dev/null +++ b/src/isolib/atoms/SoundMediaHeaderAtom.cpp @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SoundMediaHeaderAtom.cpp +//! \brief: SoundMediaHeaderAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "SoundMediaHeaderAtom.h" + +VCD_MP4_BEGIN + +SoundMediaHeaderAtom::SoundMediaHeaderAtom() + : FullAtom("smhd", 0, 0) + , m_balance(0) +{ +} + +void SoundMediaHeaderAtom::SetBalance(const std::uint16_t balance) +{ + m_balance = balance; +} + +std::uint16_t SoundMediaHeaderAtom::GetBalance() const +{ + return m_balance; +} + +void SoundMediaHeaderAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write16(m_balance); // Balance + str.Write16(0); // Reserved + + UpdateSize(str); +} + +void SoundMediaHeaderAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + m_balance = str.Read16(); // Balance + str.Read16(); // Reserved +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/SoundMediaHeaderAtom.h b/src/isolib/atoms/SoundMediaHeaderAtom.h new file mode 100644 index 00000000..26ca736c --- /dev/null +++ b/src/isolib/atoms/SoundMediaHeaderAtom.h @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SoundMediaHeaderAtom.h +//! \brief: Sound Media Header Atom class +//! \detail: 'smhd' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _SOUNDMEDIAHEADERATOM_H_ +#define _SOUNDMEDIAHEADERATOM_H_ + +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class SoundMediaHeaderAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SoundMediaHeaderAtom(); + + //! + //! \brief Destructor + //! + virtual ~SoundMediaHeaderAtom() = default; + + //! + //! \brief Set balance + //! + //! \param [in] std::uint16_t + //! balance value + //! + //! \return void + //! + void SetBalance(std::uint16_t balance); + + //! + //! \brief Get balance + //! + //! \return std::uint16_t + //! balance + //! + std::uint16_t GetBalance() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::uint16_t m_balance; //!< balance +}; + +VCD_MP4_END; +#endif /* _SOUNDMEDIAHEADERATOM_H_ */ diff --git a/src/isolib/atoms/SphereRegionSampEntryAtom.cpp b/src/isolib/atoms/SphereRegionSampEntryAtom.cpp new file mode 100644 index 00000000..a8649d8d --- /dev/null +++ b/src/isolib/atoms/SphereRegionSampEntryAtom.cpp @@ -0,0 +1,147 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SphereRegionSampEntryAtom.cpp +//! \brief: SphereRegionSampEntryAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "SphereRegionSampEntryAtom.h" + +VCD_MP4_BEGIN + +SphereRegionConfigAtom::SphereRegionConfigAtom() + : FullAtom("rosc", 0, 0) + , m_numRegions(1) +{ + m_shapeType = ShapeMode::TwoAzimuthAndTwoElevationCircles; + m_dynamicRangeFlag = false; + m_staticAzimuthRange = 0; + m_staticElevationRange = 0; +} + +void SphereRegionConfigAtom::SetShapeMode(ShapeMode shapeType) +{ + m_shapeType = shapeType; +} + +SphereRegionConfigAtom::ShapeMode SphereRegionConfigAtom::GetShapeMode() +{ + return m_shapeType; +} + +void SphereRegionConfigAtom::SetDynamicRangeFlag(bool rangeFlag) +{ + m_dynamicRangeFlag = rangeFlag; +} + +bool SphereRegionConfigAtom::GetDynamicRangeFlag() +{ + return m_dynamicRangeFlag; +} + +void SphereRegionConfigAtom::SetStaticAzimuthRange(std::uint32_t range) +{ + m_staticAzimuthRange = range; +} + +std::uint32_t SphereRegionConfigAtom::GetStaticAzimuthRange() +{ + return m_staticAzimuthRange; +} + +void SphereRegionConfigAtom::SetStaticElevationRange(std::uint32_t range) +{ + m_staticElevationRange = range; +} + +std::uint32_t SphereRegionConfigAtom::GetStaticElevationRange() +{ + return m_staticElevationRange; +} + +void SphereRegionConfigAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write8((uint8_t) m_shapeType); + str.Write8(m_dynamicRangeFlag ? 
0x1 : 0x0); + if (!m_dynamicRangeFlag) + { + str.Write32(m_staticAzimuthRange); + str.Write32(m_staticElevationRange); + } + str.Write8(m_numRegions); + + UpdateSize(str); +} + +void SphereRegionConfigAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_shapeType = (ShapeMode) str.Read8(); + m_dynamicRangeFlag = str.Read8() & 0x1; + if (!m_dynamicRangeFlag) + { + m_staticAzimuthRange = str.Read32(); + m_staticElevationRange = str.Read32(); + } + m_numRegions = str.Read8(); +} + +SphereRegionSampleEntryAtom::SphereRegionSampleEntryAtom(FourCCInt codingname) + : MetaDataSampleEntryAtom(codingname) +{ +} + +SphereRegionConfigAtom& SphereRegionSampleEntryAtom::GetSphereRegionConfig() +{ + return m_sphereRegionConfig; +} + +void SphereRegionSampleEntryAtom::ToStream(Stream& str) +{ + MetaDataSampleEntryAtom::ToStream(str); + + Stream subStream; + m_sphereRegionConfig.ToStream(subStream); + str.WriteStream(subStream); + + UpdateSize(str); +} + +void SphereRegionSampleEntryAtom::FromStream(Stream& str) +{ + MetaDataSampleEntryAtom::FromStream(str); + + FourCCInt AtomType; + auto sphrereRegionConfigAtomStream = str.ReadSubAtomStream(AtomType); + m_sphereRegionConfig.ToStream(sphrereRegionConfigAtomStream); +} + +VCD_MP4_END diff --git a/src/isolib/atoms/SphereRegionSampEntryAtom.h b/src/isolib/atoms/SphereRegionSampEntryAtom.h new file mode 100644 index 00000000..a4dd76df --- /dev/null +++ b/src/isolib/atoms/SphereRegionSampEntryAtom.h @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SphereRegionSampEntryAtom.h +//! \brief: OMAF SphereRegionSampleEntryAtom class. +//! \detail: Sphere Region Sample Entry Atom +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _SPHEREREGIONSAMPLEENTRYATOM_H_ +#define _SPHEREREGIONSAMPLEENTRYATOM_H_ + +#include "Stream.h" +#include "CommonTypes.h" +#include "FormAllocator.h" +#include "MetaDataSampEntryAtom.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class SphereRegionConfigAtom : public FullAtom +{ +public: + enum class ShapeMode : std::uint8_t //!< Shape Mode + { + FourGreatCircles = 0, + TwoAzimuthAndTwoElevationCircles = 1 + }; + + //! + //! \brief Constructor + //! 
+ SphereRegionConfigAtom(); + + //! + //! \brief Destructor + //! + virtual ~SphereRegionConfigAtom() = default; + + //! + //! \brief Set Shape Mode + //! + //! \param [in] ShapeMode + //! Shape Mode + //! + //! \return void + //! + void SetShapeMode(ShapeMode shapeType); + + //! + //! \brief Get Shape Mode + //! + //! \return ShapeMode + //! Shape Mode + //! + ShapeMode GetShapeMode(); + + //! + //! \brief Set Dynamic Range Flag + //! + //! \param [in] bool + //! shape Type + //! + //! \return void + //! + void SetDynamicRangeFlag(bool shapeType); + + //! + //! \brief Get Dynamic Range Flag + //! + //! \return bool + //! Dynamic Range Flag + //! + bool GetDynamicRangeFlag(); + + //! + //! \brief Set Static Azimuth Range + //! + //! \param [in] std::uint32_t + //! range + //! + //! \return void + //! + void SetStaticAzimuthRange(std::uint32_t range); + + //! + //! \brief Get Static Azimuth Range + //! + //! \return std::uint32_t + //! range + //! + std::uint32_t GetStaticAzimuthRange(); + + //! + //! \brief Set Static Elevation Range + //! + //! \param [in] std::uint32_t + //! range + //! + //! \return void + //! + void SetStaticElevationRange(std::uint32_t range); + + //! + //! \brief Get Static Elevation Range + //! + //! \return std::uint32_t + //! range + //! + std::uint32_t GetStaticElevationRange(); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + +private: + ShapeMode m_shapeType; //!< Shape Mode + bool m_dynamicRangeFlag; //!< dynamic Range Flag + std::uint32_t m_staticAzimuthRange; //!< static Azimuth Range + std::uint32_t m_staticElevationRange; //!< static Elevation Range + std::uint8_t m_numRegions; //!< num of Regions +}; + +class SphereRegionSampleEntryAtom : public MetaDataSampleEntryAtom +{ +public: + struct SphereRegionSample //!< Sphere Region Sample + { + std::vector regions; + }; + + //! + //! \brief Constructor + //! + SphereRegionSampleEntryAtom(FourCCInt codingname); + + //! + //! \brief Destructor + //! + virtual ~SphereRegionSampleEntryAtom() = default; + + //! + //! \brief Get Sphere Region Config Atom + //! + //! \return SphereRegionConfigAtom& + //! Sphere Region Config Atom + //! + SphereRegionConfigAtom& GetSphereRegionConfig(); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + +private: + SphereRegionConfigAtom m_sphereRegionConfig; //!< Sphere Region Config Atom +}; + +VCD_MP4_END; +#endif /* _SPHEREREGIONSAMPLEENTRYATOM_H_ */ diff --git a/src/isolib/atoms/Stream.cpp b/src/isolib/atoms/Stream.cpp new file mode 100644 index 00000000..f20adfbe --- /dev/null +++ b/src/isolib/atoms/Stream.cpp @@ -0,0 +1,565 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Stream.cpp +//! \brief: Stream implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! +#include "Stream.h" +#include +#include +#include + + +VCD_MP4_BEGIN + +Stream::Stream() + : m_storage() + , m_currByte(0) + , m_byteOffset(0) + , m_bitOffset(0) + , m_storageAllocated(true) +{ +} + +Stream::Stream(const std::vector& strData) + : m_storage(strData) + , m_currByte(0) + , m_byteOffset(0) + , m_bitOffset(0) + , m_storageAllocated(false) +{ +} + +Stream::Stream(Stream&& other) + : m_storage(std::move(other.m_storage)) + , m_currByte(other.m_currByte) + , m_byteOffset(other.m_byteOffset) + , m_bitOffset(other.m_bitOffset) + , m_storageAllocated(other.m_storageAllocated) +{ + other.m_currByte = {}; + other.m_byteOffset = {}; + other.m_bitOffset = {}; + other.m_storageAllocated = {}; + other.m_storage.clear(); +} + +Stream& Stream::operator=(Stream&& other) +{ + m_currByte = other.m_currByte; + m_byteOffset = other.m_byteOffset; + m_bitOffset = other.m_bitOffset; + m_storageAllocated = other.m_storageAllocated; + m_storage = std::move(other.m_storage); + return *this; +} + +Stream::~Stream() +{ + if (m_storageAllocated == true) + { + m_storage.clear(); + } +} + +bool Stream::IsByteAligned() const +{ + return m_bitOffset ? false : true; +} + +std::uint64_t Stream::GetSize() const +{ + std::uint64_t size = m_storage.size(); + return size; +} + +void Stream::SetSize(const std::uint64_t newSize) +{ + m_storage.resize(newSize); +} + +const std::vector& Stream::GetStorage() const +{ + return m_storage; +} + +void Stream::Reset() +{ + m_currByte = 0; + m_bitOffset = 0; + m_byteOffset = 0; +} + +void Stream::Clear() +{ + m_storage.clear(); +} + +void Stream::SkipBytes(const std::uint64_t x) +{ + m_byteOffset += x; +} + +void Stream::SetByte(const std::uint64_t offset, const std::uint8_t byte) +{ + m_storage.at(offset) = byte; +} + +std::uint8_t Stream::GetByte(const std::uint64_t offset) const +{ + std::uint8_t ret = m_storage.at(offset); + return ret; +} + +std::uint64_t Stream::ReadAtomHeaders(FourCCInt& type) +{ + auto size = Read32(); + type = Read32(); + return size == 1 ? 
Read64() : size; +} + +std::uint64_t Stream::BytesRemain() const +{ + return m_storage.size() - m_byteOffset; +} +void Stream::Extract(const std::uint64_t begin, const std::uint64_t end, Stream& dest) const +{ + dest.Clear(); + dest.Reset(); + if (begin <= m_storage.size() && end <= m_storage.size() && begin <= end) + { + dest.m_storage.insert(dest.m_storage.begin(), m_storage.begin() + static_cast(begin), + m_storage.begin() + static_cast(end)); + } + else + { + ISO_LOG(LOG_ERROR, "ReadSubAtomStream trying to Read 0 size Atom\n"); + throw Exception(); + } +} + +void Stream::WriteStream(const Stream& str) +{ + m_storage.insert(m_storage.end(), str.m_storage.begin(), str.m_storage.end()); +} + + +void Stream::Write8(const std::uint8_t bits) +{ + m_storage.push_back(bits); +} + +void Stream::Write16(const std::uint16_t bits) +{ + for (int i=8;i>=0;) + { + m_storage.push_back(static_cast((bits >> i) & 0xff)); + i -= 8; + } +} + +void Stream::Write24(const std::uint32_t bits) +{ + for (int i=16;i>=0;) + { + m_storage.push_back(static_cast((bits >> i) & 0xff)); + i -= 8; + } +} + +void Stream::Write32(const std::uint32_t bits) +{ + for (int i=24;i>=0;) + { + m_storage.push_back(static_cast((bits >> i) & 0xff)); + i -= 8; + } +} + +void Stream::Write64(const std::uint64_t bits) +{ + for (int i=56;i>=0;) + { + m_storage.push_back(static_cast((bits >> i) & 0xff)); + i -= 8; + } +} + +void Stream::WriteArray(const std::vector& bits, + const std::uint64_t len, + const std::uint64_t srcOffset) +{ + // if len was not given, add everything until end of the vector + auto copyLen = len == UINT64_MAX ? (bits.size() - srcOffset) : len; + + m_storage.insert(m_storage.end(), bits.begin() + static_cast(srcOffset), + bits.begin() + static_cast(srcOffset + copyLen)); +} + +void Stream::Write1(std::uint64_t bits, std::uint32_t len) +{ + if (len == 0) + { + ISO_LOG(LOG_WARNING, "Stream::Write1 called for zero-length bit sequence.\n"); + } + else + { + do + { + const unsigned int pLeftByte = 8 - m_bitOffset; + if (pLeftByte > len) + { + m_currByte = + m_currByte | + (static_cast((bits & (std::numeric_limits::max() >> (64 - len))) + << (pLeftByte - len))); + m_bitOffset += len; + len = 0; + } + else + { + m_currByte = m_currByte | (static_cast((bits >> (len - pLeftByte)) & + ~((std::numeric_limits::max() + << (64 - pLeftByte))))); + m_storage.push_back((uint8_t) m_currByte); + m_bitOffset = 0; + m_currByte = 0; + len -= pLeftByte; + } + } while (len > 0); + } +} + +void Stream::WriteString(const std::string& srcString) +{ + if (srcString.length() == 0) + { + ISO_LOG(LOG_WARNING, "Stream::WriteString called for zero-length string.\n"); + } + + for (const auto character : srcString) + { + m_storage.push_back(static_cast(character)); + } +} + +void Stream::WriteZeroEndString(const std::string& srcString) +{ + for (const auto character : srcString) + { + m_storage.push_back(static_cast(character)); + } + m_storage.push_back('\0'); +} + +void Stream::WriteFloat32(float value) +{ + std::uint32_t convertedValue = (std::uint32_t)value; + Write32(convertedValue); + //Write32(*((std::uint32_t*)(&value))); +} + +void Stream::WriteHeaders(FourCCInt type, std::uint64_t AtomPayloadSize) +{ + bool over32BitSize = AtomPayloadSize > (UINT32_MAX - 8); + Write32(over32BitSize ? 
1 : (uint32_t) AtomPayloadSize + 8); + Write32(type.GetUInt32()); + if (over32BitSize) + { + Write64(AtomPayloadSize + 16); + } +} + +float Stream::ReadFloat32() +{ + std::uint32_t value = Read32(); + float convertedValue = (float)value; + return convertedValue; + //return *(float*) &value; +} + +std::uint8_t Stream::Read8() +{ + const std::uint8_t ret = m_storage.at(m_byteOffset); + ++m_byteOffset; + return ret; +} + +std::uint16_t Stream::Read16() +{ + std::uint16_t ret = m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + return ret; +} + +std::uint32_t Stream::Read24() +{ + unsigned int ret = m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + return ret; +} + +std::uint32_t Stream::Read32() +{ + unsigned int ret = m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + return ret; +} + +std::uint64_t Stream::Read64() +{ + unsigned long long int ret = m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + ret = (ret << 8) | m_storage.at(m_byteOffset); + m_byteOffset++; + + return ret; +} + +void Stream::ReadArray(std::vector& bits, const std::uint64_t len) +{ + if (static_cast(m_byteOffset + len) <= m_storage.size()) + { + bits.insert(bits.end(), m_storage.begin() + static_cast(m_byteOffset), + m_storage.begin() + static_cast(m_byteOffset + len)); + m_byteOffset += len; + } + else + { + ISO_LOG(LOG_ERROR, "ReadArray trying to Read outside of m_storage\n"); + throw Exception(); + } +} + +void Stream::ReadByteArrayToBuffer(char* buffer, const std::uint64_t len) +{ + if (static_cast(m_byteOffset + len) <= m_storage.size()) + { + std::memcpy(buffer, m_storage.data() + m_byteOffset, len); + m_byteOffset += len; + } + else + { + ISO_LOG(LOG_ERROR, "ReadArray trying to Read outside of m_storage\n"); + throw Exception(); + } +} + +std::uint32_t Stream::Read1(const std::uint32_t len) +{ + std::uint32_t retBits = 0; + std::uint32_t pLeftByte = 8 - m_bitOffset; + + if (len == 0) + { + return 0; + } + + if (pLeftByte >= len) + { + retBits = (unsigned int) ((m_storage).at(m_byteOffset) >> (pLeftByte - len)) & + (unsigned int) ((1 << len) - 1); + m_bitOffset += (unsigned int) len; + } + else + { + std::uint32_t pBitsGo = len - pLeftByte; + retBits = (m_storage).at(m_byteOffset) & (((unsigned int) 1 << pLeftByte) - 1); + m_byteOffset++; + m_bitOffset = 0; + while (pBitsGo > 0) + { + if (pBitsGo >= 8) + { + retBits = (retBits << 8) | (m_storage).at(m_byteOffset); + m_byteOffset++; + pBitsGo -= 8; + } + else + { + retBits = (retBits << pBitsGo) | + ((unsigned int) ((m_storage).at(m_byteOffset) >> (8 - pBitsGo)) & + (((unsigned int) 1 << pBitsGo) - 1)); + m_bitOffset += (unsigned int) (pBitsGo); + pBitsGo = 0; + } + } + } + + if (m_bitOffset == 8) + { + m_byteOffset++; + m_bitOffset = 0; + } + + return retBits; +} + +void 
Stream::ReadStringWithLen(std::string& pDst, const std::uint32_t len) +{ + pDst.clear(); + for (std::uint32_t i = 0; i < len; i++) + { + std::uint8_t pCurr = Read8(); + pDst += static_cast(pCurr); + } +} + +void Stream::ReadStringWithPosAndLen(std::string& pDst, const std::uint64_t pos, const std::uint32_t len) +{ + pDst.clear(); + for (std::uint32_t i = 0; i < len; i++) + { + std::uint8_t pCurr = GetByte(pos + i); + pDst += static_cast(pCurr); + } +} + +void Stream::ReadZeroEndString(std::string& pDst) +{ + std::uint8_t pCurr = 0xff; + pDst.clear(); + + while (m_byteOffset < m_storage.size()) + { + pCurr = Read8(); + if ((char) pCurr != '\0') + { + pDst += static_cast(pCurr); + } + else + { + break; + } + } +} + +uint32_t Stream::ReadExpGolombCode() +{ + std::int32_t pLeadZeros = -1; + std::uint32_t codeNum; + std::uint32_t tmpBit = 0; + + while (tmpBit == 0) + { + tmpBit = Read1(1); + pLeadZeros++; + } + + std::uint32_t shiftAmount = static_cast(pLeadZeros); + codeNum = ((std::uint32_t) 1 << shiftAmount) - 1 + Read1(shiftAmount); + return codeNum; +} + +int32_t Stream::ReadSignedExpGolombCode() +{ + unsigned int codeNum = ReadExpGolombCode(); + int signedVal = int((codeNum + 1) >> 1); + + if ((codeNum & 1) == 0) + { + signedVal = -signedVal; + } + + return signedVal; +} + +Stream Stream::ReadSubAtomStream(FourCCInt& pType) +{ + std::uint64_t pSize = Read32(); + + pType = Read32(); + + std::uint64_t minAtomSize = 8; + + if (pSize == 1) // Check if 'largesize' field is used + { + pSize = Read64(); + minAtomSize += 4; + m_byteOffset -= 8; + } + + m_byteOffset -= 8; + + if (pSize < minAtomSize) + { + ISO_LOG(LOG_ERROR, "Stream::ReadSubAtomStream trying to Read too small Atom\n"); + throw Exception(); + } + + Stream subBitstr; + Extract(GetPos(), GetPos() + pSize, subBitstr); + m_byteOffset += pSize; + + return subBitstr; +} + +void Stream::ReadUUID(std::vector& uuid) +{ + std::uint64_t initialOffset = GetPos(); + std::uint64_t pSize = Read32(); + FourCCInt pType = Read32(); + if (pSize == 1) // Check if 'largesize' field is used + { + pSize = Read64(); + } + if (pType != "uuid") + { + return; + } + + ReadArray(uuid, 16); + SetPos(initialOffset); +} + +VCD_MP4_END diff --git a/src/isolib/atoms/Stream.h b/src/isolib/atoms/Stream.h new file mode 100644 index 00000000..c015f884 --- /dev/null +++ b/src/isolib/atoms/Stream.h @@ -0,0 +1,485 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Stream.h +//! \brief: Stream class +//! \detail: Stream read and write operation +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef BITSTREAM_H +#define BITSTREAM_H + +#include +#include "FormAllocator.h" +#include "../include/Common.h" +#include "FourCCInt.h" + +VCD_MP4_BEGIN + +class Stream +{ +public: + + //! + //! \brief Constructor + //! + Stream(); + Stream(const std::vector& strData); + Stream(const Stream&) = default; + Stream& operator=(const Stream&) = default; + Stream(Stream&&); + Stream& operator=(Stream&&); + + //! + //! \brief Destructor + //! + ~Stream(); + + //! + //! \brief Set and Get function for m_byteOffset member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_byteOffset + //! m_byteOffset member in class + //! \param [in] Pos + //! m_byteOffset name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint64_t, m_byteOffset, Pos, const); + + //! + //! \brief Get Size + //! + //! \return std::uint64_t + //! Size + //! + std::uint64_t GetSize() const; + + //! + //! \brief Set Size + //! + //! \param [in] std::uint64_t + //! Size value + //! + //! \return void + //! + void SetSize(std::uint64_t newSize); + + //! + //! \brief Get Storage + //! + //! \return const std::vector& + //! Storage + //! + const std::vector& GetStorage() const; + + //! + //! \brief Reset function + //! + void Reset(); + + //! + //! \brief Clear function + //! + void Clear(); + + //! + //! \brief Skip Bytes + //! + //! \param [in] std::uint64_t + //! offset + //! + //! \return void + //! + void SkipBytes(std::uint64_t x); + + //! + //! \brief Set Byte + //! + //! \param [in] std::uint64_t + //! offset + //! \param [in] std::uint8_t + //! byte + //! + //! \return void + //! + void SetByte(std::uint64_t offset, std::uint8_t byte); + + //! + //! \brief Get Storage + //! + //! \param [in] std::uint64_t + //! offset + //! + //! \return std::uint8_t + //! byte data + //! + std::uint8_t GetByte(std::uint64_t offset) const; + + //! + //! \brief Write 1 bit + //! + //! \param [in] std::uint64_t + //! bits + //! \param std::uint32_t + //! length + //! + //! \return void + //! + void Write1(std::uint64_t bits, std::uint32_t len); + + //! + //! \brief Write 8 bit + //! + //! \param [in] std::uint64_t + //! bits + //! + //! \return std::uint32_t + //! length + //! + void Write8(std::uint8_t bits); + + //! + //! \brief Write Stream + //! + //! \param [in,out] const Stream& str + //! bitstream + //! + //! \return void + //! + void WriteStream(const Stream& str); + + //! + //! \brief Write 16 bit + //! + //! \param [in] std::uint16_t + //! bits + //! + //! \return void + //! + void Write16(std::uint16_t bits); + + //! + //! \brief Write 24 bit + //! + //! \param [in] std::uint32_t + //! bits + //! + //! \return void + //! + void Write24(std::uint32_t bits); + + //! + //! \brief Write 32 bit + //! + //! 
\param [in] std::uint32_t + //! bits + //! + //! \return void + //! + void Write32(std::uint32_t bits); + + //! + //! \brief Write 64 bit + //! + //! \param [in] std::uint64_t + //! bits + //! + //! \return void + //! + void Write64(std::uint64_t bits); + + //! + //! \brief Write array 8bit + //! + //! \param [in,out] const std::vector& + //! bits + //! \param [in] std::uint64_t + //! len + //! \param [in] std::uint64_t + //! source Offset + //! + //! \return void + //! + void WriteArray(const std::vector& bits, + std::uint64_t len = UINT64_MAX, + std::uint64_t srcOffset = 0); + + //! + //! \brief Write String + //! + //! \param [in,out] const std::string& srcString + //! source string + //! + //! \return void + //! + void WriteString(const std::string& srcString); + + //! + //! \brief Write Zero End String + //! + //! \param [in,out] const std::string& srcString + //! source string + //! + //! \return void + //! + void WriteZeroEndString(const std::string& srcString); + + //! + //! \brief Write 32bits Float + //! + //! \param [in] float + //! value + //! + //! \return void + //! + void WriteFloat32(float value); + + //! + //! \brief Write headers + //! + //! \param [in] FourCCInt + //! type + //! \param [in] std::uint64_t + //! Atom Pay load Size + //! + //! \return void + //! + void WriteHeaders(FourCCInt type, std::uint64_t AtomPayloadSize); + + //! + //! \brief Read 1 bit + //! + //! \param [in] const std::uint32_t + //! len + //! + //! \return std::uint32_t + //! bits size offset + //! + std::uint32_t Read1(const std::uint32_t len); + + //! + //! \brief Read 8 bit + //! + //! \return std::uint8_t + //! bits size offset + //! + std::uint8_t Read8(); + + //! + //! \brief Read 16 bit + //! + //! \return std::uint16_t + //! bits size offset + //! + std::uint16_t Read16(); + + //! + //! \brief Read 24 bit + //! + //! \return std::uint24_t + //! bits size offset + //! + std::uint32_t Read24(); + + //! + //! \brief Read 32 bit + //! + //! \return std::uint32_t + //! bits size offset + //! + std::uint32_t Read32(); + + //! + //! \brief Read 64 bit + //! + //! \return std::uint64_t + //! bits size offset + //! + std::uint64_t Read64(); + + //! + //! \brief Read array 8bit + //! + //! \param [in] std::vector& bits + //! bits + //! \param [in] std::uint64_t + //! len + //! + //! \return void + //! + void ReadArray(std::vector& bits, std::uint64_t len); + + //! + //! \brief Read Byte Array To Buffer + //! + //! \param [in] char* + //! buffer + //! \param [in] std::uint64_t + //! len + //! + //! \return void + //! + void ReadByteArrayToBuffer(char* buffer, std::uint64_t len); + + //! + //! \brief Read String With Len + //! + //! \param [in] std::string& + //! dst String + //! \param [in] std::uint32_t + //! len + //! + //! \return void + //! + void ReadStringWithLen(std::string& dstString, std::uint32_t len); + + //! + //! \brief Read String With Pos And Len + //! + //! \param [in] std::string& + //! dst String + //! \param [in] std::uint64_t + //! pos + //! \param [in] std::uint32_t + //! len + //! + //! \return void + //! + void ReadStringWithPosAndLen(std::string& dstString, std::uint64_t pos, std::uint32_t len); + + //! + //! \brief Read Zero End String + //! + //! \param [in] std::string& + //! dst String + //! + //! \return void + //! + void ReadZeroEndString(std::string& dstString); + + //! + //! \brief Read 32bits Float + //! + //! \return float + //! float data + //! + float ReadFloat32(); + + //! + //! \brief Read Exp Golomb Code + //! + //! \return uint32_t + //! code number + //! 
+ uint32_t ReadExpGolombCode(); + + //! + //! \brief Read Signed Exp Golomb Code + //! + //! \return int32_t + //! code number + //! + int32_t ReadSignedExpGolombCode(); + + //! + //! \brief Read Sub Atom Stream + //! + //! \param [in] FourCCInt& + //! Atom Type + //! + //! \return Stream + //! sub Bitstream + //! + Stream ReadSubAtomStream(FourCCInt& AtomType); + + //! + //! \brief Read UUID + //! + //! \param [in] std::vector& + //! uuid + //! + //! \return void + //! + void ReadUUID(std::vector& uuid); + + //! + //! \brief Read Atom Headers + //! + //! \param [in] FourCCInt& + //! Atom Type + //! + //! \return std::uint64_t + //! size + //! + std::uint64_t ReadAtomHeaders(FourCCInt& type); + + //! + //! \brief Bytes Remain to process + //! + //! \return std::uint64_t + //! storage size + //! + std::uint64_t BytesRemain() const; + + //! + //! \brief Read String With Pos And Len + //! + //! \param [in] std::uint64_t + //! begin pos + //! \param [in] std::uint64_t + //! end pos + //! \param [in] Stream& + //! dest stream + //! + //! \return void + //! + void Extract(std::uint64_t begin, std::uint64_t end, Stream& dest) const; + + //! + //! \brief Is Byte Aligned or not + //! + //! \return bool + //! Is Byte Aligned or not + //! + bool IsByteAligned() const; + +private: + std::vector m_storage; //!< storage + unsigned int m_currByte; //!< current byte postion + std::uint64_t m_byteOffset; //!< byte offset + unsigned int m_bitOffset; //!< bit offset + bool m_storageAllocated; //!< is storage Allocated successfully +}; + +VCD_MP4_END; +#endif /* BITSTREAM_H */ diff --git a/src/isolib/atoms/SyncSampAtom.cpp b/src/isolib/atoms/SyncSampAtom.cpp new file mode 100644 index 00000000..f16d8488 --- /dev/null +++ b/src/isolib/atoms/SyncSampAtom.cpp @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SyncSampAtom.cpp +//! \brief: SyncSampAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
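> Note: since every atom above funnels through it, here is a self-contained sketch of the mixed bit- and byte-level round trip the Stream class supports, matching the Write1/Read1 and Write32/Read32 implementations shown earlier; the helper name and values are invented. Reads start at offset zero of the same storage the writes appended to, so a freshly written Stream can be read back directly.

```cpp
#include "Stream.h"
#include <string>

VCD_MP4_BEGIN

// Write a 1-bit flag, a 3-bit field, padding to the byte boundary, a 32-bit
// big-endian value and a zero-terminated string, then read them back in order.
bool RoundTripStreamSketch()
{
    Stream str;
    str.Write1(1, 1);              // 1-bit flag
    str.Write1(5, 3);              // 3-bit field
    str.Write1(0, 4);              // pad to the byte boundary so the byte is flushed
    str.Write32(0x12345678);
    str.WriteZeroEndString("omaf");

    bool ok = true;
    ok &= (str.Read1(1) == 1);
    ok &= (str.Read1(3) == 5);
    str.Read1(4);                  // skip the padding bits
    ok &= (str.Read32() == 0x12345678u);

    std::string tag;
    str.ReadZeroEndString(tag);
    ok &= (tag == "omaf");
    return ok;
}

VCD_MP4_END
```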
+ +#include "SyncSampAtom.h" + +VCD_MP4_BEGIN + +SyncSampleAtom::SyncSampleAtom() + : FullAtom("stss", 0, 0) + , m_sampleNumber() + , m_sampleNumMax(-1) +{ +} + +void SyncSampleAtom::AddSample(std::uint32_t sampleNumber) +{ + m_sampleNumber.push_back(sampleNumber); +} + +const std::vector SyncSampleAtom::GetSyncSampleIds() const +{ + return m_sampleNumber; +} + +void SyncSampleAtom::SetSampleNumMaxSafety(int64_t sampleNumMax) +{ + m_sampleNumMax = sampleNumMax; +} + +void SyncSampleAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write32(static_cast(m_sampleNumber.size())); + + for (auto sampleNumber : m_sampleNumber) + { + str.Write32(sampleNumber); + } + + // Update the size + UpdateSize(str); +} + +void SyncSampleAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + std::uint32_t entryCount = str.Read32(); + + if (m_sampleNumMax != -1 && (entryCount > m_sampleNumMax)) + { + ISO_LOG(LOG_ERROR, "FromStreamAtom entryCount is larger than total number of samples\n"); + throw Exception(); + } + + for (std::uint32_t i = 0; i < entryCount; ++i) + { + m_sampleNumber.push_back(str.Read32()); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/SyncSampAtom.h b/src/isolib/atoms/SyncSampAtom.h new file mode 100644 index 00000000..e75560b7 --- /dev/null +++ b/src/isolib/atoms/SyncSampAtom.h @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SyncSampAtom.h +//! \brief: SyncSampAtom class. +//! \detail: 'stss' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _SYNCSAMPLEATOM_H_ +#define _SYNCSAMPLEATOM_H_ + +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class SyncSampleAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + SyncSampleAtom(); + + //! + //! \brief Destructor + //! + virtual ~SyncSampleAtom() = default; + + //! + //! \brief Add Sample function + //! + //! \param [in] std::uint32_t + //! sample Number + //! + //! \return void + //! + void AddSample(std::uint32_t sampleNumber); + + //! + //! \brief Get Sync Sample Ids + //! + //! \return const std::vector + //! Sync Sample Ids + //! 
+ const std::vector GetSyncSampleIds() const; + + //! + //! \brief Set Sample Num Max Safety + //! + //! \param [in] int64_t + //! max sample Num + //! + //! \return void + //! + void SetSampleNumMaxSafety(int64_t sampleNumMax); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::vector m_sampleNumber; //!< std::vector of sync sample Ids + int64_t m_sampleNumMax; //!< max sample num +}; + +VCD_MP4_END; +#endif /* _SYNCSAMPLEATOM_H_ */ diff --git a/src/isolib/atoms/TimeToSampAtom.cpp b/src/isolib/atoms/TimeToSampAtom.cpp new file mode 100644 index 00000000..2cbbbf31 --- /dev/null +++ b/src/isolib/atoms/TimeToSampAtom.cpp @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TimeToSampAtom.cpp +//! \brief: TimeToSampAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! 
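With the 'stss' class complete above, the following sketch shows the intended write/read flow. It is a hedged illustration rather than code from this patch; the include names and the VCD::MP4 namespace are assumptions drawn from this diff.

```cpp
#include "SyncSampAtom.h"
#include "Stream.h"

// Writer: collect sync (key-frame) sample numbers and serialize the atom.
void WriteSyncSampleTable(VCD::MP4::Stream& out)
{
    VCD::MP4::SyncSampleAtom stss;
    stss.AddSample(1);    // sample numbers are 1-based in 'stss'
    stss.AddSample(31);
    stss.AddSample(61);
    stss.ToStream(out);   // full-atom header + entry count + entries + size fixup
}

// Reader: bound the entry count first, then parse and query the ids.
void ReadSyncSampleTable(VCD::MP4::Stream& in)
{
    VCD::MP4::SyncSampleAtom stss;
    stss.SetSampleNumMaxSafety(1024);  // guard checked by FromStream()
    stss.FromStream(in);
    auto syncIds = stss.GetSyncSampleIds();
    (void)syncIds;
}
```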
+#include "TimeToSampAtom.h" + + +#include +#include + +VCD_MP4_BEGIN + +TimeToSampleAtom::TimeToSampleAtom() + : FullAtom("stts", 0, 0) +{ +} + +std::vector TimeToSampleAtom::GetSampleTimes() const +{ + std::vector pTimes; + uint32_t time = 0; + for (const auto& entry : m_entryVersion0) + { + for (unsigned int i = 0; i < entry.m_sampleNum; ++i) + { + pTimes.push_back(time); + time += entry.m_sampleDelta; + } + } + + return pTimes; +} + +std::vector TimeToSampleAtom::GetSampleDeltas() const +{ + std::vector pDeltas; + + if (m_entryVersion0.size()) + { + pDeltas.reserve(m_entryVersion0.at(0).m_sampleNum); + for (const auto& entry : m_entryVersion0) + { + for (unsigned int i = 0; i < entry.m_sampleNum; ++i) + { + pDeltas.push_back(entry.m_sampleDelta); + } + } + } + + return pDeltas; +} + +std::uint32_t TimeToSampleAtom::GetSampleNum() const +{ + std::uint64_t sampleNum = 0; + + if (m_entryVersion0.size()) + { + for (const auto& entry : m_entryVersion0) + { + sampleNum += entry.m_sampleNum; + if (sampleNum > std::numeric_limits::max()) + { + ISO_LOG(LOG_ERROR, "TimeToSampleAtom::sampleNum >= 2^32\n"); + throw Exception(); + } + } + } + std::uint32_t ret = std::uint32_t(sampleNum); + return ret; +} + +TimeToSampleAtom::EntryVersion0& TimeToSampleAtom::GetDecodeDeltaEntry() +{ + m_entryVersion0.resize(m_entryVersion0.size() + 1); + EntryVersion0& ret = m_entryVersion0.back(); + return ret; +} + +void TimeToSampleAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write32(static_cast(m_entryVersion0.size())); + for (auto entry : m_entryVersion0) + { + str.Write32(entry.m_sampleNum); + str.Write32(entry.m_sampleDelta); + } + + UpdateSize(str); +} + +void TimeToSampleAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseFullAtomHeader(str); + + std::uint32_t pCnt = str.Read32(); + for (uint32_t i = 0; i < pCnt; ++i) + { + EntryVersion0 pVersion; + pVersion.m_sampleNum = str.Read32(); + pVersion.m_sampleDelta = str.Read32(); + m_entryVersion0.push_back(pVersion); + } +} + +void TimeToSampleAtom::AddSampleDelta(std::uint32_t delta) +{ + if (!m_entryVersion0.size() || delta != m_entryVersion0.back().m_sampleDelta) + { + m_entryVersion0.push_back({1, delta}); + } + else + { + ++m_entryVersion0.back().m_sampleNum; + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TimeToSampAtom.h b/src/isolib/atoms/TimeToSampAtom.h new file mode 100644 index 00000000..4ad0d81f --- /dev/null +++ b/src/isolib/atoms/TimeToSampAtom.h @@ -0,0 +1,131 @@ + +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TimeToSampAtom.h +//! \brief: TimeToSampAtom Atom class +//! \detail: 'stts' Atom +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef TIMETOSAMPLEATOM_H +#define TIMETOSAMPLEATOM_H + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class TimeToSampleAtom : public FullAtom +{ +public: + + struct EntryVersion0 //!< Entry Version v0 + { + std::uint32_t m_sampleNum; + std::uint32_t m_sampleDelta; + }; + + //! + //! \brief Constructor + //! + TimeToSampleAtom(); + + //! + //! \brief Destructor + //! + virtual ~TimeToSampleAtom() = default; + + //! + //! \brief Get Sample Times + //! + //! \return std::vector + //! Sample Time array + //! + std::vector GetSampleTimes() const; + + //! + //! \brief Get Sample Deltas + //! + //! \return std::vector + //! Sample Time Deltas + //! + std::vector GetSampleDeltas() const; + + //! + //! \brief Get Sample number + //! + //! \return std::uint32_t + //! Sample number + //! + std::uint32_t GetSampleNum() const; + + //! + //! \brief Get Decode Delta Entry + //! + //! \return EntryVersion0& + //! Decode Delta Entry + //! + EntryVersion0& GetDecodeDeltaEntry(); + + //! + //! \brief Add Sample Delta + //! + //! \param [in] std::uint32_t + //! sample Delta + //! + //! \return void + //! + void AddSampleDelta(std::uint32_t m_sampleDelta); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::vector m_entryVersion0; //!< Entry Version v0 +}; + +VCD_MP4_END; +#endif /* TIMETOSAMPLEATOM_H */ diff --git a/src/isolib/atoms/TrackAtom.cpp b/src/isolib/atoms/TrackAtom.cpp new file mode 100644 index 00000000..c9c54ca8 --- /dev/null +++ b/src/isolib/atoms/TrackAtom.cpp @@ -0,0 +1,251 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackAtom.cpp +//! \brief: TrackAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! + +#include "TrackAtom.h" +#include "BasicVideoAtom.h" + +VCD_MP4_BEGIN + +TrackAtom::TrackAtom() + : Atom("trak") + , m_trackHeaderAtom() + , m_mediaAtom() + , m_trackRefAtom() + , m_hasTrackRef(false) + , m_trackGroupAtom() + , m_hasTrackGroupAtom(false) + , m_hasTrackTypeAtom(false) + , m_editAtom(nullptr) + , m_hasSphericalVideoV1Atom(false) + , m_sphericalVideoV1Atom() +{ +} + +void TrackAtom::SetHasTrackReferences(bool value) +{ + m_hasTrackRef = value; +} + +bool TrackAtom::GetHasTrackReferences() const +{ + return m_hasTrackRef; +} + +const TrackHeaderAtom& TrackAtom::GetTrackHeaderAtom() const +{ + return m_trackHeaderAtom; +} + +TrackHeaderAtom& TrackAtom::GetTrackHeaderAtom() +{ + return m_trackHeaderAtom; +} + +const MediaAtom& TrackAtom::GetMediaAtom() const +{ + return m_mediaAtom; +} + +MediaAtom& TrackAtom::GetMediaAtom() +{ + return m_mediaAtom; +} + +const TrackReferenceAtom& TrackAtom::GetTrackReferenceAtom() const +{ + return m_trackRefAtom; +} + +TrackReferenceAtom& TrackAtom::GetTrackReferenceAtom() +{ + return m_trackRefAtom; +} + +const TrackGroupAtom& TrackAtom::GetTrackGroupAtom() const +{ + return m_trackGroupAtom; +} + +TrackGroupAtom& TrackAtom::GetTrackGroupAtom() +{ + return m_trackGroupAtom; +} + +const TrackTypeAtom& TrackAtom::GetTrackTypeAtom() const +{ + return m_trackTypeAtom; +} + +TrackTypeAtom& TrackAtom::GetTrackTypeAtom() +{ + return m_trackTypeAtom; +} + +void TrackAtom::SetEditAtom(const EditAtom& editAtom) +{ + if (m_editAtom == nullptr) + { + m_editAtom = MakeShared(editAtom); + } + else + { + *m_editAtom = editAtom; + } +} + +std::shared_ptr TrackAtom::GetEditAtom() const +{ + return m_editAtom; +} + +const SphericalVideoV1Atom& TrackAtom::GetSphericalVideoV1Atom() const +{ + return m_sphericalVideoV1Atom; +} + +SphericalVideoV1Atom& TrackAtom::GetSphericalVideoV1Atom() +{ + return m_sphericalVideoV1Atom; +} + +void TrackAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteAtomHeader(str); + + // Write other Atoms contained in the movie Atom + // The TrackHeaderAtom + m_trackHeaderAtom.ToStream(str); + + if (m_hasTrackRef) + { + m_trackRefAtom.ToStream(str); + } + + // The MediaAtom + m_mediaAtom.ToStream(str); + + if (m_hasTrackGroupAtom) + { + m_trackGroupAtom.ToStream(str); + } + + if (m_editAtom) + { + m_editAtom->ToStream(str); + } + + if (m_hasSphericalVideoV1Atom) + { + m_sphericalVideoV1Atom.ToStream(str); + } + + if (m_hasTrackTypeAtom) + { + m_trackTypeAtom.ToStream(str); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void TrackAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseAtomHeader(str); + + // if there a data available in the file + while (str.BytesRemain() > 0) + { + // Extract contained Atom bitstream and type + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == 
"tkhd") + { + m_trackHeaderAtom.FromStream(subBitstr); + } + else if (AtomType == "mdia") + { + m_mediaAtom.FromStream(subBitstr); + } + else if (AtomType == "meta") + { + // @todo Implement this when reading meta Atom in tracks is supported + } + else if (AtomType == "tref") + { + m_trackRefAtom.FromStream(subBitstr); + m_hasTrackRef = true; + } + else if (AtomType == "trgr") + { + m_hasTrackGroupAtom = true; + m_trackGroupAtom.FromStream(subBitstr); + } + else if (AtomType == "ttyp") + { + m_hasTrackTypeAtom = true; + m_trackTypeAtom.FromStream(subBitstr); + } + else if (AtomType == "edts") + { + m_editAtom = MakeShared(); + m_editAtom->FromStream(subBitstr); + } + else if (AtomType == "uuid") + { + std::vector extendedType; + subBitstr.ReadUUID(extendedType); + + std::vector comparison = SPHERICAL_VIDEOV1_GENERAL_UUID; + if (extendedType == comparison) + { + m_sphericalVideoV1Atom.FromStream(subBitstr); + m_hasSphericalVideoV1Atom = true; + } + else + { + ISO_LOG(LOG_WARNING, "Skipping an unsupported UUID Atom inside TrackAtom.\n"); + } + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside TrackAtom.\n", type); + } + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/TrackAtom.h b/src/isolib/atoms/TrackAtom.h new file mode 100644 index 00000000..bae3845c --- /dev/null +++ b/src/isolib/atoms/TrackAtom.h @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackAtom.h +//! \brief: TrackAtom class. +//! \detail: 'trak' Atom +//! +//! Created on October 14, 2019, 13:39 PM +//! + +#ifndef TRACKATOM_H +#define TRACKATOM_H + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "EditAtom.h" +#include "BasicVideoAtom.h" +#include "MediaAtom.h" +#include "TrackGroupAtom.h" +#include "TrackHeaderAtom.h" +#include "TrackRefAtom.h" +#include "TypeAtom.h" + +VCD_MP4_BEGIN + +class TrackAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + TrackAtom(); + + //! + //! \brief Destructor + //! + virtual ~TrackAtom() = default; + + //! + //! \brief Set Has Track References + //! + //! \param [in] bool + //! value + //! + //! 
\return void + //! + void SetHasTrackReferences(bool value = true); + + //! + //! \brief Get Has Track References + //! + //! \return bool + //! value + //! + bool GetHasTrackReferences() const; + + //! + //! \brief Get Track Header Atom + //! + //! \return TrackHeaderAtom& + //! Track Header Atom + //! + TrackHeaderAtom& GetTrackHeaderAtom(); + + //! + //! \brief Get Track Header Atom + //! + //! \return const TrackHeaderAtom& + //! Track Header Atom + //! + const TrackHeaderAtom& GetTrackHeaderAtom() const; + + //! + //! \brief Get Media Atom + //! + //! \return MediaAtom& + //! Media Atom + //! + MediaAtom& GetMediaAtom(); + + //! + //! \brief Get Media Atom + //! + //! \return const MediaAtom& + //! Media Atom + //! + const MediaAtom& GetMediaAtom() const; + + //! + //! \brief Get TrackReference Atom + //! + //! \return TrackReferenceAtom& + //! TrackReference Atom + //! + TrackReferenceAtom& GetTrackReferenceAtom(); + + //! + //! \brief Get TrackReference Atom + //! + //! \return const TrackReferenceAtom& + //! TrackReference Atom + //! + const TrackReferenceAtom& GetTrackReferenceAtom() const; + + //! + //! \brief Get TrackGroup Atom + //! + //! \return TrackGroupAtom& + //! TrackGroup Atom + //! + TrackGroupAtom& GetTrackGroupAtom(); + + //! + //! \brief Get TrackGroup Atom + //! + //! \return const TrackGroupAtom& + //! TrackGroup Atom + //! + const TrackGroupAtom& GetTrackGroupAtom() const; + + //! + //! \brief Set and Get function for m_hasTrackGroupAtom member + //! + //! \param [in] bool + //! value to set + //! \param [in] m_hasTrackGroupAtom + //! m_hasTrackGroupAtom member in class + //! \param [in] HasTrackGroup + //! m_hasTrackGroupAtom name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(bool, m_hasTrackGroupAtom, HasTrackGroup, const); + + //! + //! \brief Get TrackType Atom + //! + //! \return TrackTypeAtom& + //! TrackType Atom + //! + TrackTypeAtom& GetTrackTypeAtom(); + + //! + //! \brief Get TrackType Atom + //! + //! \return const TrackTypeAtom& + //! TrackType Atom + //! + const TrackTypeAtom& GetTrackTypeAtom() const; + + //! + //! \brief Set and Get function for m_hasTrackTypeAtom member + //! + //! \param [in] bool + //! value to set + //! \param [in] m_hasTrackTypeAtom + //! m_hasTrackTypeAtom member in class + //! \param [in] HasTrackTypeAtom + //! m_hasTrackTypeAtom name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(bool, m_hasTrackTypeAtom, HasTrackTypeAtom, const); + + //! + //! \brief Set Edit Atom + //! + //! \param [in] const EditAtom& + //! Edit Atom value + //! + //! \return void + //! + void SetEditAtom(const EditAtom& EditAtom); + + //! + //! \brief Get Edit Atom + //! + //! \return std::shared_ptr + //! Edit Atom + //! + std::shared_ptr GetEditAtom() const; + + //! + //! \brief Get SphericalVideoV1 Atom + //! + //! \return SphericalVideoV1Atom& + //! SphericalVideoV1 Atom + //! + SphericalVideoV1Atom& GetSphericalVideoV1Atom(); + + //! + //! \brief Get SphericalVideoV1 Atom + //! + //! \return const SphericalVideoV1Atom& + //! SphericalVideoV1 Atom + //! + const SphericalVideoV1Atom& GetSphericalVideoV1Atom() const; + + //! + //! \brief Set and Get function for m_hasSphericalVideoV1Atom member + //! + //! \param [in] bool + //! value to set + //! \param [in] m_hasSphericalVideoV1Atom + //! m_hasSphericalVideoV1Atom member in class + //! 
\param [in] HasSphericalVideoV1Atom + //! m_hasSphericalVideoV1Atom name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(bool, m_hasSphericalVideoV1Atom, HasSphericalVideoV1Atom, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + +private: + TrackHeaderAtom m_trackHeaderAtom; //!< Track Header Atom + MediaAtom m_mediaAtom; //!< Media Atom related to this track + TrackReferenceAtom m_trackRefAtom; //!< Track Reference Atom + bool m_hasTrackRef; //!< Flag that shows whether the track has references from other tracks + TrackGroupAtom m_trackGroupAtom; //!< Track Group Atom + bool m_hasTrackGroupAtom; //!< Flag that shows whether the track has a track group Atom + TrackTypeAtom m_trackTypeAtom; //!< Track Type Atom + bool m_hasTrackTypeAtom; //!< Flag that shows whether the track has a track type Atom + std::shared_ptr m_editAtom;//!< Edit Atom (optional) + bool m_hasSphericalVideoV1Atom; //!< has SphericalVideoV1 Atom + SphericalVideoV1Atom m_sphericalVideoV1Atom; //!< spherical Video V1 Atom +}; + +VCD_MP4_END; +#endif /* TRACKATOM_H */ diff --git a/src/isolib/atoms/TrackExtAtom.cpp b/src/isolib/atoms/TrackExtAtom.cpp new file mode 100644 index 00000000..302d5a4d --- /dev/null +++ b/src/isolib/atoms/TrackExtAtom.cpp @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackExtAtom.cpp +//! \brief: TrackExtAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
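TrackAtom, defined above, owns its mandatory children (tkhd, mdia) and serializes the optional ones only when the corresponding has-flags are set. The short sketch below is an illustration under assumed include paths and namespace spelling, not code from this patch.

```cpp
#include "TrackAtom.h"
#include "Stream.h"

void WriteMinimalTrack(VCD::MP4::Stream& out)
{
    VCD::MP4::TrackAtom trak;

    // Child atoms are owned by TrackAtom and are edited in place via the getters.
    auto& tkhd = trak.GetTrackHeaderAtom();  // populate track id, duration, ... here
    auto& mdia = trak.GetMediaAtom();        // populate the 'mdia' hierarchy here
    (void)tkhd;
    (void)mdia;

    trak.SetHasTrackReferences(false);       // 'tref' is only written when this is true
    trak.ToStream(out);                      // tkhd + mdia + optional children + size fixup
}
```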
+ +#include "TrackExtAtom.h" + +VCD_MP4_BEGIN + +TrackExtendsAtom::TrackExtendsAtom() + : FullAtom("trex", 0, 0) + , m_sampleDefaults() +{ +} + +void TrackExtendsAtom::SetFragmentSampleDefaults(const SampleDefaults& fragmentSampleDefaults) +{ + m_sampleDefaults = fragmentSampleDefaults; +} + +const SampleDefaults& TrackExtendsAtom::GetFragmentSampleDefaults() const +{ + return m_sampleDefaults; +} + +void TrackExtendsAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write32(m_sampleDefaults.trackId); + str.Write32(m_sampleDefaults.defaultSampleDescriptionIndex); + str.Write32(m_sampleDefaults.defaultSampleDuration); + str.Write32(m_sampleDefaults.defaultSampleSize); + SampleFlags::Write(str, m_sampleDefaults.defaultSampleFlags); + UpdateSize(str); +} + +void TrackExtendsAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_sampleDefaults.trackId = str.Read32(); + m_sampleDefaults.defaultSampleDescriptionIndex = str.Read32(); + m_sampleDefaults.defaultSampleDuration = str.Read32(); + m_sampleDefaults.defaultSampleSize = str.Read32(); + m_sampleDefaults.defaultSampleFlags = SampleFlags::Read(str); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TrackExtAtom.h b/src/isolib/atoms/TrackExtAtom.h new file mode 100644 index 00000000..1cfb690a --- /dev/null +++ b/src/isolib/atoms/TrackExtAtom.h @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackExtAtom.h +//! \brief: Track Extends Atom class +//! \detail: 'trex' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _TRACKEXTENDSATOM_H_ +#define _TRACKEXTENDSATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" +#include "MovieFragDataTypes.h" + +VCD_MP4_BEGIN + +class TrackExtendsAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + TrackExtendsAtom(); + + //! + //! \brief Destructor + //! + virtual ~TrackExtendsAtom() = default; + + //! + //! \brief Set Fragment Sample Defaults + //! + //! \param [in] const SampleDefaults& + //! fragment Sample Defaults + //! + //! \return void + //! + void SetFragmentSampleDefaults(const SampleDefaults& fragmentSampleDefaults); + + //! 
+ //! \brief Get Fragment Sample Defaults + //! + //! \return const SampleDefaults& + //! Fragment Sample Defaults + //! + const SampleDefaults& GetFragmentSampleDefaults() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + SampleDefaults m_sampleDefaults; //!< Sample Defaults +}; + +VCD_MP4_END; +#endif /* _TRACKEXTENDSATOM_H_ */ diff --git a/src/isolib/atoms/TrackFragAtom.cpp b/src/isolib/atoms/TrackFragAtom.cpp new file mode 100644 index 00000000..e15f88d3 --- /dev/null +++ b/src/isolib/atoms/TrackFragAtom.cpp @@ -0,0 +1,393 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackFragAtom.cpp +//! \brief: TrackFragAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
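The 'trex' atom above simply carries per-track fragment defaults. A hedged sketch of filling it follows; the SampleDefaults field names are taken from the ToStream()/FromStream() code in this diff, while the aggregate initialization style, include names, and namespace are assumptions.

```cpp
#include "TrackExtAtom.h"
#include "Stream.h"

void WriteTrackExtends(VCD::MP4::Stream& out)
{
    VCD::MP4::SampleDefaults defaults{};              // assumed to be a plain aggregate
    defaults.trackId                        = 1;
    defaults.defaultSampleDescriptionIndex  = 1;
    defaults.defaultSampleDuration          = 1024;   // media timescale units
    defaults.defaultSampleSize              = 0;      // per-sample sizes usually come from 'trun'
    defaults.defaultSampleFlags.flagsAsUInt = 0;

    VCD::MP4::TrackExtendsAtom trex;
    trex.SetFragmentSampleDefaults(defaults);
    trex.ToStream(out);                               // trackId + three defaults + sample flags
}
```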
+ +#include "TrackFragAtom.h" +#include + +VCD_MP4_BEGIN + +TrackFragmentHeaderAtom::TrackFragmentHeaderAtom(std::uint32_t tr_flags) + : FullAtom("tfhd", 0, tr_flags) + , m_trackId(0) + , m_baseDataOffset(0) + , m_sampleDescrIndex(0) + , m_defaultSampleDuration(0) + , m_defaultSampleSize(0) +{ + m_defaultSampleFlags.flagsAsUInt = 0; +} + +void TrackFragmentHeaderAtom::SetBaseDataOffset(const uint64_t baseDataOffset) +{ + m_baseDataOffset = baseDataOffset; + SetFlags(GetFlags() | TrackFragmentHeaderAtom::pDataOffset); +} + +uint64_t TrackFragmentHeaderAtom::GetBaseDataOffset() const +{ + if ((GetFlags() & TrackFragmentHeaderAtom::pDataOffset) != 0) + { + return m_baseDataOffset; + } + else + { + ISO_LOG(LOG_ERROR, "TrackFragmentHeaderAtom::GetBaseDataOffset() according to flags pDataOffset not present.\n"); + throw Exception(); + } +} + +void TrackFragmentHeaderAtom::SetDefaultSampleDuration(const uint32_t defaultSampleDuration) +{ + m_defaultSampleDuration = defaultSampleDuration; + SetFlags(GetFlags() | TrackFragmentHeaderAtom::pSampleDuration); +} + +uint32_t TrackFragmentHeaderAtom::GetDefaultSampleDuration() const +{ + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleDuration) != 0) + { + return m_defaultSampleDuration; + } + else + { + ISO_LOG(LOG_ERROR, "TrackFragmentHeaderAtom::GetDefaultSampleDuration() according to flags pSampleDuration\n"); + throw Exception(); + } +} + +void TrackFragmentHeaderAtom::SetDefaultSampleSize(const uint32_t defaultSampleSize) +{ + m_defaultSampleSize = defaultSampleSize; + SetFlags(GetFlags() | TrackFragmentHeaderAtom::pSampleSize); +} + +uint32_t TrackFragmentHeaderAtom::GetDefaultSampleSize() const +{ + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleSize) != 0) + { + return m_defaultSampleSize; + } + else + { + ISO_LOG(LOG_ERROR, "TrackFragmentHeaderAtom::GetDefaultSampleSize() according to flags pSampleSize not present.\n"); + throw Exception(); + } +} + +void TrackFragmentHeaderAtom::SetDefaultSampleFlags(const SampleFlags defaultSampleFlags) +{ + m_defaultSampleFlags = defaultSampleFlags; + SetFlags(GetFlags() | TrackFragmentHeaderAtom::pSampleFlags); +} + +SampleFlags TrackFragmentHeaderAtom::GetDefaultSampleFlags() const +{ + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleFlags) != 0) + { + return m_defaultSampleFlags; + } + else + { + ISO_LOG(LOG_ERROR, "TrackFragmentHeaderAtom::SetDefaultSampleFlags() according to flags pSampleFlags\n"); + throw Exception(); + } +} + +void TrackFragmentHeaderAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write32(m_trackId); + if ((GetFlags() & TrackFragmentHeaderAtom::pDataOffset) != 0) + { + str.Write64(m_baseDataOffset); + } + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleDescrIndex) != 0) + { + str.Write32(m_sampleDescrIndex); + } + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleDuration) != 0) + { + str.Write32(m_defaultSampleDuration); + } + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleSize) != 0) + { + str.Write32(m_defaultSampleSize); + } + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleFlags) != 0) + { + SampleFlags::Write(str, m_defaultSampleFlags); + } + + UpdateSize(str); +} + +void TrackFragmentHeaderAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + m_trackId = str.Read32(); + if ((GetFlags() & TrackFragmentHeaderAtom::pDataOffset) != 0) + { + m_baseDataOffset = str.Read64(); + } + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleDescrIndex) != 0) + { + m_sampleDescrIndex = str.Read32(); + } + if ((GetFlags() & 
TrackFragmentHeaderAtom::pSampleDuration) != 0) + { + m_defaultSampleDuration = str.Read32(); + } + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleSize) != 0) + { + m_defaultSampleSize = str.Read32(); + } + if ((GetFlags() & TrackFragmentHeaderAtom::pSampleFlags) != 0) + { + m_defaultSampleFlags = SampleFlags::Read(str); + } +} + +TrackFragmentBaseMediaDecodeTimeAtom::TrackFragmentBaseMediaDecodeTimeAtom() + : FullAtom("tfdt", 0, 0) + , m_baseMediaDecodeTime(0) +{ +} + +void TrackFragmentBaseMediaDecodeTimeAtom::SetBaseMediaDecodeTime(const uint64_t baseMediaDecodeTime) +{ + m_baseMediaDecodeTime = baseMediaDecodeTime; +} + +uint64_t TrackFragmentBaseMediaDecodeTimeAtom::GetBaseMediaDecodeTime() const +{ + return m_baseMediaDecodeTime; +} + +void TrackFragmentBaseMediaDecodeTimeAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + if (GetVersion() == 0) + { + str.Write32(static_cast(m_baseMediaDecodeTime)); + } + else if (GetVersion() == 1) + { + str.Write64(m_baseMediaDecodeTime); + } + else + { + ISO_LOG(LOG_ERROR, "ToStream() supports only 'tfdt' version 0 or 1\n"); + throw Exception(); + } + UpdateSize(str); +} + +void TrackFragmentBaseMediaDecodeTimeAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + if (GetVersion() == 0) + { + m_baseMediaDecodeTime = str.Read32(); + } + else if (GetVersion() == 1) + { + m_baseMediaDecodeTime = str.Read64(); + } + else + { + ISO_LOG(LOG_ERROR, "FromStream() supports only 'tfdt' version 0 or 1\n"); + throw Exception(); + } +} + +TrackFragmentAtom::TrackFragmentAtom(std::vector& sampleDefaults) + : Atom("traf") + , m_trackFragmentHeaderAtom() + , m_sampleDefaults(sampleDefaults) + , m_trackRunAtoms() + , m_trackFragmentDecodeTimeAtom() +{ +} + +TrackFragmentHeaderAtom& TrackFragmentAtom::GetTrackFragmentHeaderAtom() +{ + return m_trackFragmentHeaderAtom; +} + +void TrackFragmentAtom::AddTrackRunAtom(UniquePtr trackRunAtom) +{ + m_trackRunAtoms.push_back(std::move(trackRunAtom)); +} + +std::vector TrackFragmentAtom::GetTrackRunAtoms() +{ + std::vector trackRunAtoms; + for (auto& trackRuns : m_trackRunAtoms) + { + trackRunAtoms.push_back(trackRuns.get()); + } + return trackRunAtoms; +} + +void TrackFragmentAtom::SetTrackFragmentDecodeTimeAtom( + UniquePtr trackFragmentDecodeTimeAtom) +{ + m_trackFragmentDecodeTimeAtom = std::move(trackFragmentDecodeTimeAtom); +} + +TrackFragmentBaseMediaDecodeTimeAtom* TrackFragmentAtom::GetTrackFragmentBaseMediaDecodeTimeAtom() +{ + return m_trackFragmentDecodeTimeAtom.get(); +} + +void TrackFragmentAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + m_trackFragmentHeaderAtom.ToStream(str); + + if (m_trackFragmentDecodeTimeAtom) + { + m_trackFragmentDecodeTimeAtom->ToStream(str); + } + for (auto& trackRuns : m_trackRunAtoms) + { + trackRuns->ToStream(str); + } + UpdateSize(str); +} + +void TrackFragmentAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + bool foundTfhd = false; + + while (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "tfhd") + { + if (foundTfhd) + { + ISO_LOG(LOG_ERROR, "TrackFragmentAtom: exactly one tfhd expected!\n"); + throw Exception(); + } + + m_trackFragmentHeaderAtom.FromStream(subBitstr); + + bool foundDefault = false; + uint32_t defaultSampleDescriptionIndex = 0; + for (size_t i = 0; i < m_sampleDefaults.size(); i++) + { + if (m_sampleDefaults[i].trackId == m_trackFragmentHeaderAtom.GetTrackId()) + { + defaultSampleDescriptionIndex = m_sampleDefaults[i].defaultSampleDescriptionIndex; 
+ foundDefault = true; + break; + } + } + if (!foundDefault) + { + ISO_LOG(LOG_ERROR, "default sample description index not found\n"); + throw Exception(); + } + + if ((m_trackFragmentHeaderAtom.GetFlags() & TrackFragmentHeaderAtom::pSampleDescrIndex) == 0) + { + m_trackFragmentHeaderAtom.SetSampleDescrIndex(defaultSampleDescriptionIndex); + } + foundTfhd = true; + } + else if (AtomType == "tfdt") + { + UniquePtr trackFragmentDecodeTimeAtom(new TrackFragmentBaseMediaDecodeTimeAtom()); + trackFragmentDecodeTimeAtom->FromStream(subBitstr); + m_trackFragmentDecodeTimeAtom = std::move(trackFragmentDecodeTimeAtom); + } + else if (AtomType == "trun") + { + SampleDefaults sampleDefaults{}; + bool defaultsFound = false; + for (size_t i = 0; i < m_sampleDefaults.size(); i++) + { + if (m_sampleDefaults[i].trackId == m_trackFragmentHeaderAtom.GetTrackId()) + { + sampleDefaults.trackId = m_sampleDefaults[i].trackId; + sampleDefaults.defaultSampleDescriptionIndex = m_sampleDefaults[i].defaultSampleDescriptionIndex; + sampleDefaults.defaultSampleDuration = m_sampleDefaults[i].defaultSampleDuration; + sampleDefaults.defaultSampleSize = m_sampleDefaults[i].defaultSampleSize; + sampleDefaults.defaultSampleFlags = m_sampleDefaults[i].defaultSampleFlags; + defaultsFound = true; + break; + } + } + if ((m_trackFragmentHeaderAtom.GetFlags() & TrackFragmentHeaderAtom::pSampleDuration) != 0) + { + sampleDefaults.defaultSampleDuration = m_trackFragmentHeaderAtom.GetDefaultSampleDuration(); + } + if ((m_trackFragmentHeaderAtom.GetFlags() & TrackFragmentHeaderAtom::pSampleSize) != 0) + { + sampleDefaults.defaultSampleSize = m_trackFragmentHeaderAtom.GetDefaultSampleSize(); + } + if ((m_trackFragmentHeaderAtom.GetFlags() & TrackFragmentHeaderAtom::pSampleFlags) != 0) + { + sampleDefaults.defaultSampleFlags = m_trackFragmentHeaderAtom.GetDefaultSampleFlags(); + } + UniquePtr trackRunAtom(new TrackRunAtom()); + if (defaultsFound) + { + trackRunAtom->SetSampleDefaults(sampleDefaults); + } + + trackRunAtom->FromStream(subBitstr); + m_trackRunAtoms.push_back(std::move(trackRunAtom)); + } + else + { + char type[4]; + AtomType.GetString().copy(type, 4, 0); + ISO_LOG(LOG_WARNING, "Skipping an unsupported Atom '%s' inside TrackFragmentAtom.\n", type); + } + } + if (!foundTfhd) + { + ISO_LOG(LOG_ERROR, "tfhd Atom missing (mandatory)\n"); + throw Exception(); + } +} + +VCD_MP4_END diff --git a/src/isolib/atoms/TrackFragAtom.h b/src/isolib/atoms/TrackFragAtom.h new file mode 100644 index 00000000..8e5e5067 --- /dev/null +++ b/src/isolib/atoms/TrackFragAtom.h @@ -0,0 +1,349 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackFragAtom.h +//! \brief: Track Fragment Atom class +//! \detail: 'traf' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _TRACKFRAGMENTATOM_H_ +#define _TRACKFRAGMENTATOM_H_ + +#include +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "TrackExtAtom.h" +#include "TrackRunAtom.h" +#include "FullAtom.h" +#include "MovieFragDataTypes.h" + +VCD_MP4_BEGIN + +class TrackFragmentHeaderAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + TrackFragmentHeaderAtom(std::uint32_t tr_flags = 0); + + //! + //! \brief Destructor + //! + virtual ~TrackFragmentHeaderAtom() = default; + + enum TrackFragHeaderFlags //!< Track Frag Header Flags + { + pDataOffset = 0x000001, + pSampleDescrIndex = 0x000002, + pSampleDuration = 0x000008, + pSampleSize = 0x000010, + pSampleFlags = 0x000020, + IsDurationEmpty = 0x010000, + IsBaseMoof = 0x020000 + }; + + //! + //! \brief Set and Get function for m_trackId member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_trackId + //! m_trackId member in class + //! \param [in] TrackId + //! m_trackId name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_trackId, TrackId, const); + + //! + //! \brief Set Base Data Offset + //! + //! \param [in] const uint64_t + //! base Data Offset value + //! + //! \return void + //! + void SetBaseDataOffset(const uint64_t baseDataOffset); + + //! + //! \brief Get Base Data Offset + //! + //! \return std::uint64_t + //! Base Data Offset + //! + uint64_t GetBaseDataOffset() const; + + //! + //! \brief Set and Get function for m_sampleDescrIndex member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_sampleDescrIndex + //! m_sampleDescrIndex member in class + //! \param [in] SampleDescrIndex + //! m_sampleDescrIndex name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_sampleDescrIndex, SampleDescrIndex, const); + + //! + //! \brief Set Default Sample Duration + //! + //! \param [in] const uint32_t + //! Default Sample Duration + //! + //! \return void + //! + void SetDefaultSampleDuration(const uint32_t defaultSampleDuration); + + //! + //! \brief Get Default Sample Duration + //! + //! \return std::uint32_t + //! Default Sample Duration + //! + uint32_t GetDefaultSampleDuration() const; + + //! + //! \brief Set Default Sample Size + //! + //! \param [in] const uint32_t + //! Default Sample Size + //! + //! \return void + //! + void SetDefaultSampleSize(const uint32_t defaultSampleSize); + + //! + //! \brief Get Default Sample Size + //! + //! \return std::uint32_t + //! Default Sample Size + //! + uint32_t GetDefaultSampleSize() const; + + //! + //! \brief Set Default Sample Flags + //! + //! \param [in] const Flags + //! Default Sample Flags + //! + //! 
\return void + //! + void SetDefaultSampleFlags(const SampleFlags defaultSampleFlags); + + //! + //! \brief Get Default Sample Flags + //! + //! \return SampleFlags + //! Default Sample Flags + //! + SampleFlags GetDefaultSampleFlags() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + uint32_t m_trackId; //!< track Id + uint64_t m_baseDataOffset; //!< base Data Offset + uint32_t m_sampleDescrIndex; //!< sample Descr Index + uint32_t m_defaultSampleDuration; //!< default Sample Duration + uint32_t m_defaultSampleSize; //!< default Sample Size + SampleFlags m_defaultSampleFlags; //!< default Sample Flags +}; + +class TrackFragmentBaseMediaDecodeTimeAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + TrackFragmentBaseMediaDecodeTimeAtom(); + + //! + //! \brief Destructor + //! + virtual ~TrackFragmentBaseMediaDecodeTimeAtom() = default; + + //! + //! \brief Set Base Media Decode Time + //! + //! \param [in] const uint64_t + //! Base Media Decode Time + //! + //! \return void + //! + void SetBaseMediaDecodeTime(const uint64_t baseMediaDecodeTime); + + //! + //! \brief Get Base Media Decode Time + //! + //! \return std::uint64_t + //! Base Media Decode Time + //! + uint64_t GetBaseMediaDecodeTime() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + uint64_t m_baseMediaDecodeTime; //!< base Media Decode Time +}; + +class TrackFragmentAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + TrackFragmentAtom(std::vector& sampleDefaults); + + //! + //! \brief Destructor + //! + virtual ~TrackFragmentAtom() = default; + + //! + //! \brief Get TrackFragmentHeader Atom + //! + //! \return TrackFragmentHeaderAtom& + //! TrackFragmentHeader Atom + //! + TrackFragmentHeaderAtom& GetTrackFragmentHeaderAtom(); + + //! + //! \brief Add TrackRun Atom + //! + //! \param [in] UniquePtr + //! TrackRun Atom + //! + //! \return void + //! + void AddTrackRunAtom(UniquePtr trackRunAtom); + + //! + //! \brief Get TrackRun Atoms + //! + //! \return std::vector + //! TrackRun Atoms + //! + std::vector GetTrackRunAtoms(); + + //! + //! \brief Set TrackFragmentDecodeTime Atom + //! + //! \param [in] UniquePtr + //! TrackFragmentDecodeTime Atom + //! + //! \return void + //! + void SetTrackFragmentDecodeTimeAtom(UniquePtr trackFragmentDecodeTimeAtom); + + //! + //! \brief Get TrackFragmentBaseMediaDecodeTime Atoms + //! + //! \return TrackFragmentBaseMediaDecodeTimeAtom* + //! TrackFragmentBaseMediaDecodeTime Atoms + //! + TrackFragmentBaseMediaDecodeTimeAtom* GetTrackFragmentBaseMediaDecodeTimeAtom(); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! 
\brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + TrackFragmentHeaderAtom m_trackFragmentHeaderAtom; //!< Track Fragment Header Atom + std::vector& m_sampleDefaults; //!< Sample Default array + std::vector> m_trackRunAtoms; //!< Contains TrackRunAtoms + UniquePtr m_trackFragmentDecodeTimeAtom; //!< Track Fragment Base Media Decode Time Atom +}; + +VCD_MP4_END; +#endif /* _TRACKFRAGMENTATOM_H_ */ diff --git a/src/player/MediaSource.cpp b/src/isolib/atoms/TrackGroupAtom.cpp similarity index 55% rename from src/player/MediaSource.cpp rename to src/isolib/atoms/TrackGroupAtom.cpp index 4fb38c45..f5b12888 100644 --- a/src/player/MediaSource.cpp +++ b/src/isolib/atoms/TrackGroupAtom.cpp @@ -22,46 +22,59 @@ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. - - * */ //! -//! \file MediaSource.cpp -//! \brief Implement class for MediaSource. +//! \file: TrackGroupAtom.cpp +//! \brief: TrackGroupAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM //! -#include "MediaSource.h" +#include "TrackGroupAtom.h" -VCD_NS_BEGIN +VCD_MP4_BEGIN + +TrackGroupAtom::TrackGroupAtom() + : Atom("trgr") +{ +} + +const std::vector& TrackGroupAtom::GetTrackGroupTypeAtoms() const +{ + return m_trackGroupTypeAtoms; +} -MediaSource::MediaSource() +void TrackGroupAtom::AddTrackGroupTypeAtom(const TrackGroupTypeAtom& trackGroupTypeAtom) { - m_mediaSourceInfo.width = 0; - m_mediaSourceInfo.height = 0; - m_mediaSourceInfo.projFormat = VCD::OMAF::PF_UNKNOWN; - m_mediaSourceInfo.pixFormat = PixelFormat::INVALID; - m_mediaSourceInfo.hasAudio = false; - m_mediaSourceInfo.audioChannel = 0; - m_mediaSourceInfo.stride = 0; - m_mediaSourceInfo.numberOfStreams = 0; - m_mediaSourceInfo.frameRate = 0; - m_mediaSourceInfo.duration = 0; - m_mediaSourceInfo.frameNum = 0; - m_mediaSourceInfo.currentFrameNum = 0; - m_mediaSourceInfo.sourceWH = NULL; - m_mediaSourceInfo.sourceNumber = 0; - isAllValid = false; - m_sourceType = MediaSourceType::SOURCE_NONE; + m_trackGroupTypeAtoms.push_back(trackGroupTypeAtom); } -MediaSource::~MediaSource() +void TrackGroupAtom::ToStream(Stream& str) { - if (m_mediaSourceInfo.sourceWH != NULL) + WriteAtomHeader(str); + + for (unsigned int i = 0; i < m_trackGroupTypeAtoms.size(); i++) + { + m_trackGroupTypeAtoms.at(i).ToStream(str); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void TrackGroupAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + while (str.BytesRemain() > 0) { - delete m_mediaSourceInfo.sourceWH; - m_mediaSourceInfo.sourceWH = NULL; + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + TrackGroupTypeAtom tracktypeAtom = TrackGroupTypeAtom(AtomType); + tracktypeAtom.FromStream(subBitstr); + m_trackGroupTypeAtoms.push_back(tracktypeAtom); } } -VCD_NS_END +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TrackGroupAtom.h b/src/isolib/atoms/TrackGroupAtom.h new file mode 100644 index 00000000..f46c7741 --- /dev/null +++ b/src/isolib/atoms/TrackGroupAtom.h @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackGroupAtom.h +//! \brief: Track Group Atom class. +//! \detail: Track Group Atom definition +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _TRACKGROUPATOM_H_ +#define _TRACKGROUPATOM_H_ + +#include +#include +#include "Atom.h" +#include "FormAllocator.h" +#include "TrackGroupTypeAtom.h" + +VCD_MP4_BEGIN + +class TrackGroupAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + TrackGroupAtom(); + + //! + //! \brief Destructor + //! + virtual ~TrackGroupAtom() = default; + + //! + //! \brief Get Track Group Type Atoms + //! + //! \return const std::vector& + //! Track Group Type Atoms + //! + const std::vector& GetTrackGroupTypeAtoms() const; + + //! + //! \brief Add Track Group Type Atom + //! + //! \param [in] const TrackGroupTypeAtom& + //! track Group Type Atom + //! + //! \return void + //! + void AddTrackGroupTypeAtom(const TrackGroupTypeAtom& trackGroupTypeAtom); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::vector m_trackGroupTypeAtoms; //!< Track Group Type Atom array +}; + +VCD_MP4_END; +#endif /* _TRACKGROUPATOM_H_ */ diff --git a/src/isolib/atoms/TrackGroupTypeAtom.cpp b/src/isolib/atoms/TrackGroupTypeAtom.cpp new file mode 100644 index 00000000..e724b560 --- /dev/null +++ b/src/isolib/atoms/TrackGroupTypeAtom.cpp @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackGroupTypeAtom.cpp +//! \brief: TrackGroupTypeAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "TrackGroupTypeAtom.h" + +VCD_MP4_BEGIN + +TrackGroupTypeAtom::TrackGroupTypeAtom(FourCCInt AtomType, std::uint32_t trackGroupId) + : FullAtom(AtomType, 0, 0) + , m_trackGroupId(trackGroupId) +{ +} + +std::uint32_t TrackGroupTypeAtom::GetTrackGroupId() const +{ + return m_trackGroupId; +} + +void TrackGroupTypeAtom::SetTrackGroupId(std::uint32_t trackGroupId) +{ + m_trackGroupId = trackGroupId; +} + +void TrackGroupTypeAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.Write32(m_trackGroupId); + UpdateSize(str); +} + +void TrackGroupTypeAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_trackGroupId = str.Read32(); + if (str.BytesRemain() && GetType() == "obsp") + { + if (str.BytesRemain() >= 4) + { + FourCCInt AtomType; + Stream subStream = str.ReadSubAtomStream(AtomType); + } + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TrackGroupTypeAtom.h b/src/isolib/atoms/TrackGroupTypeAtom.h new file mode 100644 index 00000000..a32f0fa7 --- /dev/null +++ b/src/isolib/atoms/TrackGroupTypeAtom.h @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackGroupTypeAtom.h +//! \brief: Track Group Type Atom class. +//! \detail: 'trgr' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _TRACKGROUPTYPEATOM_H_ +#define _TRACKGROUPTYPEATOM_H_ + +#include +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class TrackGroupTypeAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + TrackGroupTypeAtom(FourCCInt AtomType, std::uint32_t trackGroupId = 0); + + //! + //! \brief Destructor + //! + virtual ~TrackGroupTypeAtom() = default; + + //! + //! \brief Get Track Group Id + //! + //! \return std::uint32_t + //! Track Group Id + //! + std::uint32_t GetTrackGroupId() const; + + //! + //! \brief Set Track Group Id + //! + //! \param [in] std::uint32_t + //! Track Group Id value + //! + //! \return void + //! + void SetTrackGroupId(std::uint32_t trackGroupId); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + +private: + std::uint32_t m_trackGroupId; //!< indicates the grouping type +}; + +VCD_MP4_END; +#endif /* _TRACKGROUPTYPEATOM_H_ */ diff --git a/src/isolib/atoms/TrackHeaderAtom.cpp b/src/isolib/atoms/TrackHeaderAtom.cpp new file mode 100644 index 00000000..3867cddd --- /dev/null +++ b/src/isolib/atoms/TrackHeaderAtom.cpp @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackHeaderAtom.cpp +//! \brief: TrackHeaderAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "TrackHeaderAtom.h" + + +VCD_MP4_BEGIN + +TrackHeaderAtom::TrackHeaderAtom() + : FullAtom("tkhd", 0, 0) + , m_creationTime(0) + , m_modificationTime(0) + , m_trackID(0) + , m_duration(0) + , m_width(0) + , m_height(0) + , m_alternateGroup(0) + , m_volume(0) + , m_matrix({0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000}) +{ +} + +void TrackHeaderAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteFullAtomHeader(str); + + if (GetVersion() == 0) + { + str.Write32(static_cast(m_creationTime)); + str.Write32(static_cast(m_modificationTime)); + str.Write32(m_trackID); + str.Write32(0); + str.Write32(static_cast(m_duration)); + } + else if (GetVersion() == 1) + { + str.Write64(m_creationTime); + str.Write64(m_modificationTime); + str.Write32(m_trackID); + str.Write32(0); + str.Write64(m_duration); + } + else + { + ISO_LOG(LOG_ERROR, "ToStream() supports only 'tkhd' version 0 and version 1\n"); + throw Exception(); + } + + str.Write32(0); // Reserved + str.Write32(0); + + str.Write16(0); // Layer + str.Write16(m_alternateGroup); // Alternate Group + str.Write16(m_volume); // Volume + str.Write16(0); // Reserved + + for (auto value : m_matrix) + { + str.Write32(static_cast(value)); + } + + str.Write32(m_width); + str.Write32(m_height); + + UpdateSize(str); +} + +void TrackHeaderAtom::FromStream(Stream& str) +{ + // First parse the Atom header + ParseFullAtomHeader(str); + if ((GetVersion() != 0) && (GetVersion() != 1)) + { + ISO_LOG(LOG_ERROR, "FromStream() supports only 'tkhd' version 0 and version 1\n"); + throw Exception(); + } + + if (GetVersion() == 0) + { + m_creationTime = str.Read32(); + m_modificationTime = str.Read32(); + } + else + { + m_creationTime = str.Read64(); + m_modificationTime = str.Read64(); + } + m_trackID = str.Read32(); + str.Read32(); + if (GetVersion() == 0) + { + m_duration = str.Read32(); + } + else + { + m_duration = str.Read64(); + } + + str.Read32(); // Reserved + str.Read32(); + + str.Read16(); // Layer + m_alternateGroup = str.Read16(); // Alternate Group + m_volume = str.Read16(); // Volume + str.Read16(); // Reserved + + m_matrix.clear(); + for (int n = 9; n > 0; n--) // Matrix[9] + { + m_matrix.push_back(static_cast(str.Read32())); + } + + m_width = str.Read32(); + m_height = str.Read32(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TrackHeaderAtom.h b/src/isolib/atoms/TrackHeaderAtom.h new file mode 100644 index 00000000..7482da45 --- /dev/null +++ b/src/isolib/atoms/TrackHeaderAtom.h @@ -0,0 +1,238 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackHeaderAtom.h +//! \brief: TrackHeaderAtom class. +//! \detail: 'tkhd' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _TRACKHEADERATOM_H_CWQQ8ZSB_ +#define _TRACKHEADERATOM_H_CWQQ8ZSB_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +#include +#include + +VCD_MP4_BEGIN + +class TrackHeaderAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + TrackHeaderAtom(); + + //! + //! \brief Destructor + //! + virtual ~TrackHeaderAtom() = default; + + //! + //! \brief Set and Get function for m_creationTime member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_creationTime + //! m_creationTime member in class + //! \param [in] CreationTime + //! m_creationTime name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_creationTime, CreationTime, const); + + //! + //! \brief Set and Get function for m_modificationTime member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_modificationTime + //! m_modificationTime member in class + //! \param [in] ModificationTime + //! m_modificationTime name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_modificationTime, ModificationTime, const); + + //! + //! \brief Set and Get function for m_trackID member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_trackID + //! m_trackID member in class + //! \param [in] TrackID + //! m_trackID name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_trackID, TrackID, const); + + //! + //! \brief Set and Get function for m_duration member + //! + //! \param [in] std::uint64_t + //! value to set + //! \param [in] m_duration + //! m_duration member in class + //! \param [in] Duration + //! m_duration name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! 
+ MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint64_t, m_duration, Duration, const); + + //! + //! \brief Set and Get function for m_width member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_width + //! m_width member in class + //! \param [in] Width + //! m_width name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_width, Width, const); + + //! + //! \brief Set and Get function for m_height member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_height + //! m_height member in class + //! \param [in] Height + //! m_height name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_height, Height, const); + + //! + //! \brief Set and Get function for m_alternateGroup member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_alternateGroup + //! m_alternateGroup member in class + //! \param [in] AlternateGroup + //! m_alternateGroup name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_alternateGroup, AlternateGroup, const); + + //! + //! \brief Set and Get function for m_volume member + //! + //! \param [in] std::uint16_t + //! value to set + //! \param [in] m_volume + //! m_volume member in class + //! \param [in] Volume + //! m_volume name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint16_t, m_volume, Volume, const); + + //! + //! \brief Set and Get function for m_matrix member + //! + //! \param [in] std::vector + //! value to set + //! \param [in] m_matrix + //! m_matrix member in class + //! \param [in] Matrix + //! m_matrix name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::vector, m_matrix, Matrix, const); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); + +private: + std::uint64_t m_creationTime; //!< Creation time + std::uint64_t m_modificationTime; //!< Modificaiton time + std::uint32_t m_trackID; //!< Track ID + std::uint64_t m_duration; //!< Track's duration + std::uint32_t m_width; //!< Track display width + std::uint32_t m_height; //!< Track display height + std::uint16_t m_alternateGroup; //!< Alternate group Id of the track + std::uint16_t m_volume; //!< Volume (for audio tracks) + std::vector m_matrix; //!< Matrix +}; + +VCD_MP4_END; +#endif /* _TRACKHEADERATOM_H_CWQQ8ZSB_ */ diff --git a/src/isolib/atoms/TrackRefAtom.cpp b/src/isolib/atoms/TrackRefAtom.cpp new file mode 100644 index 00000000..70ba3f29 --- /dev/null +++ b/src/isolib/atoms/TrackRefAtom.cpp @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackRefAtom.cpp +//! \brief: TrackRefAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "TrackRefAtom.h" + +VCD_MP4_BEGIN + +TrackReferenceAtom::TrackReferenceAtom() + : Atom("tref") + , m_trefTypeAtoms() +{ +} + +void TrackReferenceAtom::ClearAtoms() +{ + m_trefTypeAtoms.clear(); +} + +void TrackReferenceAtom::AddAtom(TrackReferenceTypeAtom& trefTypeAtom) +{ + m_trefTypeAtoms.push_back(trefTypeAtom); +} + +const std::vector& TrackReferenceAtom::GetTypeAtoms() const +{ + return m_trefTypeAtoms; +} + +bool TrackReferenceAtom::IsRefTypePresent(FourCCInt type) const +{ + for (const auto& trackReferenceTypeAtom : m_trefTypeAtoms) + { + if (trackReferenceTypeAtom.GetType() == type) + { + return true; + } + } + + return false; +} + +void TrackReferenceAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteAtomHeader(str); + + // For each track reference type call its writeAtom method + for (auto& trefTypeAtom : m_trefTypeAtoms) + { + trefTypeAtom.ToStream(str); + } + + UpdateSize(str); +} + +void TrackReferenceAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + // Read sub-Atoms until no data is left in this Atom + while (str.BytesRemain() > 0) + { + // Extract the bitstream content of this Atom + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + TrackReferenceTypeAtom trefTypeAtom = TrackReferenceTypeAtom(AtomType); + trefTypeAtom.FromStream(subBitstr); + + m_trefTypeAtoms.push_back(trefTypeAtom); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TrackRefAtom.h b/src/isolib/atoms/TrackRefAtom.h new file mode 100644 index 00000000..b01f7622 --- /dev/null +++ b/src/isolib/atoms/TrackRefAtom.h @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackRefAtom.h +//! \brief: TrackRefAtom class. +//! \detail: 'tref' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _TRACKREFERENCEATOM_H_ +#define _TRACKREFERENCEATOM_H_ + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "TrackRefTypeAtom.h" + +VCD_MP4_BEGIN + +class TrackReferenceAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + TrackReferenceAtom(); + + //! + //! \brief Destructor + //! + virtual ~TrackReferenceAtom() = default; + + //! + //! \brief Clear Atoms + //! + void ClearAtoms(); + + //! + //! \brief Add Atom function + //! + //! \param [in] TrackReferenceTypeAtom& + //! tref Type Atom value + //! + //! \return void + //! + void AddAtom(TrackReferenceTypeAtom& trefTypeAtom); + + //! + //! \brief Get Type Atoms + //! + //! \return const std::vector& + //! Type Atoms + //! + const std::vector& GetTypeAtoms() const; + + //! + //! \brief Is Ref Type Present or not + //! + //! \param [in] FourCCInt + //! type + //! + //! \return bool + //! is or not + //! + bool IsRefTypePresent(FourCCInt type) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::vector m_trefTypeAtoms; //!< Track Reference Type Atoms +}; + +VCD_MP4_END; +#endif /* _TRACKREFERENCEATOM_H_ */ diff --git a/src/isolib/atoms/TrackRefTypeAtom.cpp b/src/isolib/atoms/TrackRefTypeAtom.cpp new file mode 100644 index 00000000..104420d9 --- /dev/null +++ b/src/isolib/atoms/TrackRefTypeAtom.cpp @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackRefTypeAtom.cpp +//! \brief: TrackRefTypeAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "TrackRefTypeAtom.h" + + +VCD_MP4_BEGIN + +TrackReferenceTypeAtom::TrackReferenceTypeAtom(FourCCInt trefType) + : Atom(trefType) + , m_trackId() +{ +} + +void TrackReferenceTypeAtom::SetTrackIds(const VectorT& trackId) +{ + m_trackId = trackId; +} + +const VectorT& TrackReferenceTypeAtom::GetTrackIds() const +{ + return m_trackId; +} + +void TrackReferenceTypeAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + + for (auto trackId : m_trackId) + { + str.Write32(trackId); + } + + UpdateSize(str); +} + +void TrackReferenceTypeAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + // if there a data available in the file + while (str.BytesRemain() > 0) + { + m_trackId.push_back(str.Read32()); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TrackRefTypeAtom.h b/src/isolib/atoms/TrackRefTypeAtom.h new file mode 100644 index 00000000..d7d8b11b --- /dev/null +++ b/src/isolib/atoms/TrackRefTypeAtom.h @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackRefTypeAtom.h +//! \brief: TrackRefTypeAtom class. +//! \detail: Track Reference Type Atom definition +//! +//! Created on October 16, 2019, 13:39 PM +//! 
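The 'tref' machinery introduced above has a very small wire layout: TrackReferenceAtom ('tref') is only a container, and each TrackReferenceTypeAtom child writes an atom header followed by one 32-bit track ID per referenced track. The sketch below is illustrative only and not part of the patch; it assumes the VCD_MP4_BEGIN/VCD_MP4_END macros open a VCD::MP4 namespace, that FourCCInt converts from a four-character literal (as it is used elsewhere in this changeset), and it uses "cdsc" purely as an example reference type.

```cpp
// Illustrative sketch only -- not part of the patch.
// Round-trips a 'tref' atom containing one 'cdsc' reference through a Stream.
#include "TrackRefAtom.h"
#include "TrackRefTypeAtom.h"
#include "Stream.h"

using namespace VCD::MP4;  // assumption: VCD_MP4_BEGIN/VCD_MP4_END wrap this namespace

void TrefRoundTripSketch()
{
    TrackReferenceTypeAtom refType("cdsc");   // example reference type
    VectorT<std::uint32_t> ids;               // VectorT as declared in TrackRefTypeAtom.h
    ids.push_back(2);
    ids.push_back(3);
    refType.SetTrackIds(ids);

    TrackReferenceAtom tref;                  // 'tref' container
    tref.AddAtom(refType);

    Stream out;
    tref.ToStream(out);                       // atom header + one 32-bit ID per referenced track

    out.Reset();                              // same pattern as UriMetaSampleEntryAtom::Clone()
    TrackReferenceAtom parsed;
    parsed.FromStream(out);                   // reads child atoms until BytesRemain() == 0
    bool hasCdsc = parsed.IsRefTypePresent("cdsc");
    (void) hasCdsc;
}
```

The same ReadSubAtomStream() loop appears in TrackGroupAtom and TrackReferenceAtom above: a container atom's payload is nothing but its children, so parsing simply consumes sub-atoms until the payload is exhausted.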
+ +#ifndef _TRACKREFERENCETYPEATOM_H_ +#define _TRACKREFERENCETYPEATOM_H_ + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +template +using VectorT = std::vector>; + +class TrackReferenceTypeAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + TrackReferenceTypeAtom(FourCCInt trefType); + + //! + //! \brief Destructor + //! + virtual ~TrackReferenceTypeAtom() = default; + + //! + //! \brief Set track Ids + //! + //! \param [in] const VectorT& + //! track Ids + //! + //! \return void + //! + void SetTrackIds(const VectorT& trackId); + + //! + //! \brief Get track Ids + //! + //! \return VectorT& + //! track Ids + //! + const VectorT& GetTrackIds() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + VectorT m_trackId; //!< std::vector of referenced track Ids +}; + +VCD_MP4_END; +#endif /* _TRACKREFERENCETYPEATOM_H_ */ diff --git a/src/isolib/atoms/TrackRunAtom.cpp b/src/isolib/atoms/TrackRunAtom.cpp new file mode 100644 index 00000000..5563b7bc --- /dev/null +++ b/src/isolib/atoms/TrackRunAtom.cpp @@ -0,0 +1,234 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackRunAtom.cpp +//! \brief: TrackRunAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! 
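The TrackRunAtom ('trun') implementation that follows gates every optional field on a bit in the atom's 24-bit flags word: data_offset, first_sample_flags, and each per-sample field (duration, size, flags, composition time offset) is written and read only when its flag is set. A minimal construction sketch, illustrative only and not part of the patch; it assumes the VCD::MP4 namespace and that MEMBER_SETANDGET_FUNC_WITH_OPTION generates a SetSampleNum() setter, as its documentation in TrackRunAtom.h suggests.

```cpp
// Illustrative sketch only -- not part of the patch.
// Builds a one-sample 'trun' where only the per-sample size is signalled,
// mirroring the flag-gated serialization in TrackRunAtom::ToStream() below.
#include "TrackRunAtom.h"
#include "Stream.h"

using namespace VCD::MP4;  // assumption: VCD_MP4_BEGIN/VCD_MP4_END wrap this namespace

void TrunSketch()
{
    TrackRunAtom trun(0, TrackRunAtom::pSampleSize);  // version 0; flags carry pSampleSize only
    trun.SetSampleNum(1);                             // setter assumed from the SETANDGET macro
    trun.SetDataOffset(8);                            // also raises the pDataOffset flag bit

    TrackRunAtom::SampleDetails sample{};
    sample.version0.pSize = 1024;                     // the only per-sample field that will be written
    trun.AddSampleDetails(sample);

    Stream out;
    trun.ToStream(out);  // writes: full-atom header, sample count, data offset, then one 32-bit size
}
```

On the read side, FromStream() applies the same masks in the same order and falls back to the defaults installed via SetSampleDefaults() for any per-sample field whose flag is absent.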
+#include "TrackRunAtom.h" +#include + +VCD_MP4_BEGIN + +TrackRunAtom::TrackRunAtom(uint8_t version, std::uint32_t tr_flags) + : FullAtom("trun", version, tr_flags) + , m_sampleDefaultsSet(false) + , m_sampleDefaults() + , m_sampleNum(0) + , m_dataOffset(0) + , m_firstSampleFlags() + , m_sampleDetails() +{ +} + +void TrackRunAtom::SetDataOffset(const int32_t dataOffset) +{ + m_dataOffset = dataOffset; + SetFlags(GetFlags() | TrackRunFlags::pDataOffset); +} + +int32_t TrackRunAtom::GetDataOffset() const +{ + if ((GetFlags() & TrackRunFlags::pDataOffset) != 0) + { + return m_dataOffset; + } + else + { + ISO_LOG(LOG_ERROR, "GetDataOffset() according to flags pDataOffset not present.\n"); + throw Exception(); + } +} + +void TrackRunAtom::SetFirstSampleFlags(const SampleFlags firstSampleFlags) +{ + m_firstSampleFlags = firstSampleFlags; + SetFlags(GetFlags() | TrackRunFlags::pFirstSampleFlags); +} + +SampleFlags TrackRunAtom::GetFirstSampleFlags() const +{ + if ((GetFlags() & TrackRunFlags::pFirstSampleFlags) != 0) + { + return m_firstSampleFlags; + } + else + { + ISO_LOG(LOG_ERROR, "GetFirstSampleFlags() according to flags pFirstSampleFlags not present\n"); + throw Exception(); + } +} + +void TrackRunAtom::AddSampleDetails(SampleDetails pDetails) +{ + m_sampleDetails.push_back(pDetails); +} + +const std::vector& TrackRunAtom::GetSampleDetails() const +{ + return m_sampleDetails; +} + +void TrackRunAtom::SetSampleDefaults(SampleDefaults& sampleDefaults) +{ + m_sampleDefaultsSet = true; + m_sampleDefaults = sampleDefaults; +} + +void TrackRunAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + + str.Write32(m_sampleNum); + if ((GetFlags() & TrackRunFlags::pDataOffset) != 0) + { + str.Write32(static_cast(m_dataOffset)); + } + if ((GetFlags() & TrackRunFlags::pFirstSampleFlags) != 0) + { + SampleFlags::Write(str, m_firstSampleFlags); + } + + for (uint32_t i = 0; i < m_sampleNum; i++) + { + if ((GetFlags() & TrackRunFlags::pSampleDuration) != 0) + { + str.Write32(m_sampleDetails.at(i).version0.pDuration); + } + if ((GetFlags() & TrackRunFlags::pSampleSize) != 0) + { + str.Write32(m_sampleDetails.at(i).version0.pSize); + } + if ((GetFlags() & TrackRunFlags::pFirstSampleFlags) == 0) + { + if ((GetFlags() & TrackRunFlags::pSampleFlags) != 0) + { + SampleFlags::Write(str, m_sampleDetails.at(i).version0.pFlags); + } + } + if ((GetFlags() & TrackRunFlags::pSampleCompTimeOffsets) != 0) + { + if (GetVersion() == 0) + { + str.Write32(m_sampleDetails.at(i).version0.pCompTimeOffset); + } + else + { + str.Write32(static_cast(m_sampleDetails.at(i).version1.pCompTimeOffset)); + } + } + } + UpdateSize(str); +} + +void TrackRunAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + m_sampleNum = str.Read32(); + if (m_sampleNum > ABMAX_SAMP_CNT) + { + ISO_LOG(LOG_ERROR, "Over max sample counts from TrackRunAtom::FromStreamAtom\n"); + throw Exception(); + } + if ((GetFlags() & TrackRunFlags::pDataOffset) != 0) + { + m_dataOffset = static_cast(str.Read32()); + } + if ((GetFlags() & TrackRunFlags::pFirstSampleFlags) != 0) + { + m_firstSampleFlags = SampleFlags::Read(str); + } + + SampleDetails pDetails; + for (uint32_t i = 0; i < m_sampleNum; i++) + { + if (m_sampleDefaultsSet) + { + pDetails.version0.pDuration = m_sampleDefaults.defaultSampleDuration; + pDetails.version0.pSize = m_sampleDefaults.defaultSampleSize; + pDetails.version0.pFlags.flagsAsUInt = m_sampleDefaults.defaultSampleFlags.flagsAsUInt; + } + else + { + // these should never be used if right Atoms are present. 
+ pDetails.version0.pDuration = 0; + pDetails.version0.pSize = 0; + pDetails.version0.pFlags.flagsAsUInt = 0; + } + + if ((GetFlags() & TrackRunFlags::pSampleDuration) != 0) + { + pDetails.version0.pDuration = str.Read32(); + } + + if ((GetFlags() & TrackRunFlags::pSampleSize) != 0) + { + pDetails.version0.pSize = str.Read32(); + } + + if ((GetFlags() & TrackRunFlags::pFirstSampleFlags) != 0) + { + pDetails.version0.pFlags.flagsAsUInt = m_firstSampleFlags.flagsAsUInt; + + if (i > 0) + { + pDetails.version0.pFlags.flags.sample_is_non_sync_sample = 1; + } + } + else if ((GetFlags() & TrackRunFlags::pSampleFlags) != 0) + { + pDetails.version0.pFlags = SampleFlags::Read(str); + } + + if ((GetFlags() & TrackRunFlags::pSampleCompTimeOffsets) != 0) + { + if (GetVersion() == 0) + { + pDetails.version0.pCompTimeOffset = str.Read32(); + } + else + { + pDetails.version1.pCompTimeOffset = static_cast(str.Read32()); + } + } + else + { + if (GetVersion() == 0) + { + pDetails.version0.pCompTimeOffset = 0; + } + else + { + pDetails.version1.pCompTimeOffset = 0; + } + } + m_sampleDetails.push_back(pDetails); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TrackRunAtom.h b/src/isolib/atoms/TrackRunAtom.h new file mode 100644 index 00000000..a7cd5b4f --- /dev/null +++ b/src/isolib/atoms/TrackRunAtom.h @@ -0,0 +1,199 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TrackRunAtom.h +//! \brief: Track Run Atom class +//! \detail: 'trun' Atom +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef TRACKRUNATOM_H +#define TRACKRUNATOM_H + +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" +#include "MovieFragDataTypes.h" + +VCD_MP4_BEGIN + +class TrackRunAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + TrackRunAtom(uint8_t version = 0, std::uint32_t tr_flags = 0); + + //! + //! \brief Destructor + //! 
+ virtual ~TrackRunAtom() = default; + + struct SampleConfig0 //!< Sample Configuratin v0 + { + uint32_t pDuration; + uint32_t pSize; + SampleFlags pFlags; + uint32_t pCompTimeOffset; + }; + + struct SampleConfig1 //!< Sample Configuratin v1 + { + uint32_t pDuration; + uint32_t pSize; + SampleFlags pFlags; + int32_t pCompTimeOffset; + }; + + union SampleDetails { //!< Sample Details + SampleConfig0 version0; + SampleConfig1 version1; + }; + + enum TrackRunFlags //!< Track Run Flags + { + pDataOffset = 0x000001, + pFirstSampleFlags = 0x000004, + pSampleDuration = 0x000100, + pSampleSize = 0x000200, + pSampleFlags = 0x000400, + pSampleCompTimeOffsets = 0x000800 + }; + + //! + //! \brief Set and Get function for m_sampleNum member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_sampleNum + //! m_sampleNum member in class + //! \param [in] SampleNum + //! m_sampleNum name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(uint32_t, m_sampleNum, SampleNum, const); + + //! + //! \brief Set Data Offset + //! + //! \param [in] const int32_t + //! Data Offset value + //! + //! \return void + //! + void SetDataOffset(const int32_t dataOffset); + + //! + //! \brief Get Data Offset + //! + //! \return int32_t + //! Data Offset + //! + int32_t GetDataOffset() const; + + //! + //! \brief Set First Sample Flags + //! + //! \param [in] const SampleFlags + //! First Sample Flags + //! + //! \return void + //! + void SetFirstSampleFlags(const SampleFlags firstSampleFlags); + + //! + //! \brief Get First Sample Flags + //! + //! \return SampleFlags + //! First Sample Flags + //! + SampleFlags GetFirstSampleFlags() const; + + //! + //! \brief Add Sample Details + //! + //! \param [in] SampleDetails + //! Sample Details + //! + //! \return void + //! + void AddSampleDetails(SampleDetails sampleDetails); + + //! + //! \brief Get First Sample Details + //! + //! \return const std::vector& + //! First Sample Details + //! + const std::vector& GetSampleDetails() const; + + //! + //! \brief Set Sample Defaults + //! + //! \param [in] SampleDefaults + //! Sample Defaults + //! + //! \return void + //! + void SetSampleDefaults(SampleDefaults& sampleDefaults); + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + bool m_sampleDefaultsSet; //!< sample Defaults Set + SampleDefaults m_sampleDefaults; //!< Sample Defaults + uint32_t m_sampleNum; //!< sample Num + int32_t m_dataOffset; //!< data Offset + SampleFlags m_firstSampleFlags; //!< first Sample Flags + std::vector m_sampleDetails; //!< sample Details +}; + +VCD_MP4_END; +#endif /* TRACKRUNATOM_H */ diff --git a/src/isolib/atoms/TypeAtom.cpp b/src/isolib/atoms/TypeAtom.cpp new file mode 100644 index 00000000..bf1c79bb --- /dev/null +++ b/src/isolib/atoms/TypeAtom.cpp @@ -0,0 +1,190 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TypeAtom.cpp +//! \brief: TypeAtom class implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! +#include "TypeAtom.h" +#include +#include "FullAtom.h" + + +VCD_MP4_BEGIN + +template <> +BrandAtom::BrandAtom(FourCCInt AtomType) + : Atom(AtomType) + , m_majorBrand() + , m_minorVersion(0) + , m_compatibleBrands() +{ +} + +template <> +BrandAtom::BrandAtom(FourCCInt AtomType, + std::uint8_t version, + std::uint32_t flags) + : FullAtom(AtomType, version, flags) + , m_majorBrand() + , m_minorVersion(0) + , m_compatibleBrands() +{ +} + +template +void BrandAtom::SetMajorBrand(const std::string& majorBrand) +{ + m_majorBrand = majorBrand; +} + +template +const std::string& BrandAtom::GetMajorBrand() const +{ + return m_majorBrand; +} + +template +void BrandAtom::SetMinorVersion(uint32_t minorVersion) +{ + m_minorVersion = minorVersion; +} + +template +uint32_t BrandAtom::GetMinorVersion() const +{ + return m_minorVersion; +} + +template +void BrandAtom::AddCompatibleBrand(const std::string& compatibleBrand) +{ + m_compatibleBrands.push_back(compatibleBrand); +} + +template +std::vector BrandAtom::GetCompatibleBrands() const +{ + return m_compatibleBrands; +} + +template +bool BrandAtom::CheckCompatibleBrand(const std::string& brand) const +{ + if (find(m_compatibleBrands.begin(), m_compatibleBrands.end(), brand) != m_compatibleBrands.end()) + { + return true; + } + else + { + return false; + } +} + +template <> +void BrandAtom::WriteHeader(Stream& str) +{ + WriteAtomHeader(str); +} + +template <> +void BrandAtom::WriteHeader(Stream& str) +{ + WriteFullAtomHeader(str); +} + +template +void BrandAtom::ToStream(Stream& str) +{ + // Write the Atom header + WriteHeader(str); // write parent Atom + + // major_brand + str.WriteString(m_majorBrand); + // minor_version + str.Write32(m_minorVersion); + // compatible_brands[] + for (auto const& brand : m_compatibleBrands) + { + str.WriteString(brand); + } + + // Update the size field of this Atom in the bitstream + T::UpdateSize(str); +} + +template <> +void BrandAtom::ParseHeader(Stream& str) +{ + ParseAtomHeader(str); +} + +template <> +void BrandAtom::ParseHeader(Stream& str) +{ + ParseFullAtomHeader(str); +} + +template +void 
BrandAtom::FromStream(Stream& str) +{ + // Parse the Atom or fullAtom header + ParseHeader(str); + + // major_brand + str.ReadStringWithLen(m_majorBrand, 4); + // minor_version + m_minorVersion = str.Read32(); + // compatible_brands[] + while (str.BytesRemain() >= 4) + { + std::string compatibleBrand; + str.ReadStringWithLen(compatibleBrand, 4); + m_compatibleBrands.push_back(compatibleBrand); + } +} + +template class BrandAtom; +template class BrandAtom; + +FileTypeAtom::FileTypeAtom() +: BrandAtom("ftyp") +{ +} + +TrackTypeAtom::TrackTypeAtom() + : BrandAtom("ttyp", 0, 0) +{ +} + +SegmentTypeAtom::SegmentTypeAtom() + : BrandAtom("styp") +{ +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/TypeAtom.h b/src/isolib/atoms/TypeAtom.h new file mode 100644 index 00000000..68e30f5a --- /dev/null +++ b/src/isolib/atoms/TypeAtom.h @@ -0,0 +1,222 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: TypeAtom.h +//! \brief: TypeAtom definition +//! \detail: Contains brandAtom, FileTypeAtom, TrackTypeAtom, SegmentTypeAtom +//! +//! Created on October 14, 2019, 13:39 PM +//! +#ifndef _TYPEATOM_H_ +#define _TYPEATOM_H_ + +#include "Atom.h" +#include "Stream.h" +#include "FormAllocator.h" +#include "FullAtom.h" + +#include + +VCD_MP4_BEGIN + +template +class BrandAtom : public T +{ +public: + + //! + //! \brief Constructor + //! + BrandAtom(FourCCInt AtomType); + BrandAtom(FourCCInt AtomType, std::uint8_t version, std::uint32_t flags = 0); + + //! + //! \brief Destructor + //! + virtual ~BrandAtom() = default; + + //! + //! \brief Set MajorBrand + //! + //! \param [in] const std::string& + //! MajorBrand value + //! + //! \return void + //! + void SetMajorBrand(const std::string& value); + + //! + //! \brief Get MajorBrand + //! + //! \return const std::string& + //! MajorBrand + //! + const std::string& GetMajorBrand() const; + + //! + //! \brief Set MinorVersion + //! + //! \param [in] std::uint32_t + //! MinorVersion value + //! + //! \return void + //! + void SetMinorVersion(std::uint32_t version); + + //! + //! \brief Get MinorVersion + //! + //! \return uint32_t + //! MinorVersion + //! + uint32_t GetMinorVersion() const; + + //! + //! 
\brief Add Compatible Brand + //! + //! \param [in] const std::string& + //! Compatible Brand value + //! + //! \return void + //! + void AddCompatibleBrand(const std::string& value); + + //! + //! \brief Get CompatibleBrands + //! + //! \return std::vector + //! CompatibleBrands + //! + std::vector GetCompatibleBrands() const; + + //! + //! \brief Check CompatibleBrand existed + //! + //! \param [in] const std::string& + //! value + //! + //! \return bool + //! check result + //! + bool CheckCompatibleBrand(const std::string& value) const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + + //! + //! \brief Write Header information + //! + //! \param [in] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void WriteHeader(Stream& str); + + //! + //! \brief Parse Header information + //! + //! \param [in] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ParseHeader(Stream& str); + + std::string m_majorBrand; //!< Major Brand as string value + std::uint32_t m_minorVersion; //!< Minor version as an unsigned integer + std::vector m_compatibleBrands; //!< std::vector containing the Compatible Brands as strings +}; + + +class FileTypeAtom : public BrandAtom +{ +public: + + //! + //! \brief Constructor + //! + FileTypeAtom(); + + //! + //! \brief Destructor + //! + virtual ~FileTypeAtom() = default; + +}; + +class TrackTypeAtom : public BrandAtom +{ +public: + + //! + //! \brief Constructor + //! + TrackTypeAtom(); + + //! + //! \brief Constructor + //! + virtual ~TrackTypeAtom() = default; +}; + +class SegmentTypeAtom : public BrandAtom +{ +public: + + //! + //! \brief Constructor + //! + SegmentTypeAtom(); + + //! + //! \brief Constructor + //! + virtual ~SegmentTypeAtom() = default; +}; + +VCD_MP4_END; +#endif /* _TYPEATOM_H_ */ diff --git a/src/isolib/atoms/UriMetaSampEntryAtom.cpp b/src/isolib/atoms/UriMetaSampEntryAtom.cpp new file mode 100644 index 00000000..5e7e0921 --- /dev/null +++ b/src/isolib/atoms/UriMetaSampEntryAtom.cpp @@ -0,0 +1,220 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: UriMetaSampEntryAtom.cpp +//! \brief: UriMetaSampEntryAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "UriMetaSampEntryAtom.h" + +VCD_MP4_BEGIN + +UriAtom::UriAtom() + : FullAtom("uri ", 0, 0) + , m_uri() +{ +} + +void UriAtom::SetUri(const std::string& uri) +{ + m_uri = uri; +} + +const std::string& UriAtom::GetUri() const +{ + return m_uri; +} + +void UriAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.WriteZeroEndString(m_uri); + UpdateSize(str); +} + +void UriAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + str.ReadZeroEndString(m_uri); +} + +UriInitAtom::UriInitAtom() + : FullAtom("uriI", 0, 0) + , m_initAtomType(UriInitAtom::InitAtomMode::UNKNOWN) + , m_uriInitializationData() +{ +} + +UriInitAtom::InitAtomMode UriInitAtom::GetInitAtomMode() const +{ + return m_initAtomType; +} + +void UriInitAtom::SetInitAtomMode(UriInitAtom::InitAtomMode dataType) +{ + m_initAtomType = dataType; +} + +void UriInitAtom::SetUriInitializationData(const std::vector& uriInitData) +{ + m_initAtomType = InitAtomMode::UNKNOWN; + m_uriInitializationData = uriInitData; +} + +std::vector UriInitAtom::GetUriInitializationData() const +{ + return m_uriInitializationData; +} + +void UriInitAtom::ToStream(Stream& str) +{ + WriteFullAtomHeader(str); + str.WriteArray(m_uriInitializationData, static_cast(m_uriInitializationData.size())); + + UpdateSize(str); +} + +void UriInitAtom::FromStream(Stream& str) +{ + ParseFullAtomHeader(str); + + const uint64_t BytesRemain = str.BytesRemain(); + if (BytesRemain >= 8) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + m_uriInitializationData.clear(); + m_uriInitializationData.reserve(BytesRemain); + subBitstr.ReadArray(m_uriInitializationData, BytesRemain); + } +} + +UriMetaSampleEntryAtom::UriMetaSampleEntryAtom() + : MetaDataSampleEntryAtom("urim") + , m_uriAtom() + , m_vRMetaDataType(VRTMDType::UNKNOWN) + , m_hasUriInitAtom(false) + , m_uriInitAtom() +{ +} + +UriAtom& UriMetaSampleEntryAtom::GetUriAtom() +{ + return m_uriAtom; +} + +bool UriMetaSampleEntryAtom::HasUriInitAtom() +{ + return m_hasUriInitAtom; +} + +UriInitAtom& UriMetaSampleEntryAtom::GetUriInitAtom() +{ + return m_uriInitAtom; +} + +UriMetaSampleEntryAtom::VRTMDType UriMetaSampleEntryAtom::GetVRTMDType() const +{ + return m_vRMetaDataType; +} + +void UriMetaSampleEntryAtom::ToStream(Stream& str) +{ + MetaDataSampleEntryAtom::ToStream(str); + m_uriAtom.ToStream(str); + + if (m_hasUriInitAtom) + { + m_uriInitAtom.ToStream(str); + } + + UpdateSize(str); +} + +void UriMetaSampleEntryAtom::FromStream(Stream& str) +{ + MetaDataSampleEntryAtom::FromStream(str); + UriInitAtom::InitAtomMode initAtomType = UriInitAtom::InitAtomMode::UNKNOWN; + + bool uriFound = false; + while (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + if (AtomType == "uri ") + { + m_uriAtom.FromStream(subBitstr); + uriFound = 
true; + m_vRMetaDataType = VRTMDType::UNKNOWN; + } + else if (AtomType == "uriI") + { + m_hasUriInitAtom = true; + m_uriInitAtom.SetInitAtomMode(initAtomType); + m_uriInitAtom.FromStream(subBitstr); + } + // unsupported Atoms are skipped + } + if (!uriFound) + { + ISO_LOG(LOG_ERROR, "UriMetaSampleEntryAtom couldn't found URI Atom\n"); + throw Exception(); + } +} + +UriMetaSampleEntryAtom* UriMetaSampleEntryAtom::Clone() const +{ + UriMetaSampleEntryAtom* Atom = new UriMetaSampleEntryAtom(); + + auto mutableThis = const_cast(this); + + { + Stream bs; + mutableThis->ToStream(bs); + bs.Reset(); + Atom->FromStream(bs); + } + + return Atom; +} + +const Atom* UriMetaSampleEntryAtom::GetConfigurationAtom() const +{ + ISO_LOG(LOG_ERROR, "UriMetaSampleEntryAtom::GetConfigurationAtom() not impelmented \n"); + return nullptr; +} + +const DecoderConfigurationRecord* UriMetaSampleEntryAtom::GetConfigurationRecord() const +{ + ISO_LOG(LOG_ERROR, "UriMetaSampleEntryAtom::GetConfigurationRecord() not impelmented\n"); + return nullptr; +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/UriMetaSampEntryAtom.h b/src/isolib/atoms/UriMetaSampEntryAtom.h new file mode 100644 index 00000000..be9aef8b --- /dev/null +++ b/src/isolib/atoms/UriMetaSampEntryAtom.h @@ -0,0 +1,284 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: UriMetaSampEntryAtom.h +//! \brief: UriMetaSampEntryAtom class. +//! \detail: Uri Meta Sample Entry Atom +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _URIMETASAMPLEENTRY_H_ +#define _URIMETASAMPLEENTRY_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "MetaDataSampEntryAtom.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class UriAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + UriAtom(); + + //! + //! \brief Destructor + //! + virtual ~UriAtom() = default; + + //! + //! \brief Set Uri + //! + //! \param [in] const std::string& + //! Uri value + //! + //! \return void + //! + void SetUri(const std::string& uri); + + //! + //! \brief Get Uri + //! + //! \return const std::string& + //! Uri + //! + const std::string& GetUri() const; + + //! + //! 
\brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::string m_uri; //!< the URI +}; + +class UriInitAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + UriInitAtom(); + + //! + //! \brief Destructor + //! + virtual ~UriInitAtom() = default; + + enum class InitAtomMode //!< Init Atom Mode + { + UNKNOWN = 0 + }; + + //! + //! \brief Get Init Atom Mode + //! + //! \return InitAtomMode + //! Init Atom Mode + //! + InitAtomMode GetInitAtomMode() const; + + //! + //! \brief Set Init Atom Mode + //! + //! \param [in] InitAtomMode + //! Init Atom Mode value + //! + //! \return void + //! + void SetInitAtomMode(InitAtomMode dataType); + + //! + //! \brief Set Uri Initialization Data + //! + //! \param [in] const std::vector& + //! uri Init Data + //! + //! \return void + //! + void SetUriInitializationData(const std::vector& uriInitData); + + //! + //! \brief Get Uri Initialization Data + //! + //! \return std::vector + //! Uri Initialization Data + //! + std::vector GetUriInitializationData() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + InitAtomMode m_initAtomType; //!< Init Atom Mode + std::vector m_uriInitializationData; //!< uri Initialization Data +}; + +class UriMetaSampleEntryAtom : public MetaDataSampleEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + UriMetaSampleEntryAtom(); + + //! + //! \brief Destructor + //! + virtual ~UriMetaSampleEntryAtom() = default; + + enum class VRTMDType //!< VRTMD Type + { + UNKNOWN = 0 + }; + + //! + //! \brief Get UriAtom + //! + //! \return UriAtom& + //! UriAtom + //! + UriAtom& GetUriAtom(); + + //! + //! \brief has Uri init Atom + //! + //! \return bool + //! has or not + //! + bool HasUriInitAtom(); + + //! + //! \brief get Uri init Atom + //! + //! \return UriInitAtom& + //! Uri Init Atom + //! + UriInitAtom& GetUriInitAtom(); + + //! + //! \brief get VRTMD Type + //! + //! \return VRTMDType + //! VRTMD Type + //! + VRTMDType GetVRTMDType() const; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief Get Copy of UriMetaSampleEntryAtom + //! + //! \return UriMetaSampleEntryAtom* + //! UriMetaSampleEntry Atom + //! + virtual UriMetaSampleEntryAtom* Clone() const; + + //! + //! \brief Get ConfigurationRecord + //! + //! \return const DecoderConfigurationRecord* + //! DecoderConfigurationRecord value + //! + virtual const DecoderConfigurationRecord* GetConfigurationRecord() const override; + + //! 
+ //! \brief Get Configuration Atom + //! + //! \return const Atom* + //! Configuration Atom + //! + virtual const Atom* GetConfigurationAtom() const override; + +private: + UriAtom m_uriAtom; //!< Uri Atom + VRTMDType m_vRMetaDataType; //!< VRTMD Type + bool m_hasUriInitAtom; //!< has Uri Init Atom + UriInitAtom m_uriInitAtom; //!< Uri Init Atom +}; + +VCD_MP4_END; +#endif /* _URIMETASAMPLEENTRY_H_ */ diff --git a/src/isolib/atoms/UserDataAtom.cpp b/src/isolib/atoms/UserDataAtom.cpp new file mode 100644 index 00000000..06cd3ca4 --- /dev/null +++ b/src/isolib/atoms/UserDataAtom.cpp @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: UserDataAtom.cpp +//! \brief: UserDataAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! 
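+//! \detail: UserDataAtom ('udta') keeps each contained child atom as an
+//!          opaque, already-serialized bitstream keyed by its FourCC.
+//!          AddAtom() serializes the child into a Stream; GetAtom() looks up
+//!          the stored Stream by the FourCC of the caller-supplied atom and
+//!          re-parses it into that object. Illustrative sketch only (the
+//!          child atom type named here is hypothetical):
+//!              UserDataAtom udta;
+//!              CopyrightAtom cprt;      // any Atom subclass
+//!              udta.AddAtom(cprt);      // stored under its FourCC
+//!              CopyrightAtom out;
+//!              bool found = udta.GetAtom(out);
+//!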
+ +#include "UserDataAtom.h" +#include "Stream.h" + +VCD_MP4_BEGIN + +UserDataAtom::UserDataAtom() + : Atom("udta") +{ +} + +/** @brief Retrieve the Atoms inside the Atom **/ +bool UserDataAtom::GetAtom(Atom& atom) const +{ + FourCCInt AtomType = atom.GetType(); + auto bsIt = m_bitStreams.find(AtomType); + if (bsIt != m_bitStreams.end()) + { + // Copy as Atom::FromStreamAtom takes a non-const argument + Stream bs = bsIt->second; + atom.FromStream(bs); + return true; + } + else + { + return false; + } +} + +void UserDataAtom::AddAtom(Atom& atom) +{ + Stream bitstream; + atom.ToStream(bitstream); + bitstream.Reset(); + m_bitStreams[atom.GetType()] = std::move(bitstream); +} + +void UserDataAtom::ToStream(Stream& str) +{ + WriteAtomHeader(str); + + for (auto& typeAndBitstream : m_bitStreams) + { + str.WriteStream(typeAndBitstream.second); + } + + UpdateSize(str); +} + +void UserDataAtom::FromStream(Stream& str) +{ + ParseAtomHeader(str); + + while (str.BytesRemain() > 0) + { + FourCCInt AtomType; + Stream subBitstr = str.ReadSubAtomStream(AtomType); + + m_bitStreams[AtomType] = std::move(subBitstr); + } +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/UserDataAtom.h b/src/isolib/atoms/UserDataAtom.h new file mode 100644 index 00000000..18abd0ce --- /dev/null +++ b/src/isolib/atoms/UserDataAtom.h @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: UserDataAtom.h +//! \brief: User Data Atom class +//! \detail: User Data Atom definition +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _USERDATAATOM_H_ +#define _USERDATAATOM_H_ + +#include +#include +#include "Atom.h" +#include "FormAllocator.h" + +VCD_MP4_BEGIN + +class UserDataAtom : public Atom +{ +public: + + //! + //! \brief Constructor + //! + UserDataAtom(); + + //! + //! \brief Destructor + //! + virtual ~UserDataAtom() = default; + + //! + //! \brief Get Atom + //! + //! \param [in] Atom& + //! atom + //! + //! \return bool + //! success or not + //! + bool GetAtom(Atom& Atom) const; + + //! + //! \brief Add Atom + //! + //! \param [in] Atom& + //! atom + //! + //! \return void + //! + void AddAtom(Atom& Atom); + + //! + //! 
\brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + +private: + std::map m_bitStreams; //!< bitStreams +}; + +VCD_MP4_END; +#endif /* _USERDATAATOM_H_ */ diff --git a/src/isolib/atoms/VideoMediaHeaderAtom.cpp b/src/isolib/atoms/VideoMediaHeaderAtom.cpp new file mode 100644 index 00000000..52994244 --- /dev/null +++ b/src/isolib/atoms/VideoMediaHeaderAtom.cpp @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: VideoMediaHeaderAtom.cpp +//! \brief: VideoMediaHeaderAtom class implementation +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#include "VideoMediaHeaderAtom.h" +#include "Stream.h" + +VCD_MP4_BEGIN + +VideoMediaHeaderAtom::VideoMediaHeaderAtom() + : FullAtom("vmhd", 0, 1) +{ +} + +void VideoMediaHeaderAtom::ToStream(Stream& str) +{ + // Write Atom headers + WriteFullAtomHeader(str); + + str.Write16(0); // graphicsmode = 0 + str.Write16(0); // opcolor = {0, 0, 0} + str.Write16(0); + str.Write16(0); + + // Update the size of the movie Atom + UpdateSize(str); +} + +void VideoMediaHeaderAtom::FromStream(Stream& str) +{ + Stream subBitstr; + + // First parse the Atom header + ParseFullAtomHeader(str); + + str.Read16(); // graphicsmode = 0 + str.Read16(); // opcolor = {0, 0, 0} + str.Read16(); + str.Read16(); +} + +VCD_MP4_END \ No newline at end of file diff --git a/src/isolib/atoms/VideoMediaHeaderAtom.h b/src/isolib/atoms/VideoMediaHeaderAtom.h new file mode 100644 index 00000000..220db727 --- /dev/null +++ b/src/isolib/atoms/VideoMediaHeaderAtom.h @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: VideoMediaHeaderAtom.h +//! \brief: VideoMediaHeaderAtom class. +//! \detail: 'vmhd' Atom +//! +//! Created on October 16, 2019, 13:39 PM +//! + +#ifndef _VIDEOMEDIAHEADERATOM_H_ +#define _VIDEOMEDIAHEADERATOM_H_ + +#include "FormAllocator.h" +#include "FullAtom.h" + +VCD_MP4_BEGIN + +class VideoMediaHeaderAtom : public FullAtom +{ +public: + + //! + //! \brief Constructor + //! + VideoMediaHeaderAtom(); + + //! + //! \brief Destructor + //! + virtual ~VideoMediaHeaderAtom() = default; + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + void FromStream(Stream& str); +}; + +VCD_MP4_END; +#endif /* _VIDEOMEDIAHEADERATOM_H_ */ diff --git a/src/isolib/atoms/VisualSampEntryAtom.cpp b/src/isolib/atoms/VisualSampEntryAtom.cpp new file mode 100644 index 00000000..076dc74f --- /dev/null +++ b/src/isolib/atoms/VisualSampEntryAtom.cpp @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: VisualSampEntryAtom.cpp +//! \brief: VisualSampEntryAtom class implementation +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#include "VisualSampEntryAtom.h" +#include "CleanApertureAtom.h" + + +#include +#include +#include + +VCD_MP4_BEGIN + +/// Length of compressorname string in VisualSampleEntry class +static const unsigned int COMPRESSORNAME_STRING_LENGTH = 31; + +VisualSampleEntryAtom::VisualSampleEntryAtom(FourCCInt codingName, const std::string& compressorName) + : SampleEntryAtom(codingName) + , m_width(0) + , m_height(0) + , m_compressorName(compressorName) + , m_clap(nullptr) +{ + assert(m_compressorName.length() <= COMPRESSORNAME_STRING_LENGTH); + m_compressorName.resize(COMPRESSORNAME_STRING_LENGTH, '\0'); // Make fixed length +} + +VisualSampleEntryAtom::VisualSampleEntryAtom(const VisualSampleEntryAtom& Atom) + : SampleEntryAtom(Atom) + , m_width(Atom.m_width) + , m_height(Atom.m_height) + , m_compressorName(Atom.m_compressorName) + , m_clap(Atom.m_clap) +{ + assert(m_compressorName.length() <= COMPRESSORNAME_STRING_LENGTH); + m_compressorName.resize(COMPRESSORNAME_STRING_LENGTH, '\0'); // Make fixed length +} + +void VisualSampleEntryAtom::CreateClap() +{ + if (m_clap.get() == nullptr) + { + m_clap = MakeShared(); + } +} + +const CleanApertureAtom* VisualSampleEntryAtom::GetClap() const +{ + return m_clap.get(); +} + +CleanApertureAtom* VisualSampleEntryAtom::GetClap() +{ + return m_clap.get(); +} + +void VisualSampleEntryAtom::ToStream(Stream& str) +{ + SampleEntryAtom::ToStream(str); + + str.Write16(0); // pre_defined = 0 + str.Write16(0); // reserved = 0 + str.Write32(0); // reserved = 0 + str.Write32(0); // reserved = 0 + str.Write32(0); // reserved = 0 + str.Write16(m_width); // width + str.Write16(m_height); // height + str.Write32(0x00480000); // horizresolution 72 dpi + str.Write32(0x00480000); // vertresolution 72 dpi + str.Write32(0); // reserved = 0 + str.Write16(1); // frame_count = 1 + + assert(m_compressorName.length() <= COMPRESSORNAME_STRING_LENGTH); + + // Length-byte prefixed compressor name padded to 32 bytes total. 
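+    // In the VisualSampleEntry layout this is a fixed 32-byte field:
+    // one length byte followed by up to 31 name bytes, zero-padded
+    // (COMPRESSORNAME_STRING_LENGTH keeps the stored string at 31 bytes).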
+ str.Write8(static_cast(m_compressorName.length())); + m_compressorName.resize(COMPRESSORNAME_STRING_LENGTH, '\0'); // Make fixed length + str.WriteString(m_compressorName); // Write entire string buffer, including padding zeros + + str.Write16(0x0018); // depth + str.Write16(static_cast(int16_t(-1))); // pre_defined + + auto rinfAtom = GetRestrictedSchemeInfoAtom(); + if (rinfAtom != nullptr) + { + str.SetByte(4, 'r'); + str.SetByte(5, 'e'); + str.SetByte(6, 's'); + str.SetByte(7, 'v'); + + rinfAtom->ToStream(str); + } + + // Update the size of the movie Atom + UpdateSize(str); +} + +void VisualSampleEntryAtom::FromStream(Stream& str) +{ + SampleEntryAtom::FromStream(str); + + str.Read16(); // pre_defined + str.Read16(); // reserved + str.Read32(); // predefined + str.Read32(); // predefined + str.Read32(); // predefined + m_width = str.Read16(); // width + m_height = str.Read16(); // height + str.Read32(); // horizontal resolution + str.Read32(); // vertical resolution + str.Read32(); // reserved + str.Read16(); // frame_count + + const uint8_t compressorNameLength = str.Read8(); + if (compressorNameLength > COMPRESSORNAME_STRING_LENGTH) + { + ISO_LOG(LOG_ERROR, "Too long compressorname string length read from VisualSampleEntry (>31 bytes).\n"); + throw Exception(); + } + std::string codecName; + str.ReadStringWithLen(codecName, compressorNameLength); // compressor name + for (unsigned int i = compressorNameLength; i < COMPRESSORNAME_STRING_LENGTH; ++i) + { + str.Read8(); // discard padding + } + + str.Read16(); // depth + str.Read16(); // pre_defined + + // Read the optional clap Atom, if present + if (str.BytesRemain() > 8) + { + const uint64_t startOffset = str.GetPos(); + FourCCInt AtomType; + Stream subStream = str.ReadSubAtomStream(AtomType); + if (AtomType == "clap") + { + const auto clap = MakeShared(); + clap->FromStream(subStream); + m_clap = clap; + } + else + { + // It was not 'clap', so the contained Atom probably belongs to the Atom Atom extending VisualSampleEntryAtom. + // Reset bitstream position so it will be possible to read the whole extending Atom. + str.SetPos(startOffset); + } + } +} + +bool VisualSampleEntryAtom::IsVisual() const +{ + return true; +} + +VCD_MP4_END diff --git a/src/isolib/atoms/VisualSampEntryAtom.h b/src/isolib/atoms/VisualSampEntryAtom.h new file mode 100644 index 00000000..6d04d2ef --- /dev/null +++ b/src/isolib/atoms/VisualSampEntryAtom.h @@ -0,0 +1,201 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: VisualSampEntryAtom.h +//! \brief: VisualSampEntryAtom class. +//! \detail: Visual Sample Entry Atom definition +//! +//! Created on October 15, 2019, 13:39 PM +//! + +#ifndef _VISUALSAMPLEENTRYATOM_H_ +#define _VISUALSAMPLEENTRYATOM_H_ + +#include "Stream.h" +#include "FormAllocator.h" +#include "SampEntryAtom.h" + +VCD_MP4_BEGIN + +class CleanApertureAtom; +class Stereoscopic3D; +class SphericalVideoV2Atom; + +class VisualSampleEntryAtom : public SampleEntryAtom +{ +public: + + //! + //! \brief Constructor + //! + VisualSampleEntryAtom(FourCCInt codingName, const std::string& compressorName); + + VisualSampleEntryAtom(const VisualSampleEntryAtom& Atom); + + VisualSampleEntryAtom& operator=(const VisualSampleEntryAtom&) = default; + + //! + //! \brief Destructor + //! + virtual ~VisualSampleEntryAtom() = default; + + //! + //! \brief Set and Get function for m_width member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_width + //! m_width member in class + //! \param [in] Width + //! m_width name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_width, Width, const); + + //! + //! \brief Set and Get function for m_height member + //! + //! \param [in] std::uint32_t + //! value to set + //! \param [in] m_height + //! m_height member in class + //! \param [in] Height + //! m_height name in class + //! \param [in] const or blank + //! return const type or not + //! + //! \return void + //! + MEMBER_SETANDGET_FUNC_WITH_OPTION(std::uint32_t, m_height, Height, const); + + //! + //! \brief Create Clap + //! + void CreateClap(); + + //! + //! \brief Get Clap + //! + //! \return const CleanApertureAtom* + //! Clap + //! + const CleanApertureAtom* GetClap() const; + + //! + //! \brief Get Clap + //! + //! \return CleanApertureAtom* + //! Clap + //! + CleanApertureAtom* GetClap(); + + //! + //! \brief Get Stereoscopic3DAtom + //! + //! \return const Stereoscopic3D* + //! Stereoscopic3D + //! + virtual const Stereoscopic3D* GetStereoscopic3DAtom() const + { + return nullptr; + } + + //! + //! \brief is Stereoscopic3DAtom Present or not + //! + //! \return bool + //! is or not + //! + bool IsStereoscopic3DAtomPresent() const + { + bool value = (const_cast(this)->GetStereoscopic3DAtom() != nullptr); + return value; + } + + //! + //! \brief Get SphericalVideoV2 atom + //! + //! \return const SphericalVideoV2Atom* + //! SphericalVideoV2Atom + //! + virtual const SphericalVideoV2Atom* GetSphericalVideoV2Atom() const + { + return nullptr; + } + + //! + //! \brief is SphericalVideoV2 atom Present or not + //! + //! \return bool + //! is or not + //! + bool IsSphericalVideoV2AtomAtomPresent() const + { + bool value = (const_cast(this)->GetSphericalVideoV2Atom() != nullptr); + return value; + } + + //! + //! \brief Write atom information to stream + //! + //! \param [in,out] Stream& + //! 
bitstream that contains atom information + //! + //! \return void + //! + virtual void ToStream(Stream& str); + + //! + //! \brief Parse atom information from stream + //! + //! \param [in,out] Stream& + //! bitstream that contains atom information + //! + //! \return void + //! + virtual void FromStream(Stream& str); + + //! + //! \brief is virtual or not + //! + //! \return bool + //! is or not + //! + virtual bool IsVisual() const override; + +private: + std::uint16_t m_width; //!< Sample display width + std::uint16_t m_height; //!< Sample display height + std::string m_compressorName; //!< Compressor name used, e.g. "HEVC Coding" + std::shared_ptr m_clap; //!< Clean Aperture data structure +}; + +VCD_MP4_END; +#endif /* _VISUALSAMPLEENTRYATOM_H_ */ diff --git a/src/isolib/common/ISOLog.cpp b/src/isolib/common/ISOLog.cpp new file mode 100644 index 00000000..272728fa --- /dev/null +++ b/src/isolib/common/ISOLog.cpp @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ISOLog.cpp +//! \brief: Include the log function implementation +//! +//! Created on April 30, 2019, 6:04 AM +//! + +#include "ISOLog.h" + + +LogFunction logCallBack = GlogFunction; diff --git a/src/isolib/common/ISOLog.h b/src/isolib/common/ISOLog.h new file mode 100644 index 00000000..1b482789 --- /dev/null +++ b/src/isolib/common/ISOLog.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: ISOLog.h +//! \brief: Include the log function declaration +//! +//! Created on April 30, 2019, 6:04 AM +//! + +#ifndef _ISOLOG_H_ +#define _ISOLOG_H_ + +#include "../../utils/Log.h" + +//global logging callback function +extern LogFunction logCallBack; + +#define FILE_NAME(x) (strrchr(x, '/') ? strrchr(x, '/')+1:x) + +#define PRINT_LOG(logLevel, source, line, fmt, args...) \ + logCallBack(logLevel, source, line, fmt, ##args); \ + +#define ISO_LOG(logLevel, fmt, args...) \ + PRINT_LOG(logLevel, FILE_NAME(__FILE__), __LINE__, fmt, ##args) \ + +#endif /* _ISOLOG_H_ */ diff --git a/src/isolib/dash_parser/CMakeLists.txt b/src/isolib/dash_parser/CMakeLists.txt new file mode 100644 index 00000000..a4720745 --- /dev/null +++ b/src/isolib/dash_parser/CMakeLists.txt @@ -0,0 +1,37 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) + +option(USE_ANDROID_NDK + "Use android ndk" + OFF +) + +PROJECT(dashparser) + +AUX_SOURCE_DIRECTORY(../atoms DIR_ATOMS_SRCS) +AUX_SOURCE_DIRECTORY(../common DIR_COMMON_SRCS) +AUX_SOURCE_DIRECTORY(. DIR_DASHPARSER_SRCS) + +if(NOT USE_ANDROID_NDK) +ADD_DEFINITIONS("-g -c -fPIC -lglog -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 + -z noexecstack -z relro -z now -fstack-protector-strong + -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat + -Wformat-security -Wl,-S -Wall -Werror") +else() +ADD_DEFINITIONS("-g -c -fPIC -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 + -fPIE -fPIC -O2 -D_FORTIFY_SOURCE=2 -Wformat + -Wformat-security -Wall") +endif() + +INCLUDE_DIRECTORIES(/usr/local/include ../include ../atoms ../common) + +LINK_DIRECTORIES(/usr/local/lib ../atoms ../common) + +set(DIR_DASHPARSER_SRCS + ${DIR_DASHPARSER_SRCS} + ${DIR_ATOMS_SRCS} + ${DIR_COMMON_SRCS} + ) + +ADD_LIBRARY(dashparser STATIC ${DIR_DASHPARSER_SRCS}) + +TARGET_LINK_LIBRARIES(dashparser glog) diff --git a/src/isolib/dash_parser/Mp4DataTypes.cpp b/src/isolib/dash_parser/Mp4DataTypes.cpp new file mode 100644 index 00000000..2eb5de02 --- /dev/null +++ b/src/isolib/dash_parser/Mp4DataTypes.cpp @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4DataTypes.cpp +//! \brief: Mp4 file related data types operation implementation +//! + +#define _SCL_SECURE_NO_WARNINGS + +#include + +#include "Mp4DataTypes.h" +#include "../atoms/Stream.h" +#include "../atoms/FormAllocator.h" + +using namespace std; + +VCD_MP4_BEGIN + +template +VarLenArray::~VarLenArray() +{ + // CUSTOM_DELETE_ARRAY(arrayElets, T); + if (NULL != arrayElets) + { + delete []arrayElets; + arrayElets = NULL; + } +} + +template +VarLenArray::VarLenArray() + : size(0) + , arrayElets(nullptr) +{ +} + +template +VarLenArray::VarLenArray(size_t n) + : size(n) + , arrayElets(new T[n]) +{ +} + +template +VarLenArray::VarLenArray(const VarLenArray& other) + : size(other.size) + , arrayElets(new T[other.size]) +{ + copy(other.arrayElets, other.arrayElets + other.size, arrayElets); +} + +template +VarLenArray& VarLenArray::operator=(const VarLenArray& other) +{ + if (this != &other) + { + // CUSTOM_DELETE_ARRAY(arrayElets, T); + if (NULL != arrayElets) + { + delete []arrayElets; + arrayElets = NULL; + } + size = other.size; + arrayElets = new T[size]; + copy(other.arrayElets, other.arrayElets + other.size, arrayElets); + } + return *this; +} + +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; +template struct VarLenArray; + +VCD_MP4_END diff --git a/src/isolib/dash_parser/Mp4DataTypes.h b/src/isolib/dash_parser/Mp4DataTypes.h new file mode 100644 index 00000000..c44b1acf --- /dev/null +++ b/src/isolib/dash_parser/Mp4DataTypes.h @@ -0,0 +1,792 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4DataTypes.h +//! \brief: Mp4 file related information definition +//! \detail: Define related information about mp4 file, like +//! track description and so on. +//! + +#ifndef _MP4DATATYPES_H_ +#define _MP4DATATYPES_H_ + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "../include/Common.h" +#include "../include/Index.h" +#include "../atoms/FormAllocator.h" + +#include "../atoms/Atom.h" +#include "../atoms/RestSchemeInfoAtom.h" +#include "../atoms/DecPts.h" +#include "../atoms/TypeAtom.h" + +#include "Mp4StreamIO.h" +#include "../atoms/TrackExtAtom.h" +#include "../atoms/TrackRefTypeAtom.h" + +//#ifndef _SCL_SECURE_NO_WARNINGS +//#define _SCL_SECURE_NO_WARNINGS +//#endif + +using namespace std; + +VCD_MP4_BEGIN + +template +struct VarLenArray +{ + typedef T valType; + typedef T* iterator; + typedef const T* constIter; + typedef size_t sizeType; + typedef ptrdiff_t diffType; + size_t size; + T* arrayElets; + VarLenArray(); + VarLenArray(size_t n); + VarLenArray(const VarLenArray& other); + VarLenArray& operator=(const VarLenArray& other); + inline T& operator[](size_t index) + { + return arrayElets[index]; + } + inline const T& operator[](size_t index) const + { + return arrayElets[index]; + } + inline T* GetBegin() + { + return arrayElets; + } + inline T* GetEnd() + { + return arrayElets + size; + } + inline const T* GetBegin() const + { + return arrayElets; + } + inline const T* GetEnd() const + { + return arrayElets + size; + } + ~VarLenArray(); +}; + +enum SampleFrameType +{ + OUT_REF_PIC, + OUT_NONREF_PIC, + NON_OUT_REFPIC, + DISPLAY_PIC, + SAMPLES_PIC, +}; + +struct ChannelLayout +{ + uint8_t speakerPosition; + int16_t azimuth; + int8_t elevation; +}; + +struct ChnlProperty +{ + uint8_t strStruct; + uint8_t layOut; + uint64_t omitChnlMap; + uint8_t objCnt; + uint16_t chnlCnt; + VarLenArray chnlLayOuts; +}; + +enum class OmniRWPKType : uint8_t +{ + OMNI_RECTANGULAR = 0 +}; + +struct RectRWPKRegion +{ + uint8_t transformType; + bool guardBandFlag; + uint32_t projRegWidth; + uint32_t projRegHeight; + uint32_t projRegTop; + uint32_t projRegLeft; + uint16_t packedRegWidth; + uint16_t packedRegHeight; + uint16_t packedRegTop; + uint16_t packedRegLeft; + + uint8_t leftGbWidth; + uint8_t rightGbWidth; + uint8_t topGbHeight; + uint8_t bottomGbHeight; + bool gbNotUsedForPredFlag; + uint8_t gbType0; + uint8_t gbType1; + uint8_t gbType2; + uint8_t gbType3; +}; + +struct RWPKRegion +{ + bool guardBandFlag; + OmniRWPKType packingType; + union Region { + RectRWPKRegion rectReg; + } region; +}; + +struct RWPKProperty +{ + bool constituentPictureMatching = false; + uint32_t projPicWidth; + uint32_t projPicHeight; + uint16_t packedPicWidth; + uint16_t packedPicHeight; + VarLenArray regions; +}; + +struct COVIInformation +{ + COVIShapeType coviShapeType; + bool viewIdcPresenceFlag; + OmniViewIdc defaultViewIdc; + VarLenArray sphereRegions; +}; + +struct SpatialAudioProperty +{ + uint8_t version; + 
uint8_t ambisonicType; + uint32_t ambisonicOrder; + uint8_t ambisonicChnlSeq; + uint8_t ambisonicNorml; + VarLenArray chnlMap; +}; + +enum class OmniStereoScopic3D : uint8_t +{ + OMNI_MONOSCOPIC = 0, + OMNI_STEREOSCOPIC_TOP_BOTTOM = 1, + OMNI_STEREOSCOPIC_LEFT_RIGHT = 2, + OMNI_STEREOSCOPIC_STEREO_CUSTOM = 3 +}; + +struct RotateDegree1616FP +{ + int32_t fixedPntYaw; + int32_t fixedPntPitch; + int32_t fixedPntRoll; +}; + +struct CubemapProjectionSetting +{ + uint32_t layout; + uint32_t padding; +}; + +struct EquirectangularProjectionSetting +{ + uint32_t topBoundFP; + uint32_t btmBoundFP; + uint32_t leftBoundFP; + uint32_t rightBoundFP; +}; + +enum class OmniProjType : uint8_t +{ + OMNI_UNKOWN = 0, + OMNI_CUBEMAP = 1, + OMNI_ERP = 2, + OMNI_MESH = 3 +}; + +struct SphericalVideoV2Property +{ + RotateDegree1616FP rotateDegree; + OmniProjType projType; + union Projection { + CubemapProjectionSetting cubemap; + EquirectangularProjectionSetting equirectangular; + } projection; +}; + +struct SphericalVideoV1Property +{ + bool isSphe; + bool isStitched; + OmniProjType projType; + uint32_t srcCnt; + + RotateDegree1616FP initView; + uint64_t timestamp; + uint32_t panoWidth; + uint32_t panoHeight; + uint32_t croppedWidth; + uint32_t croppedHeight; + uint32_t croppedLeftW; + uint32_t croppedTopH; +}; + +enum MediaCodecInfoType +{ + AVC_SPS = 7, + AVC_PPS = 8, + HEVC_VPS = 32, + HEVC_SPS = 33, + HEVC_PPS = 34, + AudioSpecificConfig +}; + +struct MediaCodecSpecInfo +{ + MediaCodecInfoType codecSpecInfoType; + VarLenArray codecSpecInfoBits; + + MediaCodecSpecInfo() : codecSpecInfoType(HEVC_VPS) {}; +}; + +struct TStampID +{ + uint64_t timeStamp; + uint32_t itemId; +}; + +enum FeatureOfFile +{ + CONTAIN_ALT_TRACKS = 1u << 3 +}; + +typedef uint32_t FeatureBitMask; + +struct FileFeaturesInfo +{ + FeatureBitMask features; +}; + +enum FeatureOfTrack +{ + IsVideo = 1u, + IsAudio = 1u << 1, + IsMetadata = 1u << 2, + HasAlternatives = 1u << 3, + HasSampleGroups = 1u << 4, + HasAssociatedDepthTrack = 1u << 5 +}; + +enum ImmersiveProperty +{ + IsAudioLSpeakerChnlStructTrack = 1u << 2, + IsVRSpatialAudioTrack = 1u << 8, + IsVRNonDiegeticAudioTrack = 1u << 9, + HasVRStereoscopic3D = 1u << 12, + HasVRV1SpericalVideo = 1u << 13, + HasVRV2SpericalVideo = 1u << 14, +}; + +struct TypeToTrackIDs +{ + FourCC type; + VarLenArray trackIds; +}; + +enum FrameCodecType +{ + OUTPUT_NONREF_FRAME, + OUTPUT_REF_FRAME, + NON_OUTPUT_REF_FRAME +}; + +struct TrackSampInfo +{ + uint32_t sampleId; + FourCC sampleEntryType; + uint32_t sampleDescriptionIndex; + FrameCodecType sampleType; + uint32_t initSegmentId; + uint32_t segmentId; + uint64_t earliestTStamp; + FlagsOfSample sampleFlags; + uint64_t sampleDurationTS; + uint64_t earliestTStampTS; +}; + +struct RatValue +{ + uint64_t num; + uint64_t den; +}; + +struct TrackTypeInformation +{ + FourCC majorBrand; + uint32_t minorVersion; + VarLenArray compatibleBrands; +}; + +struct TrackInformation +{ + uint32_t initSegmentId; + uint32_t trackId; + uint32_t alternateGroupId; + FeatureBitMask features; + FeatureBitMask vrFeatures; + VarLenArray trackURI; + VarLenArray alternateTrackIds; + VarLenArray referenceTrackIds; + VarLenArray trackGroupIds; + VarLenArray sampleProperties; + uint32_t maxSampleSize; + uint32_t timeScale; + RatValue frameRate; + bool hasTypeInformation; + TrackTypeInformation type; + + TrackInformation() : + initSegmentId(0), trackId(0), alternateGroupId(0), features(0), + vrFeatures(0), maxSampleSize(0), timeScale(1), hasTypeInformation(false) {}; +}; + +struct SegInfo 
+{ + uint32_t segmentId; + uint32_t referenceId; + uint32_t timescale; + bool referenceType; + uint64_t earliestPTSinTS; + uint32_t durationInTS; + uint64_t startDataOffset; + uint32_t dataSize; + bool startsWithSAP; + uint8_t SAPType; +}; + +struct SchemeType +{ + FourCC type; + uint32_t version; + VarLenArray uri; +}; + +struct SchemeTypesProperty +{ + SchemeType mainScheme; + VarLenArray compatibleSchemeTypes; +}; + +struct ProjFormat +{ + OmniProjFormat format; +}; + +struct ChnlPropertyInternal +{ + uint8_t strStruct; + uint8_t layOut; + uint64_t omitChnlMap; + uint8_t objCnt; + uint16_t chnlCnt; + vector chnlLayOuts; +}; + +struct RWPKPropertyInternal +{ + bool constituentPicMatching; + uint32_t projPicWidth; + uint32_t projPicHeight; + uint16_t packedPicWidth; + uint16_t packedPicHeight; + vector regions; +}; + +struct COVIInformationInternal +{ + COVIShapeType coviShapeType; + bool viewIdcPresenceFlag; + OmniViewIdc defaultViewIdc; + vector sphereRegions; +}; + +struct SA3DPropertyInternal +{ + uint8_t version; + uint8_t ambisonicType; + uint32_t ambisonicOrder; + uint8_t ambisonicChnlSeq; + uint8_t ambisonicNorml; + vector chnlMap; +}; + +struct SchemeTypesPropertyInternal +{ + SchemeTypeAtom mainScheme; + vector compatibleSchemes; +}; + +struct SampleRes +{ + uint32_t width; + uint32_t height; +}; + +// Convenience types +class SegmentIdTag +{ +}; +class InitSegmentIdTag +{ +}; +class ContextIdTag +{ +}; +class ItemIdTag +{ +}; +class SmpDesIndexTag +{ +}; +class SequenceTag +{ +}; + +typedef Index SegmentId; +typedef Index InitSegmentId; +typedef IndexExplicit ContextId; +typedef Index SmpDesIndex; +typedef Index Sequence; +typedef uint64_t TStamp; +typedef IndexCalculation ItemId; +typedef std::vector> DecodingOrderVector; +typedef std::vector IdVector; +typedef VectorT ContextIdVector; +typedef std::vector DataVector; +typedef std::vector TStampVector; +typedef std::map TStampMap; +typedef std::map TypeToIdsMap; +typedef std::map TypeToCtxIdsMap; +typedef std::map> GroupingMap; +typedef std::map ParameterSetMap; +typedef VarLenArray SegmentIndex; + +typedef pair InitSegmentTrackId; +typedef pair SegmentTrackId; + +class FileProperty +{ +public: + FileProperty() + : m_trackId(0){}; + // typedef Set FilePropertySet; + typedef set FilePropertySet; + + bool HasProperty(FeatureOfFile feature) const + { + return m_filePro.count(feature) != 0; + } + void SetProperty(FeatureOfFile feature) + { + m_filePro.insert(feature); + } + uint32_t GetTrackIndex() const + { + return m_trackId; + } + void SetTrackIndex(uint32_t id) + { + m_trackId = id; + } + unsigned int GetFeatureMask() const + { + unsigned int mask = 0; + for (auto set : m_filePro) + { + mask |= (unsigned int) set; + } + return mask; + } + +private: + uint32_t m_trackId; + FilePropertySet m_filePro; +}; + +class TrackProperty +{ +public: + // typedef Set TrackPropertySet; + // typedef Set TrackVRFeatureSet; + typedef set TrackPropertySet; + typedef set TrackVRFeatureSet; + + bool HasProperty(FeatureOfTrack feature) const + { + return m_trackProSet.count(feature) != 0; + } + void SetProperty(FeatureOfTrack feature) + { + m_trackProSet.insert(feature); + } + bool HasImmersiveProperty(ImmersiveProperty vrFeature) const + { + return m_trackVRFeaSet.count(vrFeature) != 0; + } + void SetImmersiveProperty(ImmersiveProperty vrFeature) + { + m_trackVRFeaSet.insert(vrFeature); + } + unsigned int GetFeatureMask() const + { + unsigned int mask = 0; + for (auto set : m_trackProSet) + { + mask |= (unsigned int) set; + } + return mask; + } + 
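+    // GetVRFeatureMask mirrors GetFeatureMask but folds the immersive-specific
+    // ImmersiveProperty flags into one bitmask, e.g. a stereoscopic spherical
+    // video track that has had both properties set would report
+    // HasVRStereoscopic3D | HasVRV1SpericalVideo.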
unsigned int GetVRFeatureMask() const + { + unsigned int mask = 0; + for (auto set : m_trackVRFeaSet) + { + mask |= (unsigned int) set; + } + return mask; + } + +private: + TrackPropertySet m_trackProSet; + TrackVRFeatureSet m_trackVRFeaSet; +}; + +class MoovProperty +{ +public: + /// Enumerated list of Moov Features + enum Property + { + HasMoovLevelMetaBox, + HasCoverImage + }; + // typedef Set MoovPropertySet; + typedef set MoovPropertySet; + + bool HasProperty(Property property) const + { + return m_moovProSet.count(property) != 0; + } + void SetProperty(Property property) + { + m_moovProSet.insert(property); + } + +private: + MoovPropertySet m_moovProSet; +}; + +typedef pair InitSegTrackIdPair; +typedef pair ItemIdTStampPair; + +struct MoovProperties +{ + MoovProperty moovFeature; + uint64_t fragmentDuration; + std::vector fragmentSampleDefaults; +}; + +struct TrackProperties +{ + uint32_t alternateGroupId; + std::string trackURI; + TrackProperty trackProperty; + IdVector alternateTrackIds; ///< other tracks IDs with the same alternate_group id. + TypeToCtxIdsMap referenceTrackIds; ///< (coming from 'tref') + TypeToIdsMap trackGroupIds; ///< ... coming from Track Group Box 'trgr' + shared_ptr editBox; ///< If set, an edit list box exists +}; +typedef std::map TrackPropertiesMap; + +struct SampleInfo +{ + SegmentId segmentId; + uint32_t sampleId; + vector compositionTimes; + vector compositionTimesTS; + uint64_t dataOffset = 0; + uint32_t dataLength = 0; + uint32_t width = 0; + uint32_t height = 0; + uint32_t sampleDuration = 0; + + FourCCInt sampleEntryType; + SmpDesIndex sampleDescriptionIndex; + FrameCodecType sampleType; + FlagsOfSample sampleFlags; +}; +typedef vector SampleInfoVector; + +struct TrackBasicInfo +{ + uint32_t timeScale; + uint32_t width; + uint32_t height; + + FourCCInt sampleEntryType; + + std::map parameterSetMaps; + std::map chnlProperties; + std::map sa3dProperties; + std::map st3dProperties; + std::map sphericalV1Properties; + std::map sv3dProperties; + std::map sampleRes; + std::map pfrmProperties; + std::map rwpkProperties; + std::map rotnProperties; + std::map stviProperties; + std::map coviProperties; + std::map schemeTypesProperties; + std::map nalLengthSizeMinus1; +}; + +struct TrackDecInfo +{ + ItemId itemIdBase; + SampleInfoVector samples; + + DecodePts::PresentTimeTS durationTS = 0; + DecodePts::PresentTimeTS earliestPTSTS = 0; + DecodePts::PresentTimeTS noSidxFallbackPTSTS = 0; + + DecodePts::PresentTimeTS nextPTSTS = 0; + + std::map decoderCodeTypeMap; + + DecodePts::PMap pMap; + DecodePts::PMapTS pMapTS; + + bool hasEditList = false; + bool hasTtyp = false; + TrackTypeAtom ttyp; +}; + +struct SegmentIO +{ + UniquePtr strIO; + UniquePtr fileStrIO; + int64_t size = 0; +}; + +typedef std::map ItemToParameterSetMap; + +struct SegmentProperties +{ + InitSegmentId initSegmentId; + SegmentId segmentId; + set sequences; + SegmentIO io; + + SegmentTypeAtom styp; + + std::map trackDecInfos; + ItemToParameterSetMap itemToParameterSetMap; +}; + +typedef std::map SegPropMap; +typedef std::map SeqToSegMap; + +struct InitSegmentProperties +{ + FileProperty fileProperty; + MoovProperties moovProperties; + TrackPropertiesMap trackProperties; + uint32_t movieTScale; + + FileTypeAtom ftyp; ///< File Type Box for later information retrieval + + std::map basicTrackInfos; + + SegPropMap segPropMap; + SeqToSegMap seqToSeg; + + SegmentIndex segmentIndex; + + ContextId corresTrackId; +}; + +struct ExtNalHdr +{ + uint8_t forbidden_zero_bit = 0; + uint8_t nal_unit_type = 0; + 
uint8_t nuh_layer_id = 0; + uint8_t nuh_temporal_id_plus1 = 0; +}; + +struct ExtSample +{ + struct SampleConstruct + { + uint8_t order_idx; + uint8_t constructor_type; + uint8_t track_ref_index; + int8_t sample_offset; + uint32_t data_offset; + uint32_t data_length; + }; + struct InlineConstruct + { + uint8_t order_idx; + uint8_t constructor_type; + uint8_t data_length; + vector inline_data; + }; + + struct Extractor + { + vector inlineConstruct; + vector sampleConstruct; + }; + vector extractors; +}; + +struct Hvc2ExtractorNal +{ + ExtNalHdr extNalHdr = {}; + ExtSample extNalDat = {}; +}; + +VCD_MP4_END; +#endif /* _MP4DATATYPES_H_ */ diff --git a/src/isolib/dash_parser/Mp4ReaderImpl.cpp b/src/isolib/dash_parser/Mp4ReaderImpl.cpp new file mode 100644 index 00000000..8bf25e3a --- /dev/null +++ b/src/isolib/dash_parser/Mp4ReaderImpl.cpp @@ -0,0 +1,4794 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4ReaderImpl.cpp +//! \brief: Mp4Reader class implementation +//! 
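+//! \detail: Parsed data is addressed by the (InitSegmentId, SegmentId,
+//!          ContextId) indices defined in Mp4DataTypes.h: m_initSegProps
+//!          maps each init segment to its InitSegmentProperties, whose
+//!          segPropMap holds the parsed media segments and whose
+//!          basicTrackInfos holds per-track static data; accessors such as
+//!          GetTrackBasicInfo() and GetTrackDecInfo() below simply chain
+//!          these maps.
+//!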
+ +#include "Mp4ReaderImpl.h" +#include "Mp4DataTypes.h" +#include "../atoms/AudSampEntryAtom.h" +#include "../atoms/AvcConfigAtom.h" +#include "../atoms/AvcSampEntry.h" +#include "../atoms/CleanApertureAtom.h" +#include "../atoms/FormAllocator.h" +#include "../atoms/HevcSampEntry.h" +#include "../atoms/InitViewOrientationSampEntry.h" +#include "../atoms/MediaDataAtom.h" +#include "../atoms/MetaAtom.h" +#include "../atoms/MetaDataSampEntryAtom.h" +#include "../atoms/MovieAtom.h" +#include "../atoms/MovieFragAtom.h" +#include "../atoms/Mp4AudSampEntryAtom.h" +#include "Mp4ReaderUtil.h" +#include "../atoms/SegIndexAtom.h" +#include "../atoms/TypeAtom.h" +#include "../atoms/UriMetaSampEntryAtom.h" + +#include +#include +#include +#include +#include +#include + +#if defined(__GNUC__) && defined(__DEPRECATED) +#undef __DEPRECATED +#endif +#include + +using namespace std; + +VCD_MP4_BEGIN + +using PrestTS = DecodePts::PresentTimeTS; +const char* ident = "$Id: MP4VR version " MP4_BUILD_VERSION " $"; + +Mp4Reader* Mp4Reader::Create() +{ + return (new Mp4Reader()); +} + +void Mp4Reader::Destroy(Mp4Reader* mp4Reader) +{ + if (NULL != mp4Reader) + { + delete mp4Reader; + } +} + +int32_t Mp4Reader::Initialize(StreamIO* stream) +{ + InitSegmentId initSegId = 0; + SegmentId segIndex = 0; + + UniquePtr internalStream(new StreamIOInternal(stream)); + + if (!internalStream->IsStreamGood()) + { + return OMAF_FILE_OPEN_ERROR; + } + + auto& io = m_initSegProps[initSegId].segPropMap[segIndex].io; + io.strIO = move(internalStream); + io.size = io.strIO->GetStreamSize(); + + try + { + int32_t error = ReadStream(initSegId, segIndex); + if (error) + { + return error; + } + } + catch (const Exception& exc) + { + ISO_LOG(LOG_ERROR, "Error: %s\n", exc.what()); + return OMAF_FILE_READ_ERROR; + } + catch (const exception& e) + { + ISO_LOG(LOG_ERROR, "Error: %s\n", e.what()); + return OMAF_FILE_READ_ERROR; + } + return ERROR_NONE; +} + +Mp4Reader::Mp4Reader() + : m_readerSte(ReaderState::UNINITIALIZED) +{ +} + +const TrackBasicInfo& Mp4Reader::GetTrackBasicInfo(InitSegmentTrackId trackIdPair) const +{ + return m_initSegProps.at(trackIdPair.first).basicTrackInfos.at(trackIdPair.second); +} + +TrackBasicInfo& Mp4Reader::GetTrackBasicInfo(InitSegmentTrackId trackIdPair) +{ + return m_initSegProps.at(trackIdPair.first).basicTrackInfos.at(trackIdPair.second); +} + +bool Mp4Reader::CanFindTrackDecInfo(InitSegmentId initSegId, SegmentTrackId segTrackId) const +{ + const auto& segProps = + m_initSegProps.at(initSegId).segPropMap.find(segTrackId.first); + if (segProps != m_initSegProps.at(initSegId).segPropMap.end()) + { + const auto& trackDecInfo = segProps->second.trackDecInfos.find(segTrackId.second); + return trackDecInfo != segProps->second.trackDecInfos.end(); + } + else + { + return false; + } +} + +const TrackDecInfo& Mp4Reader::GetTrackDecInfo(InitSegmentId initSegId, SegmentTrackId segTrackId) const +{ + return m_initSegProps.at(initSegId) + .segPropMap.at(segTrackId.first) + .trackDecInfos.at(segTrackId.second); +} + +TrackDecInfo& Mp4Reader::GetTrackDecInfo(InitSegmentId initSegId, SegmentTrackId segTrackId) +{ + return m_initSegProps.at(initSegId) + .segPropMap.at(segTrackId.first) + .trackDecInfos.at(segTrackId.second); +} + +bool Mp4Reader::FoundPrevSeg(InitSegmentId initSegId, + SegmentId curSegmentId, + SegmentId& prevSegId) const +{ + auto& segProps = m_initSegProps.at(initSegId).segPropMap.at(curSegmentId); + auto& seqToSeg = m_initSegProps.at(segProps.initSegmentId).seqToSeg; + auto iterator = 
segProps.sequences.empty() + ? seqToSeg.end() + : seqToSeg.find(*segProps.sequences.begin()); + + if (iterator == seqToSeg.end()) + { + return false; + } + else + { + if (iterator != seqToSeg.begin()) + { + --iterator; + prevSegId = iterator->second; + return true; + } + else + { + return false; + } + } +} + +const TrackDecInfo* Mp4Reader::GetPrevTrackDecInfo(InitSegmentId initSegId, + SegmentTrackId segTrackId) const +{ + SegmentId curSegId = segTrackId.first; + ContextId ctxId = segTrackId.second; + SegmentId prevSegId; + const TrackDecInfo* trackDecInfo = NULL; + while (FoundPrevSeg(initSegId, curSegId, prevSegId)) + { + auto& segProps = + m_initSegProps.at(initSegId).segPropMap.at(prevSegId); + + if (segProps.trackDecInfos.count(ctxId)) + { + trackDecInfo = &segProps.trackDecInfos.at(ctxId); + } + + if (trackDecInfo) + { + return trackDecInfo; + } + + curSegId = prevSegId; + } + + return NULL; +} + +const SampleInfoVector& Mp4Reader::GetSampInfos( + InitSegmentId initSegId, + SegmentTrackId segTrackId, + ItemId& sampId) const +{ + auto& segProps = m_initSegProps.at(initSegId).segPropMap.at(segTrackId.first); + sampId = GetTrackDecInfo(initSegId, segTrackId).itemIdBase; + auto& sampInfoVec = segProps.trackDecInfos.at(segTrackId.second).samples; + return sampInfoVec; +} + +const ParameterSetMap* Mp4Reader::GetParameterSetMap(InitSegTrackIdPair indexPair) const +{ + auto ctxIndex = indexPair.first.second; + InitSegmentId initSegId = indexPair.first.first; + SegmentId segIndex; + int result = GetSegIndex(indexPair, segIndex); + if (result) + { + return NULL; + } + const auto& segProps = m_initSegProps.at(initSegId).segPropMap.at(segIndex); + const auto& initSegProps = m_initSegProps.at(segProps.initSegmentId); + SegmentTrackId segTrackId = SegmentTrackId(segIndex, ctxIndex); + const auto& idIter = segProps.itemToParameterSetMap.find( + make_pair(indexPair.first, indexPair.second - GetTrackDecInfo(initSegId, segTrackId).itemIdBase)); + if (idIter == segProps.itemToParameterSetMap.end()) + { + return NULL; + } + + int foundCnt = initSegProps.basicTrackInfos.at(indexPair.first.second) + .parameterSetMaps.count(idIter->second); + if (foundCnt) + { + return &initSegProps.basicTrackInfos.at(indexPair.first.second) + .parameterSetMaps.at(idIter->second); + } + else + { + return NULL; + } +} + +void Mp4Reader::Close() +{ + m_readerSte = ReaderState::UNINITIALIZED; + while (!m_initSegProps.empty()) + { + DisableInitSeg(m_initSegProps.begin()->first.GetIndex()); + } +} + +void Mp4Reader::CfgSegSidxFallback(InitSegmentId initSegId, SegmentTrackId segTrackId) +{ + SegmentId segIndex = segTrackId.first; + ContextId ctxId = segTrackId.second; + auto& segProps = m_initSegProps.at(initSegId).segPropMap.at(segIndex); + + PrestTS segDurTS = 0; + for (auto& trackDecInfo : segProps.trackDecInfos) + { + segDurTS = max(segDurTS, trackDecInfo.second.durationTS); + } + + SegmentId prevSegId; + PrestTS curSegStartTS = 0; + const TrackDecInfo* trackDecInfo = GetPrevTrackDecInfo(initSegId, segTrackId); + if (trackDecInfo) + { + curSegStartTS = trackDecInfo->noSidxFallbackPTSTS; + } + PrestTS followingTS = curSegStartTS + segDurTS; + + if (followingTS > segProps.trackDecInfos.at(ctxId).noSidxFallbackPTSTS) + { + segProps.trackDecInfos.at(ctxId).noSidxFallbackPTSTS = followingTS; + } +} + +void Mp4Reader::RefreshCompTimes(InitSegmentId initSegId, SegmentId segIndex) +{ + using PMapIt = DecodePts::PMap::iterator; + using PMapTSIt = DecodePts::PMapTS::iterator; + using PMapTSRevIt = DecodePts::PMapTS::reverse_iterator; 
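+    // For each track of this segment, copy the (presentation time -> sample)
+    // mappings from pMap / pMapTS back into the per-sample compositionTimes
+    // lists; when an edit list is present, also clamp the last sample's
+    // duration so it does not extend past the track duration.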
+ + std::map::iterator iter; + for (iter = m_initSegProps.at(initSegId).segPropMap.at(segIndex).trackDecInfos.begin(); + iter != m_initSegProps.at(initSegId).segPropMap.at(segIndex).trackDecInfos.end(); + iter++) + { + auto& trackDecInfo = iter->second; + if (trackDecInfo.pMap.size()) + { + PMapIt iter1 = trackDecInfo.pMap.begin(); + for ( ; iter1 != trackDecInfo.pMap.end(); iter1++) + { + trackDecInfo.samples.at(iter1->second).compositionTimes.push_back(uint64_t(iter1->first)); + } + + PMapTSIt iter2 = trackDecInfo.pMapTS.begin(); + for ( ; iter2 != trackDecInfo.pMapTS.end(); iter2++) + { + trackDecInfo.samples.at(iter2->second).compositionTimesTS.push_back(uint64_t(iter2->first)); + } + + if (trackDecInfo.hasEditList) + { + PMapTSRevIt iter3 = trackDecInfo.pMapTS.rbegin(); + if (iter3 == trackDecInfo.pMapTS.rend()) + { + ISO_LOG(LOG_ERROR, "Failed to get TimeStamp !\n"); + throw exception(); + } + trackDecInfo.samples.at(iter3->second).sampleDuration = + min(trackDecInfo.samples.at(iter3->second).sampleDuration, + static_cast(trackDecInfo.durationTS - iter3->first)); + } + } + } +} + +DashSegGroup Mp4Reader::CreateDashSegs(InitSegmentId initSegId) +{ + return DashSegGroup(*this, initSegId); +} + +ConstDashSegGroup Mp4Reader::CreateDashSegs(InitSegmentId initSegId) const +{ + return ConstDashSegGroup(*this, initSegId); +} + +int32_t Mp4Reader::ParseInitSeg(StreamIO* strIO, uint32_t initSegId) +{ + InitSegmentId initSegmentId = initSegId; + SegmentId segIndex = 0; + + if (m_initSegProps.count(initSegmentId)) + { + return ERROR_NONE; + } + + ReaderState prevState = m_readerSte; + m_readerSte = ReaderState::INITIALIZING; + + SegmentProperties& segProps = m_initSegProps[initSegmentId].segPropMap[segIndex]; + SegmentIO& io = segProps.io; + io.strIO.reset(new StreamIOInternal(strIO)); + if (io.strIO->PeekEOS()) + { + m_readerSte = prevState; + io.strIO.reset(); + ISO_LOG(LOG_ERROR, "Peek to EOS\n"); + return OMAF_FILE_READ_ERROR; + } + io.size = strIO->GetStreamSize(); + + segProps.initSegmentId = initSegmentId; + segProps.segmentId = segIndex; + + bool ftypFound = false; + bool moovFound = false; + bool earliestPTSRead = false; + + int32_t error = ERROR_NONE; + if (io.strIO->PeekEOS()) + { + error = OMAF_INVALID_FILE_HEADER; + } + + try + { + while (!error && !io.strIO->PeekEOS()) + { + std::string boxType; + int64_t boxSize = 0; + Stream bitstream; + error = ReadAtomParams(io, boxType, boxSize); + if (!error) + { + ISO_LOG(LOG_INFO, "boxType is %s\n", boxType.c_str()); + if (boxType == "ftyp") + { + if (ftypFound == true) + { + ISO_LOG(LOG_ERROR, "boxType is ftyp and is True!!!\n"); + error = OMAF_FILE_READ_ERROR; + break; + } + ftypFound = true; + + error = ReadAtom(io, bitstream); + if (!error) + { + FileTypeAtom ftyp; + ftyp.FromStream(bitstream); + + std::set supportedBrands; + + if (ftyp.CheckCompatibleBrand("nvr1")) + { + supportedBrands.insert("[nvr1] "); + } + + ISO_LOG(LOG_INFO, "Compatible brands found\n"); + + m_initSegProps[initSegmentId].ftyp = ftyp; + } + } + else if (boxType == "sidx") + { + error = ReadAtom(io, bitstream); + if (!error) + { + SegmentIndexAtom sidx; + sidx.FromStream(bitstream); + + if (!earliestPTSRead) + { + earliestPTSRead = true; + } + GenSegInAtom(sidx, m_initSegProps[initSegmentId].segmentIndex, + io.strIO->TellOffset()); + } + } + else if (boxType == "moov") + { + if (moovFound == true) + { + ISO_LOG(LOG_ERROR, "boxType is moov and is Found!!!\n"); + error = OMAF_FILE_READ_ERROR; + break; + } + moovFound = true; + + error = ReadAtom(io, bitstream); + if 
(!error) + { + MovieAtom moov; + moov.FromStream(bitstream); + m_initSegProps[initSegmentId].moovProperties = ExtractMoovProps(moov); + m_initSegProps[initSegmentId].trackProperties = + FillTrackProps(initSegmentId, segIndex, moov); + m_initSegProps[initSegmentId].movieTScale = + moov.GetMovieHeaderAtom().GetTimeScale(); + m_matrix = moov.GetMovieHeaderAtom().GetMatrix(); + } + } + else if (boxType == "moof") + { + ISO_LOG(LOG_WARNING, "Skipping root level 'moof' box - not allowed in Initialization Segment\n"); + error = SkipAtom(io); + } + else if (boxType == "mdat") + { + error = SkipAtom(io); + } + else + { + ISO_LOG(LOG_WARNING, "Skipping root level box of unknown type '%s'\n", boxType.c_str()); + error = SkipAtom(io); + } + } + } + } + catch (Exception& exc) + { + ISO_LOG(LOG_ERROR, "ParseInitSegment Exception Error: %s\n", exc.what()); + error = OMAF_FILE_READ_ERROR; + } + catch (exception& e) + { + ISO_LOG(LOG_ERROR, "ParseInitSegment exception Error:: %s\n", e.what()); + error = OMAF_FILE_READ_ERROR; + } + + if (!error && (!ftypFound || !moovFound)) + { + error = OMAF_INVALID_FILE_HEADER; + } + + if (!error) + { + for (auto& trackDecInfo : segProps.trackDecInfos) + { + CfgSegSidxFallback(initSegmentId, make_pair(segIndex, trackDecInfo.first)); + } + + RefreshCompTimes(initSegmentId, segIndex); + + if ((!io.strIO->IsStreamGood()) && (!io.strIO->IsReachEOS())) + { + ISO_LOG(LOG_ERROR, "Stream is Good? %d\n", int32_t(io.strIO->IsStreamGood())); + ISO_LOG(LOG_ERROR, "Reach EOS? %d\n", int32_t(io.strIO->IsReachEOS())); + return OMAF_FILE_READ_ERROR; + } + io.strIO->ClearStatus(); + + m_initSegProps[initSegmentId].fileProperty = GetFileProps(); + + m_readerSte = ReaderState::READY; + } + else + { + DisableInitSeg(initSegmentId.GetIndex()); + } + return error; +} + +int32_t Mp4Reader::DisableInitSeg(uint32_t initSegId) +{ + bool isInitSegment = !!m_initSegProps.count(initSegId); + if (isInitSegment) + { + for (auto& basicTrackInfo : m_initSegProps.at(initSegId).basicTrackInfos) + { + m_ctxInfoMap.erase(InitSegmentTrackId(initSegId, basicTrackInfo.first)); + } + m_initSegProps.erase(initSegId); + } + return (isInitSegment) ? 
ERROR_NONE : OMAF_INVALID_SEGMENT; +} + +int32_t Mp4Reader::ParseSeg(StreamIO* strIO, + uint32_t initSegId, + uint32_t segIndex, + uint64_t earliestPTSinTS) +{ + if (m_initSegProps.count(initSegId) && + m_initSegProps.at(initSegId).segPropMap.count(segIndex)) + { + return ERROR_NONE; + } + if (!m_initSegProps.count(initSegId)) + { + return OMAF_INVALID_SEGMENT; + } + + ReaderState prevState = m_readerSte; + m_readerSte = ReaderState::INITIALIZING; + + SegmentProperties& segProps = + m_initSegProps.at(initSegId).segPropMap[segIndex]; + SegmentIO& io = segProps.io; + io.strIO.reset(new StreamIOInternal(strIO)); + if (io.strIO->PeekEOS()) + { + m_readerSte = prevState; + io.strIO.reset(); + ISO_LOG(LOG_ERROR, "Peek to EOS!!!\n"); + return OMAF_FILE_READ_ERROR; + } + io.size = strIO->GetStreamSize(); + + segProps.initSegmentId = initSegId; + segProps.segmentId = segIndex; + + bool stypFound = false; + bool earliestPTSRead = false; + std::map earliestPTSTS; + + int32_t error = ERROR_NONE; + try + { + while (!error && !io.strIO->PeekEOS()) + { + std::string boxType; + int64_t boxSize = 0; + Stream bitstream; + error = ReadAtomParams(io, boxType, boxSize); + if (!error) + { + ISO_LOG(LOG_INFO, "boxType is %s\n", boxType.c_str()); + if (boxType == "styp") + { + error = ReadAtom(io, bitstream); + if (!error) + { + SegmentTypeAtom styp; + styp.FromStream(bitstream); + + if (stypFound == false) + { + segProps.styp = styp; + stypFound = true; + } + } + } + else if (boxType == "sidx") + { + error = ReadAtom(io, bitstream); + if (!error) + { + SegmentIndexAtom sidx; + sidx.FromStream(bitstream); + + if (!earliestPTSRead) + { + earliestPTSRead = true; + + for (auto& basicTrackInfo : m_initSegProps.at(initSegId).basicTrackInfos) + { + ContextId ctxId = basicTrackInfo.first; + earliestPTSTS[ctxId] = + PrestTS(sidx.GetEarliestPresentationTime()); + } + } + } + } + else if (boxType == "moof") + { + const StreamIO::offset_t moofFirstByte = io.strIO->TellOffset(); + + error = ReadAtom(io, bitstream); + if (!error) + { + MovieFragmentAtom moof( + m_initSegProps.at(initSegId).moovProperties.fragmentSampleDefaults); + moof.SetMoofFirstByteOffset(static_cast(moofFirstByte)); + moof.FromStream(bitstream); + + if (!earliestPTSRead) + { + for (auto& basicTrackInfo : m_initSegProps.at(initSegId).basicTrackInfos) + { + ContextId ctxId = basicTrackInfo.first; + if (earliestPTSinTS != UINT64_MAX) + { + earliestPTSTS[ctxId] = PrestTS(earliestPTSinTS); + } + else if (const TrackDecInfo* precTrackDecInfo = GetPrevTrackDecInfo( + initSegId, SegmentTrackId(segIndex, ctxId))) + { + if (precTrackDecInfo) + { + earliestPTSTS[ctxId] = precTrackDecInfo->noSidxFallbackPTSTS; + } + } + else + { + earliestPTSTS[ctxId] = 0; + } + } + + earliestPTSRead = true; + } + + CtxIdPresentTSMap earliestPTSTSForTrack; + for (auto& trackFragmentAtom : moof.GetTrackFragmentAtoms()) + { + auto ctxId = ContextId(trackFragmentAtom->GetTrackFragmentHeaderAtom().GetTrackId()); + earliestPTSTSForTrack.insert(make_pair(ctxId, earliestPTSTS.at(ctxId))); + } + + AddTrackProps(initSegId, segIndex, moof, earliestPTSTSForTrack); + } + } + else if (boxType == "mdat") + { + error = SkipAtom(io); + } + else + { + ISO_LOG(LOG_WARNING, "Skipping root level box of unknown type '%s'\n", boxType.c_str()); + error = SkipAtom(io); + } + } + } + } + catch (Exception& exc) + { + ISO_LOG(LOG_ERROR, "parseSegment Exception Error: %s\n", exc.what()); + error = OMAF_FILE_READ_ERROR; + } + catch (exception& e) + { + ISO_LOG(LOG_ERROR, "parseSegment exception Error: %s\n", 
e.what()); + error = OMAF_FILE_READ_ERROR; + } + + if (!error) + { + for (auto& trackDecInfo : segProps.trackDecInfos) + { + CfgSegSidxFallback(initSegId, make_pair(segIndex, trackDecInfo.first)); + } + + RefreshCompTimes(initSegId, segIndex); + + if ((!io.strIO->IsStreamGood()) && (!io.strIO->IsReachEOS())) + { + ISO_LOG(LOG_ERROR, "Stream is Good? %d\n", int32_t(io.strIO->IsStreamGood())); + ISO_LOG(LOG_ERROR, "Reach to EOS? %d\n", int32_t(io.strIO->IsReachEOS())); + return OMAF_FILE_READ_ERROR; + } + io.strIO->ClearStatus(); + m_readerSte = ReaderState::READY; + } + else + { + DisableSeg(initSegId, segIndex); + } + return error; +} + +int32_t Mp4Reader::DisableSeg(uint32_t initSegId, uint32_t segIndex) +{ + if (!m_initSegProps.count(initSegId)) + { + return OMAF_INVALID_SEGMENT; + } + + bool isSegment = !!m_initSegProps.at(initSegId).segPropMap.count(segIndex); + if (isSegment) + { + auto& segProps = m_initSegProps.at(initSegId).segPropMap.at(segIndex); + + bool hasValidInitSeg = !!m_initSegProps.count(segProps.initSegmentId); + if (hasValidInitSeg) + { + auto& seqToSeg = + m_initSegProps.at(segProps.initSegmentId).seqToSeg; + for (auto& sequence : segProps.sequences) + { + seqToSeg.erase(sequence); + } + SegmentProperties& segProps = m_initSegProps.at(initSegId).segPropMap[segIndex]; + SegmentIO& io = segProps.io; + io.strIO.reset(NULL); + m_initSegProps.at(initSegId).segPropMap.erase(segIndex); + } + else + { + return OMAF_INVALID_SEGMENT; + } + } + + return (isSegment) ? ERROR_NONE : OMAF_INVALID_SEGMENT; +} + +int32_t Mp4Reader::GetSegIndex(uint32_t initSegId, VarLenArray& segIndex) +{ + bool isInitSegment = !!m_initSegProps.count(initSegId); + + if (isInitSegment) + { + segIndex = m_initSegProps.at(initSegId).segmentIndex; + } + else + { + return OMAF_INVALID_SEGMENT; + } + return ERROR_NONE; +} + + +int32_t Mp4Reader::ParseSegIndex(StreamIO* strIO, + VarLenArray& segIndex) +{ + SegmentIO io; + io.strIO.reset(new StreamIOInternal(strIO)); + if (io.strIO->PeekEOS()) + { + io.strIO.reset(); + ISO_LOG(LOG_ERROR, "PeekEOS is true!!!\n"); + return OMAF_FILE_READ_ERROR; + } + io.size = strIO->GetStreamSize(); + + int32_t error = ERROR_NONE; + bool segIndexFound = false; + try + { + while (!error && !io.strIO->PeekEOS()) + { + std::string boxType; + int64_t boxSize = 0; + Stream bitstream; + error = ReadAtomParams(io, boxType, boxSize); + if (!error) + { + if (boxType == "sidx") + { + error = ReadAtom(io, bitstream); + if (!error) + { + SegmentIndexAtom sidx; + sidx.FromStream(bitstream); + GenSegInAtom(sidx, segIndex, io.strIO->TellOffset()); + segIndexFound = true; + break; + } + } + else if (boxType == "styp" || boxType == "moof" || boxType == "mdat") + { + error = SkipAtom(io); + } + else + { + ISO_LOG(LOG_WARNING, "Skipping root level box of unknown type '%s\n'", boxType.c_str()); + error = SkipAtom(io); + } + } + } + } + catch (Exception& exc) + { + ISO_LOG(LOG_ERROR, "ParseSegmentIndex Exception Error: %s\n", exc.what()); + error = OMAF_FILE_READ_ERROR; + } + catch (exception& e) + { + ISO_LOG(LOG_ERROR, "ParseSegmentIndex exception Error: %s\n", e.what()); + error = OMAF_FILE_READ_ERROR; + } + + if (!segIndexFound) + { + ISO_LOG(LOG_ERROR, "ParseSegmentIndex couldn't find sidx box!\n"); + error = OMAF_INVALID_SEGMENT; + } + + if (!error) + { + if ((!io.strIO->IsStreamGood()) && (!io.strIO->IsReachEOS())) + { + return OMAF_FILE_READ_ERROR; + } + io.strIO->ClearStatus(); + } + return error; +} + +void Mp4Reader::IsInited() const +{ + if (!(m_readerSte == ReaderState::INITIALIZING || 
m_readerSte == ReaderState::READY)) + { + ISO_LOG(LOG_ERROR, "Mp4Reader is not initialized !\n"); + throw exception(); + } +} + +int Mp4Reader::IsInitErr() const +{ + if (!(m_readerSte == ReaderState::INITIALIZING || m_readerSte == ReaderState::READY)) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + return ERROR_NONE; +} + +Mp4Reader::CtxType Mp4Reader::GetCtxType(const InitSegmentTrackId trackIdPair) const +{ + const auto contextInfo = m_ctxInfoMap.find(trackIdPair); + if (contextInfo == m_ctxInfoMap.end()) + { + ISO_LOG(LOG_ERROR, "Context ID is invalide !\n"); + throw exception(); + } + + return contextInfo->second.ctxType; +} + +int Mp4Reader::GetCtxTypeError(const InitSegmentTrackId trackIdPair, + Mp4Reader::CtxType& ctxType) const +{ + const auto contextInfo = m_ctxInfoMap.find(trackIdPair); + if (contextInfo == m_ctxInfoMap.end()) + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + + ctxType = contextInfo->second.ctxType; + return ERROR_NONE; +} + +int32_t Mp4Reader::ReadStream(InitSegmentId initSegId, SegmentId segIndex) +{ + m_readerSte = ReaderState::INITIALIZING; + + SegmentIO& io = m_initSegProps[initSegId].segPropMap[segIndex].io; + m_initSegProps.at(initSegId).segPropMap.at(segIndex).initSegmentId = initSegId; + m_initSegProps.at(initSegId).segPropMap.at(segIndex).segmentId = segIndex; + + bool ftypFound = false; + bool moovFound = false; + + int32_t error = ERROR_NONE; + if (io.strIO->PeekEOS()) + { + error = OMAF_INVALID_FILE_HEADER; + } + + while (!error && !io.strIO->PeekEOS()) + { + std::string boxType; + int64_t boxSize = 0; + Stream bitstream; + error = ReadAtomParams(io, boxType, boxSize); + if (!error) + { + ISO_LOG(LOG_INFO, "boxType is %s\n", boxType.c_str()); + if (boxType == "ftyp") + { + if (ftypFound == true) + { + return OMAF_FILE_READ_ERROR; + } + ftypFound = true; + + error = ReadAtom(io, bitstream); + if (!error) + { + FileTypeAtom ftyp; + ftyp.FromStream(bitstream); + + std::set supportedBrands; + + if (ftyp.CheckCompatibleBrand("nvr1")) + { + supportedBrands.insert("[nvr1] "); + } + + ISO_LOG(LOG_INFO, "Compatible brands found\n"); + + m_initSegProps[initSegId].ftyp = ftyp; + } + } + else if (boxType == "styp") + { + error = ReadAtom(io, bitstream); + if (!error) + { + SegmentTypeAtom styp; + styp.FromStream(bitstream); + + std::set supportedBrands; + + ISO_LOG(LOG_INFO, "Compatible brands found\n"); + } + } + else if (boxType == "sidx") + { + error = ReadAtom(io, bitstream); + if (!error) + { + SegmentIndexAtom sidx; + sidx.FromStream(bitstream); + } + } + else if (boxType == "moov") + { + if (moovFound == true) + { + ISO_LOG(LOG_ERROR, "boxType is moov and is True!!!\n"); + return OMAF_FILE_READ_ERROR; + } + moovFound = true; + + error = ReadAtom(io, bitstream); + if (!error) + { + MovieAtom moov; + moov.FromStream(bitstream); + m_initSegProps[initSegId].moovProperties = ExtractMoovProps(moov); + m_initSegProps[initSegId].trackProperties = + FillTrackProps(initSegId, segIndex, moov); + m_initSegProps[initSegId].movieTScale = + moov.GetMovieHeaderAtom().GetTimeScale(); + AddSegSeq(initSegId, segIndex, 0); + m_matrix = moov.GetMovieHeaderAtom().GetMatrix(); + } + } + else if (boxType == "moof") + { + const StreamIO::offset_t moofFirstByte = io.strIO->TellOffset(); + + error = ReadAtom(io, bitstream); + if (!error) + { + MovieFragmentAtom moof( + m_initSegProps.at(initSegId).moovProperties.fragmentSampleDefaults); + moof.SetMoofFirstByteOffset(static_cast(moofFirstByte)); + moof.FromStream(bitstream); + + CtxIdPresentTSMap earliestPTSTSForTrack; + 
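+                    // Whole-stream parsing has no 'sidx'-derived earliest presentation
+                    // time, so an empty map is passed here; AddTrackProps then takes the
+                    // base media decode time from 'tfdt' (or zero) for each track fragment.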
AddTrackProps(initSegId, segIndex, moof, earliestPTSTSForTrack); + + SegmentProperties& segProps = + m_initSegProps[initSegId].segPropMap[segIndex]; + for (auto& trackFragmentAtom : moof.GetTrackFragmentAtoms()) + { + auto ctxId = ContextId(trackFragmentAtom->GetTrackFragmentHeaderAtom().GetTrackId()); + TrackDecInfo& trackDecInfo = segProps.trackDecInfos[ctxId]; + + if (trackDecInfo.samples.size()) + { + int64_t sampleDataEndOffset = static_cast( + trackDecInfo.samples.rbegin()->dataOffset + trackDecInfo.samples.rbegin()->dataLength); + if (sampleDataEndOffset > io.size || sampleDataEndOffset < 0) + { + ISO_LOG(LOG_ERROR, "Sample data offset exceeds movie fragment !\n"); + throw exception(); + } + } + } + } + } + else if (boxType == "mdat") + { + error = SkipAtom(io); + } + else + { + ISO_LOG(LOG_WARNING, "Skipping root level box of unknown type '%s'\n", boxType.c_str()); + error = SkipAtom(io); + } + } + } + + if (!error && (!ftypFound || !moovFound)) + { + error = OMAF_INVALID_FILE_HEADER; + } + + if (!error) + { + RefreshCompTimes(initSegId, segIndex); + + if ((!io.strIO->IsStreamGood()) && (!io.strIO->IsReachEOS())) + { + ISO_LOG(LOG_ERROR, "Stream is Good? %d\n", int32_t(io.strIO->IsStreamGood())); + ISO_LOG(LOG_ERROR, "Reach to EOS? %d\n", int32_t(io.strIO->IsReachEOS())); + return OMAF_FILE_READ_ERROR; + } + io.strIO->ClearStatus(); + m_initSegProps[initSegId].fileProperty = GetFileProps(); + m_readerSte = ReaderState::READY; + } + else + { + DisableInitSeg(initSegId.GetIndex()); + } + return error; +} + + +Mp4Reader::ItemInfoMap Mp4Reader::ExtractItemInfoMap(const MetaAtom& metaAtom) const +{ + ItemInfoMap itemInfoMap; + + const uint32_t countNumberOfItems = metaAtom.GetItemInfoAtom().GetEntryCount(); + for (uint32_t i = 0; i < countNumberOfItems; ++i) + { + const ItemInfoEntry& item = metaAtom.GetItemInfoAtom().GetItemInfoEntry(i); + FourCCInt type = item.GetItemType(); + const uint32_t itemId = item.GetItemID(); + if (!IsImageType(type)) + { + ItemInfo itemInfo; + itemInfo.type = type.GetString(); + itemInfoMap.insert({itemId, itemInfo}); + } + } + + return itemInfoMap; +} + +FileProperty Mp4Reader::GetFileProps() const +{ + FileProperty fileProperty; + + for (const auto& initSegment : m_initSegProps) + { + for (const auto& trackProps : initSegment.second.trackProperties) + { + if (trackProps.second.trackProperty.HasProperty(FeatureOfTrack::HasAlternatives)) + { + fileProperty.SetProperty(FeatureOfFile::CONTAIN_ALT_TRACKS); + } + } + } + + return fileProperty; +} + +int32_t Mp4Reader::ConvertStrBytesToInt(SegmentIO& io, const unsigned int count, int64_t& result) +{ + int64_t value = 0; + for (unsigned int i = 0; i < count; ++i) + { + value = (value << 8) | static_cast(io.strIO->GetOneByte()); + if (!io.strIO->IsStreamGood()) + { + return OMAF_FILE_READ_ERROR; + } + } + + result = value; + return ERROR_NONE; +} + +int32_t Mp4Reader::ReadAtomParams(SegmentIO& io, std::string& boxType, int64_t& boxSize) +{ + const int64_t startLocation = io.strIO->TellOffset(); + + int32_t error = ConvertStrBytesToInt(io, 4, boxSize); + if (error) + { + return error; + } + + static const size_t TYPE_LENGTH = 4; + boxType.resize(TYPE_LENGTH); + io.strIO->ReadStream(&boxType[0], TYPE_LENGTH); + if (!io.strIO->IsStreamGood()) + { + return OMAF_FILE_READ_ERROR; + } + + if (boxSize == 1) + { + error = ConvertStrBytesToInt(io, 8, boxSize); + if (error) + { + return error; + } + } + + int64_t boxEndOffset = startLocation + boxSize; + if (boxSize < 8 || (boxEndOffset < 8) || ((io.size > 0) && (boxEndOffset 
> io.size))) + { + return OMAF_FILE_READ_ERROR; + } + + LocateToOffset(io, startLocation); + if (!io.strIO->IsStreamGood()) + { + return OMAF_FILE_READ_ERROR; + } + return ERROR_NONE; +} + +int32_t Mp4Reader::ReadAtom(SegmentIO& io, Stream& bitstream) +{ + std::string boxType; + int64_t boxSize = 0; + + int32_t error = ReadAtomParams(io, boxType, boxSize); + if (error) + { + return error; + } + + std::vector data((uint64_t) boxSize); + io.strIO->ReadStream(reinterpret_cast(data.data()), boxSize); + if (!io.strIO->IsStreamGood()) + { + return OMAF_FILE_READ_ERROR; + } + bitstream.Clear(); + bitstream.Reset(); + bitstream.WriteArray(data, uint64_t(boxSize)); + return ERROR_NONE; +} + +int32_t Mp4Reader::SkipAtom(SegmentIO& io) +{ + const int64_t startLocation = io.strIO->TellOffset(); + + std::string boxType; + int64_t boxSize = 0; + int32_t error = ReadAtomParams(io, boxType, boxSize); + if (error) + { + return error; + } + + LocateToOffset(io, startLocation + boxSize); + if (!io.strIO->IsStreamGood()) + { + return OMAF_FILE_READ_ERROR; + } + return ERROR_NONE; +} + +int Mp4Reader::GetImgDims(unsigned int trackId, + const uint32_t itemId, + uint32_t& imgW, + uint32_t& imgH) const +{ + CtxType ctxType; + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + ItemId internalId(itemId); + SegmentId segIndex; + int result = GetSegIndex(trackIdPair, internalId, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + switch (ctxType) + { + case CtxType::META: + imgH = m_metaInfo.at(segTrackId).imageInfoMap.at(internalId).height; + imgW = m_metaInfo.at(segTrackId).imageInfoMap.at(internalId).width; + break; + + case CtxType::TRACK: + { + ItemId baseId; + auto& sampInfos = GetSampInfos(initSegId, segTrackId, baseId); + auto& sampInfo = sampInfos.at((internalId - baseId).GetIndex()); + imgH = sampInfo.height; + imgW = sampInfo.width; + break; + } + + default: + return OMAF_INVALID_MP4READER_CONTEXTID; + } + return ERROR_NONE; +} + +int32_t Mp4Reader::GetCtxItems(InitSegmentTrackId trackIdPair, IdVector& ctxItms) const +{ + ctxItms.clear(); + auto trackId = trackIdPair.second; + for (const auto& segment : CreateDashSegs(trackIdPair.first)) + { + SegmentId segIndex = segment.segmentId; + SegmentTrackId segTrackId = make_pair(segIndex, trackId); + switch(GetCtxType(trackIdPair)) + { + case CtxType::META: + { + ctxItms.reserve(m_metaInfo.at(segTrackId).imageInfoMap.size()); + for (const auto& imageInfo : m_metaInfo.at(segTrackId).imageInfoMap) + { + ctxItms.push_back(imageInfo.first.GetIndex()); + } + break; + } + case CtxType::TRACK: + { + if (CanFindTrackDecInfo(trackIdPair.first, segTrackId)) + { + ctxItms.reserve(GetTrackDecInfo(trackIdPair.first, segTrackId).samples.size()); + for (const auto& infoOfSamp : GetTrackDecInfo(trackIdPair.first, segTrackId).samples) + { + ctxItms.push_back(infoOfSamp.sampleId); + } + } + break; + } + default: + return OMAF_INVALID_MP4READER_CONTEXTID; + } + } + return ERROR_NONE; +} + +void Mp4Reader::GetAvcSpecData(const DataVector& rawItemData, DataVector& itemData) +{ + Stream bitstream(rawItemData); + while (bitstream.BytesRemain() > 0) + { + const unsigned int nalLength = bitstream.Read32(); + const uint8_t firstByte = bitstream.Read8(); + + itemData.push_back(0); + itemData.push_back(0); + itemData.push_back(0); + itemData.push_back(1); + + 
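+        // The 4-byte length prefix has been replaced by an Annex B start code
+        // (0x00000001); now append the NAL header byte and the rest of the payload.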
itemData.push_back(firstByte); + bitstream.ReadArray(itemData, nalLength - 1); + } +} + +void Mp4Reader::GetHevcSpecData(const DataVector& rawItemData, DataVector& itemData) +{ + Stream bitstream(rawItemData); + while (bitstream.BytesRemain() > 0) + { + const unsigned int nalLength = bitstream.Read32(); + const uint8_t firstByte = bitstream.Read8(); + HevcNalDefs naluType = HevcNalDefs((firstByte >> 1) & 0x3f); + + if (itemData.size() == 0 || naluType == HevcNalDefs::VPS || naluType == HevcNalDefs::SPS || + naluType == HevcNalDefs::PPS) + { + itemData.push_back(0); + } + itemData.push_back(0); + itemData.push_back(0); + itemData.push_back(1); + + itemData.push_back(firstByte); + bitstream.ReadArray(itemData, nalLength - 1); + } +} + +int32_t Mp4Reader::ParseAvcData( + char* buf, + uint32_t& bufSize) +{ + uint32_t outputOffset = 0; + uint32_t byteOffset = 0; + uint32_t nalLength = 0; + + while (outputOffset < bufSize) + { + nalLength = (uint8_t) buf[outputOffset + byteOffset]; + buf[outputOffset + byteOffset] = 0; + byteOffset++; + nalLength = (nalLength << 8) | (uint8_t) buf[outputOffset + byteOffset]; + buf[outputOffset + byteOffset] = 0; + byteOffset++; + nalLength = (nalLength << 8) | (uint8_t) buf[outputOffset + byteOffset]; + buf[outputOffset + byteOffset] = 0; + byteOffset++; + nalLength = (nalLength << 8) | (uint8_t) buf[outputOffset + byteOffset]; + buf[outputOffset + byteOffset] = 1; + byteOffset++; + outputOffset += nalLength + 4; + byteOffset = 0; + } + return ERROR_NONE; +} + +int32_t Mp4Reader::ParseHevcData( + char* buf, + uint32_t& bufSize) +{ + uint32_t outputOffset = 0; + uint32_t byteOffset = 0; + uint32_t nalLength = 0; + + while (outputOffset < bufSize) + { + nalLength = (uint8_t) buf[outputOffset + byteOffset]; + buf[outputOffset + byteOffset] = 0; + byteOffset++; + nalLength = (nalLength << 8) | (uint8_t) buf[outputOffset + byteOffset]; + buf[outputOffset + byteOffset] = 0; + byteOffset++; + nalLength = (nalLength << 8) | (uint8_t) buf[outputOffset + byteOffset]; + buf[outputOffset + byteOffset] = 0; + byteOffset++; + nalLength = (nalLength << 8) | (uint8_t) buf[outputOffset + byteOffset]; + buf[outputOffset + byteOffset] = 1; + byteOffset++; + outputOffset += nalLength + 4; + byteOffset = 0; + } + return ERROR_NONE; +} + +void Mp4Reader::ProcessDecoderConfigProperties(const InitSegmentTrackId /*trackIdPair*/) +{ +} + +void Mp4Reader::RefreshDecCodeType(InitSegmentId initSegIndex, + SegmentTrackId segTrackId, + const SampleInfoVector& sampleInfo, + size_t prevSampInfoSize) +{ + auto& decCodeType = m_initSegProps.at(initSegIndex) + .segPropMap.at(segTrackId.first) + .trackDecInfos.at(segTrackId.second) + .decoderCodeTypeMap; + for (size_t sampId = prevSampInfoSize; sampId < sampleInfo.size(); ++sampId) + { + auto& info = sampleInfo[sampId]; + decCodeType.insert( + make_pair(info.sampleId, info.sampleEntryType)); + } +} + +void Mp4Reader::RefreshItemToParamSet(ItemToParameterSetMap& itemToParameterSetMap, + InitSegmentTrackId trackIdPair, + const SampleInfoVector& sampleInfo, + size_t prevSampInfoSize) +{ + for (size_t sampId = prevSampInfoSize; sampId < sampleInfo.size(); ++sampId) + { + auto itemId = InitSegTrackIdPair(trackIdPair, (uint32_t) sampId); + auto parameterSetMapId = sampleInfo[sampId].sampleDescriptionIndex; + itemToParameterSetMap.insert(make_pair(itemId, parameterSetMapId)); + } +} + +void Mp4Reader::AddSegSeq(InitSegmentId initSegIndex, + SegmentId segIndex, + Sequence sequence) +{ + SegmentProperties& segProps = + 
m_initSegProps.at(initSegIndex).segPropMap[segIndex]; + segProps.sequences.insert(sequence); + auto& seqToSeg = m_initSegProps.at(initSegIndex).seqToSeg; + seqToSeg.insert(make_pair(sequence, segIndex)); +} + +TrackPropertiesMap Mp4Reader::FillTrackProps(InitSegmentId initSegId, + SegmentId segIndex, + MovieAtom& moovAtom) +{ + TrackPropertiesMap trackPropsMap; + + std::vector trackAtoms = moovAtom.GetTrackAtoms(); + for (auto trackAtom : trackAtoms) + { + TrackProperties trackProps; + pair initAndTrackDecInfo = + ExtractTrackDecInfo(trackAtom, moovAtom.GetMovieHeaderAtom().GetTimeScale()); + TrackBasicInfo& basicTrackInfo = initAndTrackDecInfo.first; + TrackDecInfo& trackDecInfo = initAndTrackDecInfo.second; + + if (basicTrackInfo.sampleEntryType == "hvc1" || basicTrackInfo.sampleEntryType == "hev1" || + basicTrackInfo.sampleEntryType == "hvc2" || basicTrackInfo.sampleEntryType == "avc1" || + basicTrackInfo.sampleEntryType == "avc3" || basicTrackInfo.sampleEntryType == "mp4a" || + basicTrackInfo.sampleEntryType == "urim" || basicTrackInfo.sampleEntryType == "mp4v" || + basicTrackInfo.sampleEntryType == "invo") + { + ContextId ctxIndex = ContextId(trackAtom->GetTrackHeaderAtom().GetTrackID()); + InitSegmentTrackId trackIdPair = make_pair(initSegId, ctxIndex); + SegmentTrackId segTrackId = make_pair(segIndex, ctxIndex); + + trackDecInfo.samples = GenSampInfo(trackAtom); + auto& initSegProps = m_initSegProps.at(initSegId); + RefreshItemToParamSet( + initSegProps.segPropMap[segIndex].itemToParameterSetMap, trackIdPair, + trackDecInfo.samples); + + auto& storedTrackInfo = + initSegProps.segPropMap[segIndex].trackDecInfos[trackIdPair.second]; + storedTrackInfo = move(trackDecInfo); + m_initSegProps[initSegId].basicTrackInfos[trackIdPair.second] = + move(basicTrackInfo); + + FillSampEntryMap(trackAtom, initSegId); + RefreshDecCodeType(initSegId, segTrackId, storedTrackInfo.samples); + + trackProps.trackProperty = GetTrackProps(trackAtom); + trackProps.referenceTrackIds = GetRefTrackIds(trackAtom); + trackProps.trackGroupIds = GetTrackGroupIds(trackAtom); + trackProps.alternateTrackIds = GetAlternateTrackIds(trackAtom, moovAtom); + trackProps.alternateGroupId = trackAtom->GetTrackHeaderAtom().GetAlternateGroup(); + if (trackAtom->GetEditAtom().get() && trackAtom->GetEditAtom()->GetEditListAtom()) + { + trackProps.editBox = trackAtom->GetEditAtom(); + } + + CtxInfo contextInfo; + contextInfo.ctxType = CtxType::TRACK; + m_ctxInfoMap[trackIdPair] = contextInfo; + + trackPropsMap.insert(make_pair(trackIdPair.second, move(trackProps))); + } + } + + for (auto& trackProps : trackPropsMap) + { + if (trackProps.second.trackProperty.HasProperty(FeatureOfTrack::IsVideo)) + { + for (auto& associatedTrack : trackProps.second.referenceTrackIds["vdep"]) + { + if (trackPropsMap.count(associatedTrack)) + { + trackPropsMap[associatedTrack].trackProperty.SetProperty( + FeatureOfTrack::HasAssociatedDepthTrack); + } + + } + + if (trackProps.second.referenceTrackIds["vdep"].empty()) + { + m_initSegProps.at(initSegId).corresTrackId = trackProps.first; + } + + if (!(trackProps.second.referenceTrackIds["scal"].empty())) + { + m_initSegProps.at(initSegId).corresTrackId = trackProps.first; + } + } + } + + return trackPropsMap; +} + +ItemId Mp4Reader::GetSuccedentItmId(InitSegmentId initSegIndex, + SegmentTrackId segTrackId) const +{ + SegmentId segIndex = segTrackId.first; + ContextId ctxId = segTrackId.second; + ItemId nextItemIdBase; + if (m_initSegProps.count(initSegIndex) && + 
m_initSegProps.at(initSegIndex).segPropMap.count(segIndex)) + { + auto& segProps = + m_initSegProps.at(initSegIndex).segPropMap.at(segIndex); + if (segProps.trackDecInfos.count(ctxId)) + { + auto& trackDecInfo = segProps.trackDecInfos.at(ctxId); + if (trackDecInfo.samples.size()) + { + nextItemIdBase = trackDecInfo.samples.rbegin()->sampleId + 1; + } + else + { + nextItemIdBase = trackDecInfo.itemIdBase; + } + } + } + return nextItemIdBase; +} + +ItemId Mp4Reader::GetPrevItemId(InitSegmentId initSegIndex, + SegmentTrackId segTrackId) const +{ + SegmentId prevSegId; + ContextId ctxId = segTrackId.second; + ItemId itemId; + SegmentId curSegmentId = segTrackId.first; + + while (FoundPrevSeg(initSegIndex, curSegmentId, prevSegId) && itemId == ItemId(0)) + { + if (m_initSegProps.at(initSegIndex) + .segPropMap.at(prevSegId) + .trackDecInfos.count(ctxId)) + { + itemId = GetSuccedentItmId(initSegIndex, SegmentTrackId(prevSegId, ctxId)); + } + curSegmentId = prevSegId; + } + return itemId; +} + +void Mp4Reader::AddTrackProps( + InitSegmentId initSegId, + SegmentId segIndex, + MovieFragmentAtom& moofAtom, + const CtxIdPresentTSMap& earliestPTSTS) +{ + InitSegmentProperties& initSegProps = m_initSegProps[initSegId]; + SegmentProperties& segProps = initSegProps.segPropMap[segIndex]; + + AddSegSeq(initSegId, segIndex, m_nextSeq); + m_nextSeq = m_nextSeq.GetIndex() + 1; + + uint64_t sampDataOffset = 0; + bool firstTrackFragment = true; + + std::vector trackFragmentAtoms = moofAtom.GetTrackFragmentAtoms(); + for (auto& trackFragmentAtom : trackFragmentAtoms) + { + auto ctxId = ContextId(trackFragmentAtom->GetTrackFragmentHeaderAtom().GetTrackId()); + SegmentId prevSegId; + + TrackDecInfo& trackDecInfo = segProps.trackDecInfos[ctxId]; + size_t prevSampInfoSize = trackDecInfo.samples.size(); + bool hasSamps = prevSampInfoSize > 0; + if (auto* timeAtom = trackFragmentAtom->GetTrackFragmentBaseMediaDecodeTimeAtom()) + { + trackDecInfo.nextPTSTS = PrestTS(timeAtom->GetBaseMediaDecodeTime()); + } + else if (!hasSamps) + { + auto it = earliestPTSTS.find(ctxId); + if (it != earliestPTSTS.end()) + { + trackDecInfo.nextPTSTS = it->second; + } + else + { + trackDecInfo.nextPTSTS = 0; + } + } + ItemId segmentItemIdBase = + hasSamps ? trackDecInfo.itemIdBase + : GetPrevItemId(initSegId, SegmentTrackId(segIndex, ContextId(ctxId))); + InitSegmentTrackId trackIdPair = make_pair(initSegId, ctxId); + const TrackBasicInfo& basicTrackInfo = GetTrackBasicInfo(trackIdPair); + uint32_t sampDescId = trackFragmentAtom->GetTrackFragmentHeaderAtom().GetSampleDescrIndex(); + + std::vector trackRunAtoms = trackFragmentAtom->GetTrackRunAtoms(); + for (const auto trackRunAtom : trackRunAtoms) + { + ItemId trackrunItemIdBase = + trackDecInfo.samples.size() > 0 ? 
trackDecInfo.samples.back().sampleId + 1 : segmentItemIdBase; + uint64_t baseDataOffset = 0; + if ((trackFragmentAtom->GetTrackFragmentHeaderAtom().GetFlags() & + TrackFragmentHeaderAtom::pDataOffset) != 0) + { + baseDataOffset = trackFragmentAtom->GetTrackFragmentHeaderAtom().GetBaseDataOffset(); + } + else if ((trackFragmentAtom->GetTrackFragmentHeaderAtom().GetFlags() & + TrackFragmentHeaderAtom::IsBaseMoof) != 0) + { + baseDataOffset = moofAtom.GetMoofFirstByteOffset(); + } + else + { + if (firstTrackFragment) + { + baseDataOffset = moofAtom.GetMoofFirstByteOffset(); + } + else + { + baseDataOffset = sampDataOffset; + } + } + if ((trackRunAtom->GetFlags() & TrackRunAtom::pDataOffset) != 0) + { + baseDataOffset += uint32_t(trackRunAtom->GetDataOffset()); + } + + AddSampsToTrackDecInfo(trackDecInfo, initSegProps, basicTrackInfo, + initSegProps.trackProperties.at(ctxId), baseDataOffset, + sampDescId, segmentItemIdBase, trackrunItemIdBase, trackRunAtom); + } + trackDecInfo.itemIdBase = segmentItemIdBase; + SegmentTrackId segTrackId = make_pair(segIndex, ctxId); + RefreshDecCodeType(initSegId, segTrackId, trackDecInfo.samples, prevSampInfoSize); + RefreshItemToParamSet( + m_initSegProps[initSegId].segPropMap[segIndex].itemToParameterSetMap, + trackIdPair, trackDecInfo.samples, prevSampInfoSize); + + if (trackDecInfo.samples.size()) + { + sampDataOffset = + trackDecInfo.samples.rbegin()->dataOffset + trackDecInfo.samples.rbegin()->dataLength; + } + firstTrackFragment = false; + } +} + +void Mp4Reader::AddSampsToTrackDecInfo(TrackDecInfo& trackDecInfo, + const InitSegmentProperties& initSegProps, + const TrackBasicInfo& basicTrackInfo, + const TrackProperties& trackProps, + const uint64_t baseDataOffset, + const uint32_t sampDescId, + ItemId itemIdBase, + ItemId trackrunItemIdBase, + const TrackRunAtom* trackRunAtom) +{ + using PMapWrap = DecodePts::PMap; + using PMapTSWrap = DecodePts::PMapTS; + using PMapIt = DecodePts::PMap::iterator; + using PMapTSIt = DecodePts::PMapTS::iterator; + const std::vector& samples = trackRunAtom->GetSampleDetails(); + const uint32_t sampCnt = static_cast(samples.size()); + const DecodePts::SampleIndex itemIdOffset = trackrunItemIdBase.GetIndex() - itemIdBase.GetIndex(); + + DecodePts ptsInfo; + if (trackProps.editBox) + { + trackDecInfo.hasEditList = true; + ptsInfo.SetAtom(trackProps.editBox->GetEditListAtom(), initSegProps.movieTScale, + basicTrackInfo.timeScale); + } + ptsInfo.SetAtom(trackRunAtom); + ptsInfo.UnravelTrackRun(); + PMapWrap sampPrestTMap; + PMapTSWrap sampPrestTMapTS; + ptsInfo.SetLocalTime(static_cast(trackDecInfo.nextPTSTS)); + ptsInfo.GetTimeTrackRun(basicTrackInfo.timeScale, sampPrestTMap); + ptsInfo.GetTimeTrackRunTS(sampPrestTMapTS); + PMapIt iter1 = sampPrestTMap.begin(); + for ( ; iter1 != sampPrestTMap.end(); iter1++) + { + trackDecInfo.pMap.insert(make_pair(iter1->first, iter1->second + itemIdOffset)); + } + + PMapTSIt iter2 = sampPrestTMapTS.begin(); + for ( ; iter2 != sampPrestTMapTS.end(); iter2++) + { + trackDecInfo.pMapTS.insert(make_pair(iter2->first, iter2->second + itemIdOffset)); + } + + int64_t durTS = 0; + uint64_t sampDataOffset = baseDataOffset; + for (uint32_t sampId = 0; sampId < sampCnt; ++sampId) + { + SampleInfo infoOfSamp{}; + infoOfSamp.sampleId = (trackrunItemIdBase + ItemId(sampId)).GetIndex(); + infoOfSamp.sampleEntryType = basicTrackInfo.sampleEntryType; + infoOfSamp.sampleDescriptionIndex = sampDescId; + + infoOfSamp.dataOffset = sampDataOffset; + infoOfSamp.dataLength = samples.at(sampId).version0.pSize; + 
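+        // Samples of a track run are contiguous in 'mdat', so the next sample's
+        // offset is simply this sample's offset advanced by its data length.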
sampDataOffset += infoOfSamp.dataLength; + if (basicTrackInfo.sampleRes.count(sampDescId)) + { + infoOfSamp.width = basicTrackInfo.sampleRes.at(sampDescId).width; + infoOfSamp.height = basicTrackInfo.sampleRes.at(sampDescId).height; + } + else + { + infoOfSamp.width = 0; + infoOfSamp.height = 0; + } + infoOfSamp.sampleDuration = samples.at(sampId).version0.pDuration; + infoOfSamp.sampleType = samples.at(sampId).version0.pFlags.flags.sample_is_non_sync_sample == 0 + ? OUTPUT_REF_FRAME + : OUTPUT_NONREF_FRAME; + infoOfSamp.sampleFlags.flagsAsUInt = samples.at(sampId).version0.pFlags.flagsAsUInt; + durTS += samples.at(sampId).version0.pDuration; + trackDecInfo.samples.push_back(infoOfSamp); + } + trackDecInfo.durationTS += durTS; + trackDecInfo.nextPTSTS += durTS; +} + +IdVector Mp4Reader::GetAlternateTrackIds(TrackAtom* trackAtom, MovieAtom& moovAtom) const +{ + IdVector trackIds; + const uint16_t groupId = trackAtom->GetTrackHeaderAtom().GetAlternateGroup(); + + if (groupId == 0) + { + return trackIds; + } + + unsigned int trackId = trackAtom->GetTrackHeaderAtom().GetTrackID(); + std::vector trackAtomVec = moovAtom.GetTrackAtoms(); + std::vector::iterator iter = trackAtomVec.begin(); + for ( ; iter != trackAtomVec.end(); iter++) + { + const uint32_t foundTrackId = (*iter)->GetTrackHeaderAtom().GetTrackID(); + if ((trackId != foundTrackId) && + (groupId == (*iter)->GetTrackHeaderAtom().GetAlternateGroup())) + { + trackIds.push_back(foundTrackId); + } + } + + return trackIds; +} + +TrackProperty Mp4Reader::GetTrackProps(TrackAtom* trackAtom) const +{ + TrackProperty trackProperty; + + TrackHeaderAtom tkhdAtom = trackAtom->GetTrackHeaderAtom(); + HandlerAtom& handlerAtom = trackAtom->GetMediaAtom().GetHandlerAtom(); + SampleTableAtom& stbl = trackAtom->GetMediaAtom().GetMediaInformationAtom().GetSampleTableAtom(); + SampleDescriptionAtom& stsd = stbl.GetSampleDescriptionAtom(); + + if (tkhdAtom.GetAlternateGroup() != 0) + { + trackProperty.SetProperty(FeatureOfTrack::HasAlternatives); + } + + if (stbl.GetSampleToGroupAtoms().size() != 0) + { + trackProperty.SetProperty(FeatureOfTrack::HasSampleGroups); + } + + if (handlerAtom.GetHandlerType() == "vide") + { + trackProperty.SetProperty(FeatureOfTrack::IsVideo); + + const std::vector sampleEntries = stsd.GetSampleEntries(); + for (const auto& sampleEntry : sampleEntries) + { + if (sampleEntry->IsStereoscopic3DAtomPresent()) + { + trackProperty.SetImmersiveProperty(ImmersiveProperty::HasVRStereoscopic3D); + } + if (sampleEntry->IsSphericalVideoV2AtomAtomPresent()) + { + trackProperty.SetImmersiveProperty(ImmersiveProperty::HasVRV2SpericalVideo); + } + } + + if (trackAtom->GetHasSphericalVideoV1Atom()) + { + trackProperty.SetImmersiveProperty(ImmersiveProperty::HasVRV1SpericalVideo); + if (trackAtom->GetSphericalVideoV1Atom().GetGeneralMetaData().stereoType != + SphericalVideoV1Atom::StereoTypeV1 ::UNDEFINED) + { + trackProperty.SetImmersiveProperty(ImmersiveProperty::HasVRStereoscopic3D); + } + } + } + + if (handlerAtom.GetHandlerType() == "soun") + { + trackProperty.SetProperty(FeatureOfTrack::IsAudio); + + const std::vector sampleEntries = stsd.GetSampleEntries(); + for (const auto& sampleEntry : sampleEntries) + { + if (sampleEntry->GetType() == "mp4a") + { + if (sampleEntry->HasChannelLayoutAtom()) + { + if (sampleEntry->GetChannelLayoutAtom().GetStreamStructure() == 1) // = channelStructured + { + trackProperty.SetImmersiveProperty(ImmersiveProperty::IsAudioLSpeakerChnlStructTrack); + } + } + if (sampleEntry->HasSpatialAudioAtom()) + { + 
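+                    // A spatial audio ('SA3D') atom in the sample entry marks this
+                    // audio track as a VR spatial audio track.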
trackProperty.SetImmersiveProperty(ImmersiveProperty::IsVRSpatialAudioTrack); + } + if (sampleEntry->HasNonDiegeticAudioAtom()) + { + trackProperty.SetImmersiveProperty(ImmersiveProperty::IsVRNonDiegeticAudioTrack); + } + break; + } + } + } + + if (handlerAtom.GetHandlerType() == "meta") + { + trackProperty.SetProperty(FeatureOfTrack::IsMetadata); + + const std::vector sampleEntries = stsd.GetSampleEntries(); + for (const auto& sampleEntry : sampleEntries) + { + if (sampleEntry->GetType() == "urim") + { + UriMetaSampleEntryAtom* uriMetaSampEntry = (UriMetaSampleEntryAtom*) sampleEntry; + UriMetaSampleEntryAtom::VRTMDType vrTMDType = uriMetaSampEntry->GetVRTMDType(); + + switch (vrTMDType) + { + case UriMetaSampleEntryAtom::VRTMDType::UNKNOWN: + default: + break; + } + break; + } + } + } + return trackProperty; +} + +TypeToCtxIdsMap Mp4Reader::GetRefTrackIds(TrackAtom* trackAtom) const +{ + TypeToCtxIdsMap trackReferenceMap; + + if (trackAtom->GetHasTrackReferences()) + { + const std::vector& trackReferenceTypeAtoms = + trackAtom->GetTrackReferenceAtom().GetTypeAtoms(); + for (const auto& trackReferenceTypeAtom : trackReferenceTypeAtoms) + { + trackReferenceMap[trackReferenceTypeAtom.GetType()] = map( + trackReferenceTypeAtom.GetTrackIds(), [](std::uint32_t x) { + if ((x >> 16) != 0) + { + ISO_LOG(LOG_ERROR, "Context ID is invalid !\n"); + throw exception(); + } + return ContextId(x); + }); + } + } + + return trackReferenceMap; +} + +TypeToIdsMap Mp4Reader::GetTrackGroupIds(TrackAtom* trackAtom) const +{ + std::map trackGroupMap; + + if (trackAtom->GetHasTrackGroup()) + { + const std::vector& trackGroupTypeAtoms = + trackAtom->GetTrackGroupAtom().GetTrackGroupTypeAtoms(); + for (unsigned int i = 0; i < trackGroupTypeAtoms.size(); i++) + { + IdVector trackGroupID; + trackGroupID.push_back(trackGroupTypeAtoms.at(i).GetTrackGroupId()); + trackGroupMap[trackGroupTypeAtoms.at(i).GetType()] = trackGroupID; + } + } + + return trackGroupMap; +} + +TypeToIdsMap Mp4Reader::GetSampGroupIds(TrackAtom* trackAtom) const +{ + std::map sampleGroupIdsMap; + + SampleTableAtom& stbl = trackAtom->GetMediaAtom().GetMediaInformationAtom().GetSampleTableAtom(); + const std::vector sampleToGroupAtomes = stbl.GetSampleToGroupAtoms(); + for (const auto& sampleToGroupAtom : sampleToGroupAtomes) + { + const unsigned int numberOfSamps = sampleToGroupAtom.GetNumberOfSamples(); + IdVector sampleIds(numberOfSamps); + for (unsigned int i = 0; i < numberOfSamps; ++i) + { + if (sampleToGroupAtom.GetSampleGroupDescriptionIndex(i) != 0) + { + sampleIds.push_back(i); + } + } + sampleGroupIdsMap[sampleToGroupAtom.GetGroupingType()] = sampleIds; + } + + return sampleGroupIdsMap; +} + +pair Mp4Reader::ExtractTrackDecInfo(TrackAtom* trackAtom, + uint32_t movieTimescale) const +{ + TrackBasicInfo basicTrackInfo; + TrackDecInfo trackDecInfo; + + MediaHeaderAtom& mdhdAtom = trackAtom->GetMediaAtom().GetMediaHeaderAtom(); + SampleTableAtom& stbl = trackAtom->GetMediaAtom().GetMediaInformationAtom().GetSampleTableAtom(); + TrackHeaderAtom& trackHeaderAtom = trackAtom->GetTrackHeaderAtom(); + const TimeToSampleAtom& timeToSampAtom = stbl.GetTimeToSampleAtom(); + shared_ptr compositionOffsetAtom = stbl.GetCompositionOffsetAtom(); + + basicTrackInfo.width = trackHeaderAtom.GetWidth(); + basicTrackInfo.height = trackHeaderAtom.GetHeight(); + + SampleDescriptionAtom& stsd = stbl.GetSampleDescriptionAtom(); + FourCCInt handlerType = trackAtom->GetMediaAtom().GetHandlerAtom().GetHandlerType(); + + if (handlerType == "vide") + { + 
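+        // The first entry in the 'stsd' sample description atom determines the
+        // track's sample entry type (e.g. hvc1/avc1); audio and metadata tracks
+        // are handled the same way below.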
VisualSampleEntryAtom* sampleEntry = + stsd.GetSampleEntry(1); + if (sampleEntry) + { + basicTrackInfo.sampleEntryType = sampleEntry->GetType(); + } + } + else if (handlerType == "soun") + { + AudioSampleEntryAtom* sampleEntry = + stsd.GetSampleEntry(1); + if (sampleEntry) + { + basicTrackInfo.sampleEntryType = sampleEntry->GetType(); + } + } + else if (handlerType == "meta") + { + MetaDataSampleEntryAtom* sampleEntry = + stsd.GetSampleEntry(1); + if (sampleEntry) + { + basicTrackInfo.sampleEntryType = sampleEntry->GetType(); + } + } + + basicTrackInfo.timeScale = mdhdAtom.GetTimeScale(); + + shared_ptr editAtom = trackAtom->GetEditAtom(); + DecodePts ptsInfo; + ptsInfo.SetAtom(&timeToSampAtom); + ptsInfo.SetAtom(compositionOffsetAtom.get()); + if (editAtom) + { + trackDecInfo.hasEditList = true; + const EditListAtom* editListAtom = editAtom->GetEditListAtom(); + ptsInfo.SetAtom(editListAtom, movieTimescale, mdhdAtom.GetTimeScale()); + } + ptsInfo.Unravel(); + + trackDecInfo.durationTS = PrestTS(ptsInfo.GetSpan()); + trackDecInfo.pMap = ptsInfo.GetTime(basicTrackInfo.timeScale); + trackDecInfo.pMapTS = ptsInfo.GetTimeTS(); + + if (trackAtom->GetHasTrackTypeAtom()) + { + trackDecInfo.hasTtyp = true; + trackDecInfo.ttyp = trackAtom->GetTrackTypeAtom(); + } + + return make_pair(basicTrackInfo, trackDecInfo); +} + +SampleInfoVector Mp4Reader::GenSampInfo(TrackAtom* trackAtom) const +{ + SampleInfoVector sampInfos; + + SampleTableAtom& stbl = trackAtom->GetMediaAtom().GetMediaInformationAtom().GetSampleTableAtom(); + SampleDescriptionAtom& stsd = stbl.GetSampleDescriptionAtom(); + SampleToChunkAtom& stsc = stbl.GetSampleToChunkAtom(); + ChunkOffsetAtom& stco = stbl.GetChunkOffsetAtom(); + SampleSizeAtom& stsz = stbl.GetSampleSizeAtom(); + TimeToSampleAtom& stts = stbl.GetTimeToSampleAtom(); + const FourCCInt handlerType = trackAtom->GetMediaAtom().GetHandlerAtom().GetHandlerType(); + + const std::vector entrySize = stsz.GetEntrySize(); + const std::vector chunkOffsets = stco.GetChunkOffsets(); + const std::vector sampleDeltas = stts.GetSampleDeltas(); + + const unsigned int sampCnt = stsz.GetSampleNum(); + + if (sampCnt > entrySize.size() || sampCnt > sampleDeltas.size()) + { + ISO_LOG(LOG_ERROR, "Segment file header is not correct !\n"); + throw exception(); + } + + sampInfos.reserve(sampCnt); + + uint32_t prevChunkId = 0; + for (uint32_t sampId = 0; sampId < sampCnt; ++sampId) + { + SampleInfo infoOfSamp{}; + uint32_t sampDescId = 0; + if (!stsc.GetSampleDescrIndex(sampId, sampDescId)) + { + ISO_LOG(LOG_ERROR, "Segment file header is not correct !\n"); + throw exception(); + } + + infoOfSamp.sampleId = sampId; + infoOfSamp.dataLength = entrySize.at(sampId); + infoOfSamp.sampleDescriptionIndex = sampDescId; + + uint32_t chunkIndex = 0; + if (!stsc.GetSampleChunkIndex(sampId, chunkIndex)) + { + ISO_LOG(LOG_ERROR, "Segment file header is not correct !\n"); + throw exception(); + } + + if (chunkIndex != prevChunkId) + { + infoOfSamp.dataOffset = chunkOffsets.at(chunkIndex - 1); + prevChunkId = chunkIndex; + } + else + { + infoOfSamp.dataOffset = sampInfos.back().dataOffset + sampInfos.back().dataLength; + } + + infoOfSamp.sampleDuration = sampleDeltas.at(sampId); + + if (handlerType == "vide") + { + if (const VisualSampleEntryAtom* sampleEntry = + stsd.GetSampleEntry(sampDescId)) + { + infoOfSamp.width = sampleEntry->GetWidth(); + infoOfSamp.height = sampleEntry->GetHeight(); + infoOfSamp.sampleEntryType = sampleEntry->GetType(); + + infoOfSamp.sampleType = OUTPUT_NONREF_FRAME; + 
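+                // Video samples default to non-sync (non-reference) frames here;
+                // entries listed in the 'stss' sync sample atom are switched back
+                // to reference frames after this loop.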
infoOfSamp.sampleFlags.flags.sample_is_non_sync_sample = 1; + } + } + else if (handlerType == "soun") + { + if (const AudioSampleEntryAtom* sampleEntry = + stsd.GetSampleEntry(infoOfSamp.sampleDescriptionIndex.GetIndex())) + { + infoOfSamp.sampleEntryType = sampleEntry->GetType(); + infoOfSamp.sampleType = OUTPUT_REF_FRAME; + } + } + else if (handlerType == "meta") + { + if (const MetaDataSampleEntryAtom* sampleEntry = + stsd.GetSampleEntry(infoOfSamp.sampleDescriptionIndex.GetIndex())) + { + infoOfSamp.sampleEntryType = sampleEntry->GetType(); + infoOfSamp.sampleType = OUTPUT_REF_FRAME; + } + } + + sampInfos.push_back(infoOfSamp); + } + + if (stbl.HasSyncSampleAtom() && (handlerType == "vide")) + { + const std::vector syncSamps = stbl.GetSyncSampleAtom().get()->GetSyncSampleIds(); + for (unsigned int i = 0; i < syncSamps.size(); ++i) + { + uint32_t syncSamp = syncSamps.at(i) - 1; + auto& infoOfSamp = sampInfos.at(syncSamp); + infoOfSamp.sampleType = OUTPUT_REF_FRAME; + infoOfSamp.sampleFlags.flags.sample_is_non_sync_sample = 0; + } + } + + return sampInfos; +} + +MoovProperties Mp4Reader::ExtractMoovProps(const MovieAtom& moovAtom) const +{ + MoovProperties moovProperties; + moovProperties.fragmentDuration = 0; + + if (moovAtom.IsMovieExtendsAtomPresent()) + { + const MovieExtendsAtom* mvexAtom = moovAtom.GetMovieExtendsAtom(); + if (mvexAtom->IsMovieExtendsHeaderAtomPresent()) + { + const MovieExtendsHeaderAtom& mehdAtom = mvexAtom->GetMovieExtendsHeaderAtom(); + moovProperties.fragmentDuration = mehdAtom.GetFragmentDuration(); + } + + moovProperties.fragmentSampleDefaults.clear(); + for (const auto& track : mvexAtom->GetTrackExtendsAtoms()) + { + moovProperties.fragmentSampleDefaults.push_back(track->GetFragmentSampleDefaults()); + } + } + + return moovProperties; +} + +void Mp4Reader::FillSampEntryMap(TrackAtom* trackAtom, InitSegmentId initSegId) +{ + InitSegmentTrackId trackIdPair = + make_pair(initSegId, ContextId(trackAtom->GetTrackHeaderAtom().GetTrackID())); + SampleDescriptionAtom& stsd = + trackAtom->GetMediaAtom().GetMediaInformationAtom().GetSampleTableAtom().GetSampleDescriptionAtom(); + auto& parameterSetMaps = + m_initSegProps[trackIdPair.first].basicTrackInfos[trackIdPair.second].parameterSetMaps; + + { + const std::vector sampleEntries = stsd.GetSampleEntries(); + unsigned int index = 1; + for (auto& entry : sampleEntries) + { + ParameterSetMap parameterSetMap = + GenDecoderParameterSetMap(entry->GetHevcConfigurationAtom().GetConfiguration()); + parameterSetMaps.insert(std::make_pair(SmpDesIndex(index), parameterSetMap)); + + const auto* clapAtom = entry->GetClap(); + if (clapAtom != NULL) + { + ISO_LOG(LOG_INFO, "CleanApertureAtom reading not implemented\n"); + } + + SampleRes size = {entry->GetWidth(), entry->GetHeight()}; + GetTrackBasicInfo(trackIdPair).sampleRes.insert(make_pair(index, size)); + + if (entry->IsStereoscopic3DAtomPresent()) + { + GetTrackBasicInfo(trackIdPair) + .st3dProperties.insert(make_pair(index, Genst3d(entry->GetStereoscopic3DAtom()))); + } + else if (trackAtom->GetHasSphericalVideoV1Atom()) + { + GetTrackBasicInfo(trackIdPair) + .st3dProperties.insert(make_pair(index, Genst3d(trackAtom->GetSphericalVideoV1Atom()))); + } + + if (entry->IsSphericalVideoV2AtomAtomPresent()) + { + GetTrackBasicInfo(trackIdPair) + .sv3dProperties.insert(make_pair(index, Gensv3d(entry->GetSphericalVideoV2Atom()))); + } + else if (trackAtom->GetHasSphericalVideoV1Atom()) + { + GetTrackBasicInfo(trackIdPair) + .sphericalV1Properties.insert( + make_pair(index, 
GenSphericalVideoV1Property(trackAtom->GetSphericalVideoV1Atom()))); + } + + FillRinfAtomInfo(GetTrackBasicInfo(trackIdPair), index, *entry); + GetTrackBasicInfo(trackIdPair) + .nalLengthSizeMinus1.insert(make_pair( + index, entry->GetHevcConfigurationAtom().GetConfiguration().GetLengthSizeMinus1())); + + ++index; + } + } + + { + const std::vector sampleEntries = stsd.GetSampleEntries(); + unsigned int index = 1; + for (auto& entry : sampleEntries) + { + ParameterSetMap parameterSetMap = + GenDecoderParameterSetMap(entry->GetAvcConfigurationAtom().GetConfiguration()); + parameterSetMaps.insert(std::make_pair(index, parameterSetMap)); + + const auto* clapAtom = entry->GetClap(); + if (clapAtom != NULL) + { + ISO_LOG(LOG_INFO, "CleanApertureAtom reading not implemented\n"); + } + + SampleRes size = {entry->GetWidth(), entry->GetHeight()}; + GetTrackBasicInfo(trackIdPair).sampleRes.insert(make_pair(index, size)); + + if (entry->IsStereoscopic3DAtomPresent()) + { + GetTrackBasicInfo(trackIdPair) + .st3dProperties.insert(make_pair(index, Genst3d(entry->GetStereoscopic3DAtom()))); + } + else if (trackAtom->GetHasSphericalVideoV1Atom()) + { + GetTrackBasicInfo(trackIdPair) + .st3dProperties.insert(make_pair(index, Genst3d(trackAtom->GetSphericalVideoV1Atom()))); + } + if (entry->IsSphericalVideoV2AtomAtomPresent()) + { + GetTrackBasicInfo(trackIdPair) + .sv3dProperties.insert(make_pair(index, Gensv3d(entry->GetSphericalVideoV2Atom()))); + } + else if (trackAtom->GetHasSphericalVideoV1Atom()) + { + GetTrackBasicInfo(trackIdPair) + .sphericalV1Properties.insert( + make_pair(index, GenSphericalVideoV1Property(trackAtom->GetSphericalVideoV1Atom()))); + } + + FillRinfAtomInfo(GetTrackBasicInfo(trackIdPair), index, *entry); + GetTrackBasicInfo(trackIdPair) + .nalLengthSizeMinus1.insert(make_pair( + index, entry->GetAvcConfigurationAtom().GetConfiguration().GetLengthSizeMinus1())); + + ++index; + } + } + + { + const std::vector sampleEntries = stsd.GetSampleEntries(); + unsigned int index = 1; + for (auto& entry : sampleEntries) + { + ParameterSetMap parameterSetMap = GenDecoderParameterSetMap(entry->GetESDAtom()); + parameterSetMaps.insert(std::make_pair(index, parameterSetMap)); + + if (entry->HasChannelLayoutAtom()) + { + GetTrackBasicInfo(trackIdPair) + .chnlProperties.insert(make_pair(index, GenChnl(entry->GetChannelLayoutAtom()))); + } + if (entry->HasSpatialAudioAtom()) + { + GetTrackBasicInfo(trackIdPair) + .sa3dProperties.insert(make_pair(index, GenSA3D(entry->GetSpatialAudioAtom()))); + } + ++index; + } + } +} + +void Mp4Reader::FillRinfAtomInfo(TrackBasicInfo& basicTrackInfo, + unsigned int index, + const SampleEntryAtom& entry) +{ + auto rinfAtom = entry.GetRestrictedSchemeInfoAtom(); + if (rinfAtom) + { + if (rinfAtom->GetSchemeType() == "podv") + { + auto& povdAtom = rinfAtom->GetProjectedOmniVideoAtom(); + + ProjFormat format = { + (OmniProjFormat) povdAtom.GetProjectionFormatAtom().GetProjectFormat()}; + basicTrackInfo.pfrmProperties.insert(make_pair(index, format)); + + if (povdAtom.HasRegionWisePackingAtom()) + { + basicTrackInfo.rwpkProperties.insert(make_pair(index, Genrwpk(povdAtom.GetRegionWisePackingAtom()))); + } + + if (povdAtom.HasCoverageInformationAtom()) + { + basicTrackInfo.coviProperties.insert( + make_pair(index, Gencovi(povdAtom.GetCoverageInformationAtom()))); + } + + if (povdAtom.HasRotationAtom()) + { + auto rotation = povdAtom.GetRotationAtom().GetRotation(); + basicTrackInfo.rotnProperties.insert( + make_pair(index, Rotation{rotation.yaw, rotation.pitch, 
rotation.roll})); + } + else + { + basicTrackInfo.rotnProperties.insert(make_pair(index, Rotation{0, 0, 0})); + } + } + + if (rinfAtom->HasSchemeTypeAtom()) + { + SchemeTypesPropertyInternal schemeTypes{}; + schemeTypes.mainScheme = rinfAtom->GetSchemeTypeAtom(); + for (auto compatSchemeType : rinfAtom->GetCompatibleSchemeTypes()) + { + schemeTypes.compatibleSchemes.push_back(*compatSchemeType); + } + basicTrackInfo.schemeTypesProperties.insert(make_pair(index, schemeTypes)); + } + + if (rinfAtom->HasStereoVideoAtom()) + { + VideoFramePackingType stviArrangement; + stviArrangement = (VideoFramePackingType) rinfAtom->GetStereoVideoAtom() + .GetStereoIndicationType() + .typePOVD.compositionType; + basicTrackInfo.stviProperties.insert(make_pair(index, stviArrangement)); + } + else if (rinfAtom->GetSchemeType() == "podv") + { + basicTrackInfo.stviProperties.insert(make_pair(index, VideoFramePackingType::OMNI_MONOSCOPIC)); + } + } +} + +void Mp4Reader::GenSegInAtom(const SegmentIndexAtom& sidx, + SegmentIndex& segIndex, + int64_t dataOffset) +{ + uint64_t offset = static_cast(dataOffset) + sidx.GetFirstOffset(); + uint64_t totalDur = sidx.GetEarliestPresentationTime(); + + std::vector segRefs = sidx.GetReferences(); + segIndex = VarLenArray(segRefs.size()); + + uint32_t segId = 0; + SegInfo *segInfo = NULL; + for ( ; segId < segRefs.size(); segId++) + { + segInfo = &(segIndex[segId]); + + segInfo->segmentId = segId + 1; + segInfo->referenceId = sidx.GetReferenceId(); + segInfo->timescale = sidx.GetTimescale(); + segInfo->referenceType = segRefs.at(segId).referenceType; + segInfo->earliestPTSinTS = totalDur; + totalDur += segRefs.at(segId).subsegmentDuration; + segInfo->durationInTS = segRefs.at(segId).subsegmentDuration; + segInfo->startDataOffset = offset; + segInfo->dataSize = segRefs.at(segId).referencedSize; + offset += segIndex[segId].dataSize; + segInfo->startsWithSAP = segRefs.at(segId).startsWithSAP; + segInfo->SAPType = segRefs.at(segId).sapType; + + } +} + +unsigned Mp4Reader::GenTrackId(InitSegmentTrackId idPair) const +{ + InitSegmentId initSegId = idPair.first; + ContextId ctxIndex = idPair.second; + + if ((ctxIndex.GetIndex() >> 16) != 0) + { + ISO_LOG(LOG_ERROR, "Segment file header is not correct !\n"); + throw exception(); + } + + unsigned int actualTrackId = (initSegId.GetIndex() << 16) | ctxIndex.GetIndex(); + return actualTrackId; +} + +InitSegmentTrackId Mp4Reader::MakeIdPair(unsigned id) const +{ + unsigned int combinedId = id; + + InitSegmentId initSegId = InitSegmentId((combinedId >> 16) & 0xffff); + ContextId ctxIndex = ContextId(combinedId & 0xffff); + + InitSegmentTrackId newIdPair = make_pair(initSegId, ctxIndex); + return newIdPair; +} + +uint64_t Mp4Reader::ParseNalLen(char* buffer) const +{ + uint64_t nalLen = 0; + size_t id = 0; + nalLen |= ((uint64_t)((uint8_t)(buffer[id]))) << 24; + id++; + nalLen |= (((uint64_t)((uint8_t)(buffer[id]))) << 16) & 0x0000000000ff0000; + id++; + nalLen |= (((uint64_t)((uint8_t)(buffer[id]))) << 8) & 0x000000000000ff00; + id++; + nalLen |= ((uint64_t)((uint8_t)(buffer[id]))) & 0x00000000000000ff; + id++; + return nalLen; +} + +void Mp4Reader::WriteNalLen(uint64_t length, char* buffer) const +{ + if (!buffer) + { + ISO_LOG(LOG_ERROR, "The buffer is NULL for Nal Length !\n"); + throw exception(); + } + + buffer[0] = (char)((0xff000000 & length) >> 24); + buffer[1] = (char)((0x00ff0000 & length) >> 16); + buffer[2] = (char)((0x0000ff00 & length) >> 8); + buffer[3] = (char)((uint8_t)length); +} + +template +VarLenArray 
makeVarLenArray(const CustomCont& customCont) +{ + VarLenArray varLenArray(customCont.size()); + for (typename CustomCont::const_iterator contIter = customCont.begin(); contIter != customCont.end(); ++contIter) + { + varLenArray.arrayElets[contIter - customCont.begin()] = T(*contIter); + } + return varLenArray; +} + +template +auto GenVarLenArrayMap(const CustomCont& customCont, CustomMap customMap) -> VarLenArray +{ + VarLenArray varLenArray(customCont.size()); + typename CustomCont::const_iterator contIter = customCont.begin(); + for ( ; contIter != customCont.end(); ++contIter) + { + varLenArray.arrayElets[contIter - customCont.begin()] = customMap(*contIter); + } + return varLenArray; +} + +template +VarLenArray> GenVarLenArray2d(const CustomCont& customCont) +{ + VarLenArray> varLenArray(customCont.size()); + typename CustomCont::const_iterator contIter = customCont.begin(); + for ( ; contIter != customCont.end(); ++contIter) + { + varLenArray.arrayElets[contIter - customCont.begin()] = makeVarLenArray(*contIter); + } + return varLenArray; +} + +template +VarLenArray GenFourCCVarLenArray(const CustomCont& customCont) +{ + VarLenArray varLenArray(customCont.size()); + typename CustomCont::const_iterator contIter = customCont.begin(); + for ( ; contIter != customCont.end(); ++contIter) + { + varLenArray.arrayElets[contIter - customCont.begin()] = FourCC(contIter->c_str()); + } + return varLenArray; +} + +int32_t Mp4Reader::GetMajorBrand(FourCC& majorBrand, + uint32_t initSegIndex, + uint32_t segIndex) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + bool isInitSegment = m_initSegProps.count(initSegIndex) > 0; + if (!isInitSegment) + { + return OMAF_INVALID_SEGMENT; + } + + bool isSegment = false; + if (segIndex != UINT32_MAX) + { + isSegment = m_initSegProps.at(initSegIndex).segPropMap.count(segIndex) > 0; + if (!isSegment) + { + return OMAF_INVALID_SEGMENT; + } + } + + majorBrand = FourCC((isSegment ? m_initSegProps.at(initSegIndex) + .segPropMap.at(segIndex) + .styp.GetMajorBrand() + : m_initSegProps.at(initSegIndex).ftyp.GetMajorBrand()) + .c_str()); + return ERROR_NONE; +} + + +int32_t Mp4Reader::GetMinorVersion(uint32_t& minorVersion, + uint32_t initSegIndex, + uint32_t segIndex) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + bool isInitSegment = m_initSegProps.count(initSegIndex) > 0; + if (!isInitSegment) + { + return OMAF_INVALID_SEGMENT; + } + + bool isSegment = false; + if (segIndex != UINT32_MAX) + { + isSegment = m_initSegProps.at(initSegIndex).segPropMap.count(segIndex) > 0; + if (!isSegment) + { + return OMAF_INVALID_SEGMENT; + } + } + + minorVersion = (isSegment ? m_initSegProps.at(initSegIndex) + .segPropMap.at(segIndex) + .styp.GetMinorVersion() + : m_initSegProps.at(initSegIndex).ftyp.GetMinorVersion()); + + return ERROR_NONE; +} + + +int32_t Mp4Reader::GetCompatibleBrands(VarLenArray& compatBrands, + uint32_t initSegIndex, + uint32_t segIndex) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + bool isInitSegment = m_initSegProps.count(initSegIndex) > 0; + if (!isInitSegment) + { + return OMAF_INVALID_SEGMENT; + } + + bool isSegment = false; + if (segIndex != UINT32_MAX) + { + isSegment = m_initSegProps.at(initSegIndex).segPropMap.count(segIndex) > 0; + if (!isSegment) + { + return OMAF_INVALID_SEGMENT; + } + } + + compatBrands = GenFourCCVarLenArray( + isSegment ? 
m_initSegProps.at(initSegIndex) + .segPropMap.at(segIndex) + .styp.GetCompatibleBrands() + : m_initSegProps.at(initSegIndex).ftyp.GetCompatibleBrands()); + + return ERROR_NONE; +} + +int32_t Mp4Reader::GetTrackInformations(VarLenArray& outTrackInfos) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + size_t totalSize = 0; + for (auto& propMap : m_initSegProps) + { + totalSize += propMap.second.basicTrackInfos.size(); + } + + outTrackInfos = VarLenArray(totalSize); + uint32_t basicTrackId = 0; + size_t offset = 0; + for (auto const& initSegment : m_initSegProps) + { + InitSegmentId initSegId = initSegment.first; + uint32_t newTrackId = basicTrackId; + + for (auto const& trackPropsKv : initSegment.second.trackProperties) + { + ContextId trackId = trackPropsKv.first; + const TrackProperties& trackProps = trackPropsKv.second; + outTrackInfos.arrayElets[newTrackId].initSegmentId = initSegId.GetIndex(); + outTrackInfos.arrayElets[newTrackId].trackId = GenTrackId(make_pair(initSegment.first, trackId)); + outTrackInfos.arrayElets[newTrackId].alternateGroupId = trackProps.alternateGroupId; + outTrackInfos.arrayElets[newTrackId].features = trackProps.trackProperty.GetFeatureMask(); + outTrackInfos.arrayElets[newTrackId].vrFeatures = trackProps.trackProperty.GetVRFeatureMask(); + outTrackInfos.arrayElets[newTrackId].timeScale = + m_initSegProps.at(initSegId).basicTrackInfos.at(trackId).timeScale; + outTrackInfos.arrayElets[newTrackId].frameRate = {}; + std::string tempURI = trackProps.trackURI; + tempURI.push_back('\0'); + outTrackInfos.arrayElets[newTrackId].trackURI = makeVarLenArray(tempURI); + outTrackInfos.arrayElets[newTrackId].alternateTrackIds = + makeVarLenArray(trackProps.alternateTrackIds); + + outTrackInfos.arrayElets[newTrackId].referenceTrackIds = + VarLenArray(trackProps.referenceTrackIds.size()); + offset = 0; + for (auto const& reference : trackProps.referenceTrackIds) + { + outTrackInfos.arrayElets[newTrackId].referenceTrackIds[offset].type = FourCC(reference.first.GetUInt32()); + outTrackInfos.arrayElets[newTrackId].referenceTrackIds[offset].trackIds = + GenVarLenArrayMap(reference.second, [&](ContextId aContextId) { + return GenTrackId({initSegment.first, aContextId}); + }); + offset++; + } + + outTrackInfos.arrayElets[newTrackId].trackGroupIds = + VarLenArray(trackProps.trackGroupIds.size()); + offset = 0; + for (auto const& group : trackProps.trackGroupIds) + { + outTrackInfos.arrayElets[newTrackId].trackGroupIds[offset].type = FourCC(group.first.GetUInt32()); + outTrackInfos.arrayElets[newTrackId].trackGroupIds[offset].trackIds = + makeVarLenArray(group.second); + offset++; + } + newTrackId++; + } + + std::map trackSampCounts; + + std::vector idPairVec; + { + uint32_t count = 0; + for (auto const& basicTrackInfosKv : m_initSegProps.at(initSegId).basicTrackInfos) + { + idPairVec.push_back(basicTrackInfosKv.first); + trackSampCounts[count + basicTrackId] = 0u; + ++count; + } + } + + + for (auto const& allSegmentProperties : initSegment.second.segPropMap) + { + if (allSegmentProperties.second.initSegmentId == initSegId) + { + auto& segTrackInfos = allSegmentProperties.second.trackDecInfos; + newTrackId = basicTrackId; + for (ContextId ctxId : idPairVec) + { + auto trackDecInfo = segTrackInfos.find(ctxId); + if (trackDecInfo != segTrackInfos.end()) + { + trackSampCounts[newTrackId] += trackDecInfo->second.samples.size(); + } + ++newTrackId; + } + } + } + + for (auto const& trackSampCount : trackSampCounts) + { + auto counttrackid = 
trackSampCount.first; + auto count = trackSampCount.second; + outTrackInfos.arrayElets[counttrackid].sampleProperties = VarLenArray(count); + outTrackInfos.arrayElets[counttrackid].maxSampleSize = 0; + } + + std::map sampOffset; + for (auto const& segment : CreateDashSegs(initSegId)) + { + newTrackId = basicTrackId; + std::vector::iterator ctxIdIter = idPairVec.begin(); + for ( ; ctxIdIter != idPairVec.end(); ctxIdIter++) + //for (ContextId ctxId : idPairVec) + { + std::map::const_iterator decInfoIter; + decInfoIter = segment.trackDecInfos.find((*ctxIdIter)); + //auto trackInfoIt = segment.trackDecInfos.find((*ctxIdIter)); + if (decInfoIter != segment.trackDecInfos.end()) + { + auto& trackDecInfo = decInfoIter->second; + offset = sampOffset[newTrackId]; + + if (trackDecInfo.hasTtyp) + { + auto& tAtom = trackDecInfo.ttyp; + outTrackInfos.arrayElets[newTrackId].hasTypeInformation = true; + + outTrackInfos.arrayElets[newTrackId].type.majorBrand = tAtom.GetMajorBrand().c_str(); + outTrackInfos.arrayElets[newTrackId].type.minorVersion = tAtom.GetMinorVersion(); + + std::vector convertedCompatibleBrands; + for (auto& compatibleBrand : tAtom.GetCompatibleBrands()) + { + convertedCompatibleBrands.push_back(FourCC(compatibleBrand.c_str())); + } + outTrackInfos.arrayElets[newTrackId].type.compatibleBrands = + makeVarLenArray(convertedCompatibleBrands); + } + else + { + outTrackInfos.arrayElets[newTrackId].hasTypeInformation = false; + } + + if (trackDecInfo.samples.size() > 0) + { + uint32_t delta; + if (trackDecInfo.samples.size() >= 3) + { + delta = trackDecInfo.samples.at(1).sampleDuration; + } + else + { + delta = trackDecInfo.samples.at(0).sampleDuration; + } + auto timeScale = + m_initSegProps.at(initSegId).basicTrackInfos.at((*ctxIdIter)).timeScale; + outTrackInfos.arrayElets[newTrackId].frameRate = RatValue{timeScale, delta}; + } + + for (auto const& sample : trackDecInfo.samples) + { + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleId = + ItemId(sample.sampleId).GetIndex(); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleEntryType = + FourCC(sample.sampleEntryType.GetUInt32()); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleDescriptionIndex = + sample.sampleDescriptionIndex.GetIndex(); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleType = sample.sampleType; + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].initSegmentId = + segment.initSegmentId.GetIndex(); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].segmentId = segment.segmentId.GetIndex(); + if (sample.compositionTimes.size()) + { + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].earliestTStamp = + sample.compositionTimes.at(0); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].earliestTStampTS = + sample.compositionTimesTS.at(0); + } + else + { + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].earliestTStamp = 0; + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].earliestTStampTS = 0; + } + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleFlags.flagsAsUInt = + sample.sampleFlags.flagsAsUInt; + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleDurationTS = + sample.sampleDuration; + + unsigned int sampleSize = sample.dataLength; + if (sampleSize > outTrackInfos.arrayElets[newTrackId].maxSampleSize) + { + outTrackInfos.arrayElets[newTrackId].maxSampleSize = sampleSize; + } + offset++; + } + sampOffset[newTrackId] = offset; + } + 
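+                    // sampOffset keeps the per-track write position so that sample properties
+                    // from consecutive DASH segments are appended contiguously; frameRate was
+                    // derived above as timescale / sampleDuration, preferring the second
+                    // sample's duration when at least three samples exist (presumably because
+                    // the first sample's duration can be atypical).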
++newTrackId; + } + } + + basicTrackId += static_cast(initSegment.second.basicTrackInfos.size()); + } + + return ERROR_NONE; +} + +int32_t Mp4Reader::GetTrackInformation(VarLenArray& outTrackInfos) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + size_t totalSize = m_initSegProps.size(); + outTrackInfos = std::move(VarLenArray(totalSize)); + uint32_t basicTrackId = 0; + size_t offset = 0; + for (auto const& initSegment : m_initSegProps) + { + InitSegmentId initSegId = initSegment.first; + uint32_t newTrackId = basicTrackId; + + auto trackPropsKv = --initSegment.second.trackProperties.end(); + ContextId trackId = trackPropsKv->first; + const TrackProperties& trackProps = trackPropsKv->second; + outTrackInfos.arrayElets[newTrackId].initSegmentId = initSegId.GetIndex(); + outTrackInfos.arrayElets[newTrackId].trackId = GenTrackId(make_pair(initSegment.first, trackId)); + outTrackInfos.arrayElets[newTrackId].alternateGroupId = trackProps.alternateGroupId; + outTrackInfos.arrayElets[newTrackId].features = trackProps.trackProperty.GetFeatureMask(); + outTrackInfos.arrayElets[newTrackId].vrFeatures = trackProps.trackProperty.GetVRFeatureMask(); + outTrackInfos.arrayElets[newTrackId].timeScale = + m_initSegProps.at(initSegId).basicTrackInfos.at(trackId).timeScale; + outTrackInfos.arrayElets[newTrackId].frameRate = {}; + std::string tempURI = trackProps.trackURI; + tempURI.push_back('\0'); + outTrackInfos.arrayElets[newTrackId].trackURI = makeVarLenArray(tempURI); + outTrackInfos.arrayElets[newTrackId].alternateTrackIds = + makeVarLenArray(trackProps.alternateTrackIds); + outTrackInfos.arrayElets[newTrackId].referenceTrackIds = + VarLenArray(trackProps.referenceTrackIds.size()); + offset = 0; + for (auto const& reference : trackProps.referenceTrackIds) + { + outTrackInfos.arrayElets[newTrackId].referenceTrackIds[offset].type = FourCC(reference.first.GetUInt32()); + outTrackInfos.arrayElets[newTrackId].referenceTrackIds[offset].trackIds = + GenVarLenArrayMap(reference.second, [&](ContextId aContextId) { + return GenTrackId({initSegment.first, aContextId}); + }); + offset++; + } + + outTrackInfos.arrayElets[newTrackId].trackGroupIds = + VarLenArray(trackProps.trackGroupIds.size()); + offset = 0; + for (auto const& group : trackProps.trackGroupIds) + { + outTrackInfos.arrayElets[newTrackId].trackGroupIds[offset].type = FourCC(group.first.GetUInt32()); + outTrackInfos.arrayElets[newTrackId].trackGroupIds[offset].trackIds = + makeVarLenArray(group.second); + offset++; + } + + std::map trackSampCounts; + + std::vector idPairVec; + auto basicTrackInfosKv = --m_initSegProps.at(initSegId).basicTrackInfos.end(); + idPairVec.push_back(basicTrackInfosKv->first); + trackSampCounts[basicTrackId] = 0u; + + for (auto const& allSegmentProperties : initSegment.second.segPropMap) + { + if (allSegmentProperties.second.initSegmentId == initSegId) + { + auto& segTrackInfos = allSegmentProperties.second.trackDecInfos; + newTrackId = basicTrackId; + auto ctxId = --idPairVec.end(); + auto trackDecInfo = segTrackInfos.find(*ctxId); + if (trackDecInfo != segTrackInfos.end()) + { + trackSampCounts[newTrackId] += trackDecInfo->second.samples.size(); + } + } + } + + auto trackSampCount = --trackSampCounts.end(); + auto counttrackid = trackSampCount->first; + auto count = trackSampCount->second; + outTrackInfos.arrayElets[counttrackid].sampleProperties = VarLenArray(count); + outTrackInfos.arrayElets[counttrackid].maxSampleSize = 0; + + std::map sampOffset; + for (auto const& segment : 
CreateDashSegs(initSegId)) + { + newTrackId = basicTrackId; + std::vector::iterator ctxIdIter = idPairVec.begin(); + ctxIdIter = --idPairVec.end(); + std::map::const_iterator decInfoIter; + decInfoIter = segment.trackDecInfos.find((*ctxIdIter)); + if (decInfoIter != segment.trackDecInfos.end()) + { + auto& trackDecInfo = decInfoIter->second; + offset = sampOffset[newTrackId]; + + if (trackDecInfo.hasTtyp) + { + auto& tAtom = trackDecInfo.ttyp; + outTrackInfos.arrayElets[newTrackId].hasTypeInformation = true; + + outTrackInfos.arrayElets[newTrackId].type.majorBrand = tAtom.GetMajorBrand().c_str(); + outTrackInfos.arrayElets[newTrackId].type.minorVersion = tAtom.GetMinorVersion(); + + std::vector convertedCompatibleBrands; + for (auto& compatibleBrand : tAtom.GetCompatibleBrands()) + { + convertedCompatibleBrands.push_back(FourCC(compatibleBrand.c_str())); + } + outTrackInfos.arrayElets[newTrackId].type.compatibleBrands = + makeVarLenArray(convertedCompatibleBrands); + } + else + { + outTrackInfos.arrayElets[newTrackId].hasTypeInformation = false; + } + + if (trackDecInfo.samples.size() > 0) + { + uint32_t delta; + if (trackDecInfo.samples.size() >= 3) + { + delta = trackDecInfo.samples.at(1).sampleDuration; + } + else + { + delta = trackDecInfo.samples.at(0).sampleDuration; + } + auto timeScale = + m_initSegProps.at(initSegId).basicTrackInfos.at((*ctxIdIter)).timeScale; + outTrackInfos.arrayElets[newTrackId].frameRate = RatValue{timeScale, delta}; + } + + for (auto const& sample : trackDecInfo.samples) + { + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleId = + ItemId(sample.sampleId).GetIndex(); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleEntryType = + FourCC(sample.sampleEntryType.GetUInt32()); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleDescriptionIndex = + sample.sampleDescriptionIndex.GetIndex(); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleType = sample.sampleType; + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].initSegmentId = + segment.initSegmentId.GetIndex(); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].segmentId = segment.segmentId.GetIndex(); + if (sample.compositionTimes.size()) + { + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].earliestTStamp = + sample.compositionTimes.at(0); + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].earliestTStampTS = + sample.compositionTimesTS.at(0); + } + else + { + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].earliestTStamp = 0; + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].earliestTStampTS = 0; + } + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleFlags.flagsAsUInt = + sample.sampleFlags.flagsAsUInt; + outTrackInfos.arrayElets[newTrackId].sampleProperties[offset].sampleDurationTS = + sample.sampleDuration; + + unsigned int sampleSize = sample.dataLength; + if (sampleSize > outTrackInfos.arrayElets[newTrackId].maxSampleSize) + { + outTrackInfos.arrayElets[newTrackId].maxSampleSize = sampleSize; + } + offset++; + } + sampOffset[newTrackId] = offset; + } + } + basicTrackId++; + } + return ERROR_NONE; +} + +int32_t Mp4Reader::GetDisplayWidth(uint32_t trackId, uint32_t& displayPicW) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + CtxType ctxType; + int error = GetCtxTypeError(MakeIdPair(trackId), ctxType); + if (error) + { + return error; + } + + if (ctxType == CtxType::TRACK) + { + displayPicW = 
GetTrackBasicInfo(MakeIdPair(trackId)).width >> 16; + return ERROR_NONE; + } + else + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } +} + + +int32_t Mp4Reader::GetDisplayHeight(uint32_t trackId, uint32_t& displayPicH) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + CtxType ctxType; + int error = GetCtxTypeError(MakeIdPair(trackId), ctxType); + if (error) + { + return error; + } + + if (ctxType == CtxType::TRACK) + { + displayPicH = GetTrackBasicInfo(MakeIdPair(trackId)).height >> 16; + return ERROR_NONE; + } + else + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } +} + +int32_t Mp4Reader::GetDisplayWidthFP(uint32_t trackId, uint32_t& displayPicW) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + CtxType ctxType; + int error = GetCtxTypeError(MakeIdPair(trackId), ctxType); + if (error) + { + return error; + } + + if (ctxType == CtxType::TRACK) + { + displayPicW = GetTrackBasicInfo(MakeIdPair(trackId)).width; + return ERROR_NONE; + } + else + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } +} + +int32_t Mp4Reader::GetDisplayHeightFP(uint32_t trackId, uint32_t& displayPicH) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + CtxType ctxType; + int error = GetCtxTypeError(MakeIdPair(trackId), ctxType); + if (error) + { + return error; + } + + if (ctxType == CtxType::TRACK) + { + displayPicH = GetTrackBasicInfo(MakeIdPair(trackId)).height; + return ERROR_NONE; + } + else + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } +} + +int32_t Mp4Reader::GetWidth(uint32_t trackId, uint32_t itemId, uint32_t& imgW) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + uint32_t tempheight = 0; + return GetImgDims(trackId, itemId, imgW, tempheight); +} + + +int32_t Mp4Reader::GetHeight(uint32_t trackId, uint32_t itemId, uint32_t& imgH) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + uint32_t tempwidth = 0; + return GetImgDims(trackId, itemId, tempwidth, imgH); +} + +int32_t Mp4Reader::GetDims(uint32_t trackId, uint32_t itemId, uint32_t& imgW, uint32_t& imgH) const { + if (IsInitErr()) { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + return GetImgDims(trackId, itemId, imgW, imgH); +} + +int32_t Mp4Reader::GetPlaybackDurationInSecs(uint32_t trackId, double& durInSecs) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + auto trackIdPair = MakeIdPair(trackId); + double playDur = 0.0; + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + InitSegmentId initSegId = trackIdPair.first; + ContextId initTrackId = trackIdPair.second; + + switch (ctxType) + { + case CtxType::TRACK: + { + int64_t maxTInUS = 0; + uint32_t timescale = + m_initSegProps.at(initSegId).basicTrackInfos.at(initTrackId).timeScale; + std::map::const_iterator iter = m_initSegProps.at(initSegId).segPropMap.begin(); + for ( ; iter != m_initSegProps.at(initSegId).segPropMap.end(); iter++) + { + if (iter->second.initSegmentId == trackIdPair.first) + { + std::map::const_iterator iter1; + iter1 = iter->second.trackDecInfos.find(trackIdPair.second); + if (iter1 != iter->second.trackDecInfos.end()) + { + if (iter1->second.samples.size()) + { + for (const auto& compTS : iter1->second.samples.back().compositionTimesTS) + { + maxTInUS = max( + maxTInUS, + int64_t((compTS + iter1->second.samples.back().sampleDuration) * + 1000000 / timescale)); + } + } + } + } + } + playDur = maxTInUS / 1000000.0; + break; + } + + 
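+        // For CtxType::TRACK the duration is the largest (composition time + sample duration)
+        // found in the last sample of any matching segment, converted from the track timescale
+        // to microseconds and then to seconds. The default branch below covers the META and
+        // FILE contexts: META uses the forced-FPS master image count, FILE takes the longest
+        // duration over all known contexts.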
default: + std::map::const_iterator iter = m_initSegProps.at(initSegId).segPropMap.begin(); + for ( ; iter != m_initSegProps.at(initSegId).segPropMap.end(); iter++) + { + double timeSlot = 0.0; + if (iter->second.initSegmentId == trackIdPair.first) + { + SegmentId segIndex = iter->first; + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + switch (ctxType) + { + case CtxType::META: + if (m_metaInfo.at(segTrackId).forceFPSSet) + { + timeSlot = m_metaInfo.at(segTrackId).dispMasterImgs / + m_metaInfo.at(segTrackId).assignedFPS; + } + else + { + ISO_LOG(LOG_WARNING, "GetPlaybackDurationInSecs() called for meta context, but forced FPS was not set\n"); + } + break; + case CtxType::FILE: + for (const auto& contextInfo : m_ctxInfoMap) + { + double ctxDur; + error = GetPlaybackDurationInSecs(GenTrackId(contextInfo.first), ctxDur); + if (error) + { + return error; + } + if (ctxDur > timeSlot) + { + timeSlot = ctxDur; + } + } + break; + default: + return OMAF_INVALID_MP4READER_CONTEXTID; + } + } + playDur += timeSlot; + } + } + + durInSecs = playDur; + return ERROR_NONE; +} + +int32_t Mp4Reader::GetSampListByType(uint32_t trackId, + SampleFrameType itemType, + VarLenArray& itemIds) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + IdVector allItems; + int32_t error = GetCtxItems(trackIdPair, allItems); + if (error) + { + return error; + } + + std::vector matches; + CtxType ctxType; + error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + bool foundAny = false; + + for (const auto& segment : CreateDashSegs(trackIdPair.first)) + { + foundAny = true; + SegmentTrackId segTrackId = make_pair(segment.segmentId, trackIdPair.second); + if (ctxType == CtxType::TRACK) + { + if (itemType == SampleFrameType::OUT_REF_PIC) + { + if (CanFindTrackDecInfo(initSegId, segTrackId)) + { + ItemId sampId; + const SampleInfoVector& infoOfSamp = GetSampInfos(initSegId, segTrackId, sampId); + for (uint32_t index = 0; index < infoOfSamp.size(); ++index) + { + if (infoOfSamp[index].sampleType == FrameCodecType::OUTPUT_REF_FRAME) + { + matches.push_back((ItemId(index) + sampId).GetIndex()); + } + } + } + } + else if (itemType == SampleFrameType::NON_OUT_REFPIC) + { + if (CanFindTrackDecInfo(initSegId, segTrackId)) + { + ItemId sampId; + const SampleInfoVector& infoOfSamp = GetSampInfos(initSegId, segTrackId, sampId); + for (uint32_t index = 0; index < infoOfSamp.size(); ++index) + { + if (infoOfSamp[index].sampleType == FrameCodecType::NON_OUTPUT_REF_FRAME) + { + matches.push_back((ItemId(index) + sampId).GetIndex()); + } + } + } + } + else if (itemType == SampleFrameType::OUT_NONREF_PIC) + { + if (CanFindTrackDecInfo(initSegId, segTrackId)) + { + ItemId sampId; + const SampleInfoVector& infoOfSamp = GetSampInfos(initSegId, segTrackId, sampId); + for (uint32_t index = 0; index < infoOfSamp.size(); ++index) + { + if (infoOfSamp[index].sampleType == FrameCodecType::OUTPUT_NONREF_FRAME) + { + matches.push_back((ItemId(index) + sampId).GetIndex()); + } + } + } + } + else if (itemType == SampleFrameType::DISPLAY_PIC) + { + if (CanFindTrackDecInfo(initSegId, segTrackId)) + { + IdVector sampleIds; + ItemId sampId; + const SampleInfoVector& infoOfSamp = GetSampInfos(initSegId, segTrackId, sampId); + for (uint32_t index = 0; index < infoOfSamp.size(); ++index) + { + if (infoOfSamp[index].sampleType == FrameCodecType::OUTPUT_NONREF_FRAME || + 
infoOfSamp[index].sampleType == FrameCodecType::OUTPUT_REF_FRAME) + { + sampleIds.push_back((ItemId(index) + sampId).GetIndex()); + } + } + + std::vector samplePresentationTimes; + for (auto sampleId : sampleIds) + { + auto& trackDecInfo = GetTrackDecInfo(initSegId, segTrackId); + const auto singleSampPresentationTimes = + trackDecInfo.samples.at((ItemId(sampleId) - trackDecInfo.itemIdBase).GetIndex()).compositionTimes; + for (auto sampleTime : singleSampPresentationTimes) + { + samplePresentationTimes.push_back(make_pair(sampleId, sampleTime)); + } + } + + sort(samplePresentationTimes.begin(), samplePresentationTimes.end(), + [&](ItemIdTStampPair a, ItemIdTStampPair b) { return a.second < b.second; }); + + for (auto pair : samplePresentationTimes) + { + matches.push_back(pair.first.GetIndex()); + } + } + } + else if (itemType == SampleFrameType::SAMPLES_PIC) + { + matches = allItems; + } + //else + //{ + // return OMAF_ERROR_BAD_PARAM; + //} + } + } + + if (!foundAny) + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + + itemIds = makeVarLenArray(matches); + return ERROR_NONE; +} + + +int32_t Mp4Reader::GetSampType(uint32_t trackId, uint32_t itemId, FourCC& trackItemType) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + SegmentId segIndex; + int32_t result = GetSegIndex(trackIdPair, itemId, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + FourCCInt boxtype; + switch (ctxType) + { + case CtxType::TRACK: + { + ItemId sampId; + auto& infoOfSamp = GetSampInfos(initSegId, segTrackId, sampId); + boxtype = infoOfSamp.at((ItemId(itemId) - sampId).GetIndex()).sampleEntryType; + trackItemType = FourCC(boxtype.GetUInt32()); + return ERROR_NONE; + } + + default: + return OMAF_INVALID_MP4READER_CONTEXTID; + } +} + +int32_t Mp4Reader::GetSegIndex(InitSegmentTrackId trackIdPair, + ItemId itemId, + SegmentId& segIndex) const +{ + bool wentPast = false; + + if (m_initSegProps.empty() || !m_initSegProps.count(trackIdPair.first) || + m_initSegProps.at(trackIdPair.first).segPropMap.empty()) + { + return OMAF_INVALID_SEGMENT; + } + + const auto& segs = CreateDashSegs(trackIdPair.first); + for (auto segmentIt = segs.begin(); !wentPast && segmentIt != segs.end(); ++segmentIt) + { + if (segmentIt->initSegmentId == trackIdPair.first) + { + auto track = segmentIt->trackDecInfos.find(trackIdPair.second); + if (track != segmentIt->trackDecInfos.end()) + { + if (track->second.itemIdBase > ItemId(itemId)) + { + wentPast = true; + } + else + { + segIndex = segmentIt->segmentId; + } + } + } + } + + int ret = OMAF_INVALID_ITEM_ID; + auto segment = m_initSegProps.at(trackIdPair.first).segPropMap.find(segIndex); + if (segment != m_initSegProps.at(trackIdPair.first).segPropMap.end()) + { + auto track = segment->second.trackDecInfos.find(trackIdPair.second); + if (track != segment->second.trackDecInfos.end()) + { + if (itemId - track->second.itemIdBase < ItemId(uint32_t(track->second.samples.size()))) + { + ret = ERROR_NONE; + } + } + } + return ret; +} + +int32_t Mp4Reader::GetSegIndex(InitSegTrackIdPair id, SegmentId& segIndex) const +{ + return GetSegIndex(id.first, id.second, segIndex); +} + +int32_t Mp4Reader::GetSampDataInfo(uint32_t ctxId, + uint32_t itemIndex, + const InitSegmentId& 
initSegId, + uint64_t& refSampLength, + uint64_t& refDataOffset) +{ + auto trackCtxId = ContextId(ctxId); + if (!(m_initSegProps[initSegId].trackProperties[trackCtxId].referenceTrackIds["scal"].empty())) + { + return OMAF_INVALID_PROPERTY_INDEX; + } + + InitSegmentTrackId neededInitSegTrackId = make_pair(initSegId, trackCtxId); + + SegmentId segIndex; + int32_t result = GetSegIndex(neededInitSegTrackId, itemIndex, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, neededInitSegTrackId.second); + ItemId itemId = ItemId(itemIndex) - GetTrackDecInfo(initSegId, segTrackId).itemIdBase; + + refDataOffset = GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).dataOffset; + refSampLength = GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).dataLength; + + return ERROR_NONE; +} + +int32_t Mp4Reader::GetDepedentSampInfo(uint32_t trackId, + uint32_t itemIndex, + const InitSegmentId& initSegId, + uint8_t trackReference, + uint64_t& refSampLength, + uint64_t& refDataOffset) +{ + auto trackCtxId = MakeIdPair(trackId).second; + if (m_initSegProps[initSegId].trackProperties[trackCtxId].referenceTrackIds["scal"].empty()) + { + return OMAF_INVALID_PROPERTY_INDEX; + } + auto refTrackCtxId = + m_initSegProps[initSegId].trackProperties[trackCtxId].referenceTrackIds["scal"].at( + trackReference); + + InitSegmentTrackId refInitSegTrackId = make_pair(initSegId, refTrackCtxId); + + SegmentId refSegmentId; + int32_t result = GetSegIndex(refInitSegTrackId, itemIndex, refSegmentId); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId refSegTrackId = make_pair(refSegmentId, refInitSegTrackId.second); + ItemId refItemId = ItemId(itemIndex) - GetTrackDecInfo(initSegId, refSegTrackId).itemIdBase; + + refDataOffset = GetTrackDecInfo(initSegId, refSegTrackId).samples.at(refItemId.GetIndex()).dataOffset; + refSampLength = GetTrackDecInfo(initSegId, refSegTrackId).samples.at(refItemId.GetIndex()).dataLength; + + return ERROR_NONE; +} + +bool ParseExtractorNal(const DataVector& NalData, + ExtSample& extSamp, + uint8_t lenSizeMinus1, + uint64_t& extSize) +{ + Stream nalus(NalData); + ExtNalHdr extNalHdr; + uint32_t extractors = 0; + uint64_t inlineSizes = 0; + size_t order_idx = 1; + + while (nalus.BytesRemain() > 0) + { + size_t readCnt = 0; + if (lenSizeMinus1 == 0) + { + readCnt = nalus.Read8(); + } + else if (lenSizeMinus1 == 1) + { + readCnt = nalus.Read16(); + } + else if (lenSizeMinus1 == 3) + { + readCnt = nalus.Read32(); + } + else + { + ISO_LOG(LOG_ERROR, "Length field size is not correct !\n"); + throw exception(); + } + + extNalHdr.forbidden_zero_bit = (uint8_t) nalus.Read1(1); + extNalHdr.nal_unit_type = (uint8_t) nalus.Read1(6); + extNalHdr.nuh_layer_id = (uint8_t) nalus.Read1(6); + extNalHdr.nuh_temporal_id_plus1 = (uint8_t) nalus.Read1(3); + + readCnt = readCnt - 2; + if (extNalHdr.nal_unit_type == 49) + { + ExtSample::Extractor extractor; + for (; readCnt > 0; readCnt--) + { + uint8_t constType = (uint8_t) nalus.Read1(8); + + if (constType == 0) + { + ExtSample::SampleConstruct sampConst; + sampConst.order_idx = (uint8_t) order_idx; + sampConst.constructor_type = (uint8_t) constType; + sampConst.track_ref_index = (uint8_t) nalus.Read1(8); + readCnt--; + sampConst.track_ref_index = sampConst.track_ref_index - 1; + sampConst.sample_offset = (int8_t) nalus.Read1(8); + readCnt--; + sampConst.data_offset = nalus.Read1((lenSizeMinus1 + 1) * 8); + readCnt -= (lenSizeMinus1 + 1); + sampConst.data_length = 
nalus.Read1((lenSizeMinus1 + 1) * 8); + readCnt -= (lenSizeMinus1 + 1); + if (sampConst.data_length < UINT32_MAX) + { + extSize += sampConst.data_length; + } + extractor.sampleConstruct.push_back(sampConst); + order_idx = order_idx + 1; + } + else if (constType == 2) + { + ExtSample::InlineConstruct inlinConst; + inlinConst.order_idx = (uint8_t) order_idx; + inlinConst.constructor_type = (uint8_t) constType; + inlinConst.data_length = (uint8_t) nalus.Read1(8); + for (uint8_t i = 0; i < inlinConst.data_length; i++) + { + inlinConst.inline_data.push_back((uint8_t) nalus.Read1(8)); + } + inlineSizes += inlinConst.data_length; + extractor.inlineConstruct.push_back(inlinConst); + readCnt = readCnt - inlinConst.data_length - 1; + order_idx = order_idx + 1; + } + } + extSamp.extractors.push_back(extractor); + extractors++; + } + else + { + nalus.SkipBytes(readCnt); + } + } + if (extractors) + { + if (extSize > 0) + { + extSize += inlineSizes; + } + return true; + } + else + { + return false; + } +} + +int32_t Mp4Reader::GetExtractorTrackSampData(uint32_t trackId, + uint32_t itemIndex, + char* buf, + uint32_t& bufSize, + bool strHrd) +{ + uint32_t spaceAvailable = bufSize; + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + SegmentId segIndex; + int32_t result = GetSegIndex(trackIdPair, itemIndex, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + ItemId itemId = ItemId(itemIndex) - GetTrackDecInfo(initSegId, segTrackId).itemIdBase; + + SegmentIO& io = m_initSegProps.at(initSegId).segPropMap.at(segIndex).io; + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + switch (ctxType) + { + case CtxType::TRACK: + { + if (itemId.GetIndex() >= GetTrackDecInfo(initSegId, segTrackId).samples.size()) + { + return OMAF_INVALID_ITEM_ID; + } + + const uint32_t sampLen = GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).dataLength; + if (bufSize < sampLen) + { + bufSize = sampLen; + return OMAF_MEMORY_TOO_SMALL_BUFFER; + } + + int64_t neededDataOffset = (int64_t) GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).dataOffset; + LocateToOffset(io, neededDataOffset); + io.strIO->ReadStream(buf, sampLen); + bufSize = sampLen; + + if (!io.strIO->IsStreamGood()) + { + return OMAF_FILE_READ_ERROR; + } + break; + } + default: + return OMAF_INVALID_MP4READER_CONTEXTID; + } + + FourCC codeType; + error = GetDecoderCodeType(GenTrackId(trackIdPair), itemIndex, codeType); + if (error) + { + return error; + } + + if (codeType == "avc1" || codeType == "avc3") + { + if (strHrd) + { + return ParseAvcData(buf, bufSize); + } + else + { + return ERROR_NONE; + } + } + else if (codeType == "hvc1" || codeType == "hev1") + { + if (strHrd) + { + return ParseHevcData(buf, bufSize); + } + else + { + return ERROR_NONE; + } + } + + else if (codeType == "hvc2") + { + std::vector extSampBuffer(buf, buf + bufSize); + + uint8_t nalLengthSizeMinus1 = 3; + ItemId sampId; + auto& infoOfSamp = GetSampInfos(initSegId, segTrackId, sampId); + SmpDesIndex index = infoOfSamp.at((ItemId(itemIndex) - sampId).GetIndex()).sampleDescriptionIndex; + if (GetTrackBasicInfo(trackIdPair).nalLengthSizeMinus1.count(index.GetIndex()) != 0) + { + nalLengthSizeMinus1 = GetTrackBasicInfo(trackIdPair).nalLengthSizeMinus1.at(index); + assert(nalLengthSizeMinus1 == 3); + } + 
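+        // The sample just read is an 'hvc2' extractor sample (HEVC extractor NAL units,
+        // nal_unit_type 49). ParseExtractorNal() below splits it into sample constructors
+        // (constructor type 0), which reference byte ranges in other tracks via the 'scal'
+        // track references, and inline constructors (type 2), which carry literal NAL bytes.
+        // The resolved data from both is then stitched into 'buf' in constructor order.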
+ ExtSample extSamp; + uint64_t extSize = 0; + uint64_t tolerance = 0; + + if (ParseExtractorNal(extSampBuffer, extSamp, nalLengthSizeMinus1, extSize)) + { + if (extSize == 0) + { + extSize = 0; + for (auto& extractor : extSamp.extractors) + { + for (auto& sampConstr : extractor.sampleConstruct) + { + uint64_t refSampLength = 0; + uint64_t refDataOffset = 0; + result = + GetDepedentSampInfo(trackId, itemIndex, initSegId, sampConstr.track_ref_index, + refSampLength, refDataOffset); + if (result != ERROR_NONE) + { + return result; + } + extSize += refSampLength; + } + } + tolerance = (uint64_t)(extSize / 10); + extSize += tolerance; + } + if (extSize > (uint64_t) spaceAvailable) + { + bufSize = (uint32_t)(extSize + tolerance); + return OMAF_MEMORY_TOO_SMALL_BUFFER; + } + + uint32_t extractedBytes = 0; + char* buffer = buf; + char* inlineNalLengthPlaceHolder = NULL; + size_t inlineLength = 0; + vector::iterator sampConst; + vector::iterator inlinConst; + uint64_t refSampLength = 0; + uint64_t refSampOffset = 0; + uint8_t trackRefIndex = UINT8_MAX; + + for (auto& extractor : extSamp.extractors) + { + for (sampConst = extractor.sampleConstruct.begin(), + inlinConst = extractor.inlineConstruct.begin(); + sampConst != extractor.sampleConstruct.end() || + inlinConst != extractor.inlineConstruct.end();) + { + if (inlinConst != extractor.inlineConstruct.end() && + (sampConst == extractor.sampleConstruct.end() || + (*inlinConst).order_idx < (*sampConst).order_idx)) + { + inlineNalLengthPlaceHolder = buffer; + + memcpy(buffer, (*inlinConst).inline_data.data(), (*inlinConst).inline_data.size()); + inlineLength = (*inlinConst).inline_data.size() - (nalLengthSizeMinus1 + 1); // exclude the length + buffer += (*inlinConst).data_length; + extractedBytes += (*inlinConst).data_length; + ++inlinConst; + } + else if (sampConst != extractor.sampleConstruct.end()) + { + auto referredTrack = ContextId((*sampConst).track_ref_index + 1); + InitSegmentId ref_initSegmentId; + for (const auto& loopInitSegment : m_initSegProps) + { + if (loopInitSegment.second.corresTrackId == referredTrack) + { + ref_initSegmentId = loopInitSegment.first; + break; + } + } + InitSegmentTrackId ref_trackIdPair = make_pair(ref_initSegmentId, referredTrack); + SegmentId ref_segmentId; + int32_t result = GetSegIndex(ref_trackIdPair, itemIndex, ref_segmentId); + SegmentIO& ref_io = m_initSegProps.at(ref_initSegmentId).segPropMap.at(ref_segmentId).io; + + if ((*sampConst).track_ref_index != trackRefIndex || trackRefIndex == UINT8_MAX) + { + result = + GetSampDataInfo(((*sampConst).track_ref_index + 1), itemIndex, ref_initSegmentId, + refSampLength, refSampOffset); + if (result != ERROR_NONE) + { + return result; + } + trackRefIndex = (*sampConst).track_ref_index; + LocateToOffset(ref_io, refSampOffset); + } + ref_io.strIO->ReadStream(buffer, (nalLengthSizeMinus1 + 1)); + uint64_t refNalLength = ParseNalLen(buffer); + + uint64_t inputReadOffset = refSampOffset + (*sampConst).data_offset; + + uint64_t bytesToCopy = refNalLength; + if ((*sampConst).data_length == 0) + { + bytesToCopy = refNalLength; + refSampLength = 0; + } + else + { + if ((uint64_t)((*sampConst).data_offset) + (uint64_t)((*sampConst).data_length) > refSampLength) + { + if ((*sampConst).data_offset > refSampLength) + { + return OMAF_INVALID_SEGMENT; + } + bytesToCopy = refSampLength - (*sampConst).data_offset; + } + else + { + bytesToCopy = (*sampConst).data_length; + } + + if (inlineNalLengthPlaceHolder != NULL) + { + uint64_t actualNalLength = bytesToCopy + inlineLength; + 
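+                            // An inline constructor earlier in this extractor wrote a NAL
+                            // header with a placeholder length into the output; patch that
+                            // 4-byte, big-endian length field so it covers the inline bytes
+                            // plus the sample-construct payload copied below.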
WriteNalLen(actualNalLength, inlineNalLengthPlaceHolder); + inlineNalLengthPlaceHolder = NULL; + } + else + { + inputReadOffset += (nalLengthSizeMinus1 + 1); + if (bytesToCopy == refSampLength - (*sampConst).data_offset) + { + bytesToCopy -= (nalLengthSizeMinus1 + 1); + } + buffer += (nalLengthSizeMinus1 + 1); + extractedBytes += (nalLengthSizeMinus1 + 1); + } + } + + if (extractedBytes + (uint32_t)bytesToCopy > spaceAvailable) + { + bufSize = extractedBytes + (uint32_t)bytesToCopy; + return OMAF_MEMORY_TOO_SMALL_BUFFER; + } + if (inputReadOffset > 0) + { + LocateToOffset(ref_io, (int64_t) inputReadOffset); + } + ref_io.strIO->ReadStream(buffer, bytesToCopy); + buffer += bytesToCopy; + extractedBytes += (uint32_t)bytesToCopy; + ++sampConst; + inlineNalLengthPlaceHolder = NULL; + inlineLength = 0; + + refSampLength -= (refNalLength + (nalLengthSizeMinus1 + 1)); + } + } + } + bufSize = extractedBytes; + if (strHrd) + { + return ParseHevcData(buf, bufSize); + } + + return ERROR_NONE; + } + return OMAF_UNSUPPORTED_DASH_CODECS_TYPE; // hvc2 but unknown extractor? + } + else if ((codeType == "mp4a") || (codeType == "invo") || (codeType == "urim") || (codeType == "mp4v")) + { + return ERROR_NONE; + } + else + { + return OMAF_UNSUPPORTED_DASH_CODECS_TYPE; + } +} + +int32_t Mp4Reader::GetSampData(uint32_t trackId, + uint32_t itemIndex, + char* buf, + uint32_t& bufSize, + bool strHrd) +{ + uint32_t spaceAvailable = bufSize; + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + SegmentId segIndex; + int32_t result = GetSegIndex(trackIdPair, itemIndex, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + ItemId itemId = ItemId(itemIndex) - GetTrackDecInfo(initSegId, segTrackId).itemIdBase; + + SegmentIO& io = m_initSegProps.at(initSegId).segPropMap.at(segIndex).io; + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + switch (ctxType) + { + case CtxType::TRACK: + { + if (itemId.GetIndex() >= GetTrackDecInfo(initSegId, segTrackId).samples.size()) + { + return OMAF_INVALID_ITEM_ID; + } + + const uint32_t sampLen = GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).dataLength; + if (bufSize < sampLen) + { + bufSize = sampLen; + return OMAF_MEMORY_TOO_SMALL_BUFFER; + } + + LocateToOffset(io, (int64_t) GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).dataOffset); + io.strIO->ReadStream(buf, sampLen); + bufSize = sampLen; + + if (!io.strIO->IsStreamGood()) + { + return OMAF_FILE_READ_ERROR; + } + break; + } + default: + return OMAF_INVALID_MP4READER_CONTEXTID; + } + + FourCC codeType; + error = GetDecoderCodeType(GenTrackId(trackIdPair), itemIndex, codeType); + if (error) + { + return error; + } + + if (codeType == "avc1" || codeType == "avc3") + { + if (strHrd) + { + return ParseAvcData(buf, bufSize); + } + else + { + return ERROR_NONE; + } + } + else if (codeType == "hvc1" || codeType == "hev1") + { + if (strHrd) + { + return ParseHevcData(buf, bufSize); + } + else + { + return ERROR_NONE; + } + } + else if (codeType == "hvc2") + { + std::vector extSampBuffer(buf, buf + bufSize); + + uint8_t nalLengthSizeMinus1 = 3; + ItemId sampId; + auto& infoOfSamp = GetSampInfos(initSegId, segTrackId, sampId); + SmpDesIndex index = infoOfSamp.at((ItemId(itemIndex) - sampId).GetIndex()).sampleDescriptionIndex; + if 
(GetTrackBasicInfo(trackIdPair).nalLengthSizeMinus1.count(index.GetIndex()) != 0) + { + nalLengthSizeMinus1 = GetTrackBasicInfo(trackIdPair).nalLengthSizeMinus1.at(index); + assert(nalLengthSizeMinus1 == 3); + } + + ExtSample extSamp; + uint64_t extSize = 0; + uint64_t tolerance = 0; + + if (ParseExtractorNal(extSampBuffer, extSamp, nalLengthSizeMinus1, extSize)) + { + if (extSize == 0) + { + extSize = 0; + for (auto& extractor : extSamp.extractors) + { + for (auto& sampConstr : extractor.sampleConstruct) + { + uint64_t refSampLength = 0; + uint64_t refDataOffset = 0; + result = + GetDepedentSampInfo(trackId, itemIndex, initSegId, sampConstr.track_ref_index, + refSampLength, refDataOffset); + if (result != ERROR_NONE) + { + return result; + } + extSize += refSampLength; + } + } + tolerance = (uint64_t)(extSize / 10); + extSize += tolerance; + } + if (extSize > (uint64_t) spaceAvailable) + { + bufSize = (uint32_t)(extSize + tolerance); + return OMAF_MEMORY_TOO_SMALL_BUFFER; + } + + uint32_t extractedBytes = 0; + char* buffer = buf; + char* inlineNalLengthPlaceHolder = NULL; + size_t inlineLength = 0; + vector::iterator sampConst; + vector::iterator inlinConst; + uint64_t refSampLength = 0; + uint64_t refSampOffset = 0; + uint8_t trackRefIndex = UINT8_MAX; + + for (auto& extractor : extSamp.extractors) + { + for (sampConst = extractor.sampleConstruct.begin(), + inlinConst = extractor.inlineConstruct.begin(); + sampConst != extractor.sampleConstruct.end() || + inlinConst != extractor.inlineConstruct.end();) + { + if (inlinConst != extractor.inlineConstruct.end() && + (sampConst == extractor.sampleConstruct.end() || + (*inlinConst).order_idx < (*sampConst).order_idx)) + { + inlineNalLengthPlaceHolder = buffer; + memcpy(buffer, (*inlinConst).inline_data.data(), (*inlinConst).inline_data.size()); + inlineLength = (*inlinConst).inline_data.size() - (nalLengthSizeMinus1 + 1); + buffer += (*inlinConst).data_length; + extractedBytes += (*inlinConst).data_length; + ++inlinConst; + } + else if (sampConst != extractor.sampleConstruct.end()) + { + if ((*sampConst).track_ref_index != trackRefIndex || trackRefIndex == UINT8_MAX) + { + result = + GetDepedentSampInfo(trackId, itemIndex, initSegId, (*sampConst).track_ref_index, + refSampLength, refSampOffset); + if (result != ERROR_NONE) + { + return result; + } + trackRefIndex = (*sampConst).track_ref_index; + LocateToOffset(io, refSampOffset); + } + io.strIO->ReadStream(buffer, (nalLengthSizeMinus1 + 1)); + uint64_t refNalLength = ParseNalLen(buffer); + + uint64_t inputReadOffset = refSampOffset + (*sampConst).data_offset; + + uint64_t bytesToCopy = refNalLength; + if ((*sampConst).data_length == 0) + { + bytesToCopy = refNalLength; + refSampLength = 0; + } + else + { + if ((uint64_t)((*sampConst).data_offset) + (uint64_t)((*sampConst).data_length) > refSampLength) + { + if ((*sampConst).data_offset > refSampLength) + { + return OMAF_INVALID_SEGMENT; + } + bytesToCopy = refSampLength - (*sampConst).data_offset; + } + else + { + bytesToCopy = (*sampConst).data_length; + } + + if (inlineNalLengthPlaceHolder != NULL) + { + uint64_t actualNalLength = bytesToCopy + inlineLength; + WriteNalLen(actualNalLength, inlineNalLengthPlaceHolder); + inlineNalLengthPlaceHolder = NULL; + } + else + { + inputReadOffset += (nalLengthSizeMinus1 + 1); + if (bytesToCopy == refSampLength - (*sampConst).data_offset) + { + bytesToCopy -= (nalLengthSizeMinus1 + 1); + } + buffer += (nalLengthSizeMinus1 + 1); + extractedBytes += (nalLengthSizeMinus1 + 1); + } + } + + if 
(extractedBytes + (uint32_t)bytesToCopy > spaceAvailable) + { + bufSize = extractedBytes + (uint32_t)bytesToCopy; + return OMAF_MEMORY_TOO_SMALL_BUFFER; + } + if (inputReadOffset > 0) + { + LocateToOffset(io, (int64_t) inputReadOffset); + } + io.strIO->ReadStream(buffer, bytesToCopy); + buffer += bytesToCopy; + extractedBytes += (uint32_t)bytesToCopy; + ++sampConst; + inlineNalLengthPlaceHolder = NULL; + inlineLength = 0; + + refSampLength -= (refNalLength + (nalLengthSizeMinus1 + 1)); + } + } + } + bufSize = extractedBytes; + if (strHrd) + { + return ParseHevcData(buf, bufSize); + } + + return ERROR_NONE; + } + return OMAF_UNSUPPORTED_DASH_CODECS_TYPE; // hvc2 but unknown extractor? + } + else if ((codeType == "mp4a") || (codeType == "invo") || (codeType == "urim") || (codeType == "mp4v")) + { + return ERROR_NONE; + } + else + { + return OMAF_UNSUPPORTED_DASH_CODECS_TYPE; + } +} + +int32_t Mp4Reader::GetSampOffset(uint32_t trackId, + uint32_t itemIndex, + uint64_t& sampOffset, + uint32_t& sampLen) +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + SegmentId segIndex; + int32_t result = GetSegIndex(trackIdPair, itemIndex, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + ItemId itemId = ItemId(itemIndex) - GetTrackDecInfo(initSegId, segTrackId).itemIdBase; + + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + if (ctxType == CtxType::TRACK) + { + if (itemId.GetIndex() >= GetTrackDecInfo(initSegId, segTrackId).samples.size()) + { + return OMAF_INVALID_ITEM_ID; + } + sampLen = GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).dataLength; + sampOffset = GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).dataOffset; + } + else + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + return ERROR_NONE; +} + + +int32_t Mp4Reader::GetCodecSpecInfo(uint32_t trackId, + uint32_t itemId, + VarLenArray& codecSpecInfos) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + + if (auto* parameterSetMap = GetParameterSetMap(InitSegTrackIdPair(trackIdPair, itemId))) + { + codecSpecInfos = VarLenArray(parameterSetMap->size()); + + int i = 0; + for (auto const& entry : *parameterSetMap) + { + MediaCodecSpecInfo decSpecInfo; + decSpecInfo.codecSpecInfoType = entry.first; + decSpecInfo.codecSpecInfoBits = makeVarLenArray(entry.second); + codecSpecInfos.arrayElets[i++] = decSpecInfo; + } + return ERROR_NONE; + } + return OMAF_INVALID_ITEM_ID; // or invalid context...? 
+} + + +int32_t Mp4Reader::GetTrackTStamps(uint32_t trackId, VarLenArray& timeStamps) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + std::map timestampMap; + + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + for (const auto& segment : m_initSegProps.at(initSegId).segPropMap) + { + if (segment.second.initSegmentId == trackIdPair.first) + { + SegmentId segIndex = segment.first; + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + switch (ctxType) + { + case CtxType::TRACK: + { + if (CanFindTrackDecInfo(initSegId, segTrackId)) + { + for (const auto& infoOfSamp : GetTrackDecInfo(initSegId, segTrackId).samples) + { + for (auto compositionTime : infoOfSamp.compositionTimes) + { + timestampMap.insert(make_pair(compositionTime, infoOfSamp.sampleId)); + } + } + } + break; + } + + case CtxType::META: + { + if (m_metaInfo.at(segTrackId).forceFPSSet == true) + { + for (const auto& imageInfo : m_metaInfo.at(segTrackId).imageInfoMap) + { + if (imageInfo.second.type == "master") + { + timestampMap.insert(make_pair(static_cast(imageInfo.second.displayTime), + imageInfo.first)); + } + } + } + else + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + break; + } + + default: + return OMAF_INVALID_MP4READER_CONTEXTID; + } + } + } + + timeStamps = VarLenArray(timestampMap.size()); + uint32_t i = 0; + for (auto const& entry : timestampMap) + { + TStampID pair; + pair.timeStamp = entry.first; + pair.itemId = entry.second.GetIndex(); + timeStamps.arrayElets[i++] = pair; + } + + return ERROR_NONE; +} + + +int32_t Mp4Reader::GetSampTStamps( + uint32_t trackId, + uint32_t itemIndex, + VarLenArray& timeStamps) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + SegmentId segIndex; + int32_t result = GetSegIndex(trackIdPair, itemIndex, segIndex); + if (result != ERROR_NONE) + { + return result; + } + ItemId itemId = ItemId(itemIndex) - + GetTrackDecInfo(initSegId, SegmentTrackId(segIndex, trackIdPair.second)).itemIdBase; + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + switch (ctxType) + { + case CtxType::TRACK: + { + const auto& displayTimes = + GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).compositionTimes; + timeStamps = makeVarLenArray(displayTimes); + break; + } + + case CtxType::META: + { + if (m_metaInfo.at(segTrackId).forceFPSSet == true) + { + timeStamps = VarLenArray(1); + timeStamps.arrayElets[0] = + static_cast(m_metaInfo.at(segTrackId).imageInfoMap.at(itemId.GetIndex()).displayTime); + } + else + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + break; + } + + default: + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + } + return ERROR_NONE; +} + + +int32_t Mp4Reader::GetSampInDecSeq( + uint32_t trackId, + VarLenArray& sampItems) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + DecodingOrderVector sampItemsVector; + sampItemsVector.clear(); + + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + for (const 
auto& segment : m_initSegProps.at(initSegId).segPropMap) + { + if (segment.second.initSegmentId == trackIdPair.first) + { + SegmentId segIndex = segment.first; + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + switch (ctxType) + { + case CtxType::TRACK: + { + if (CanFindTrackDecInfo(initSegId, segTrackId)) + { + const auto& samples = GetTrackDecInfo(initSegId, segTrackId).samples; + sampItemsVector.reserve(samples.size()); + for (const auto& sample : samples) + { + for (const auto compositionTime : sample.compositionTimes) + { + sampItemsVector.push_back(make_pair(sample.sampleId, compositionTime)); + } + } + + sort(sampItemsVector.begin(), sampItemsVector.end(), + [&](ItemIdTStampPair a, ItemIdTStampPair b) { return a.second < b.second; }); + } + break; + } + case CtxType::META: + { +//#ifndef DISABLE_UNCOVERED_CODE + if (m_metaInfo.at(segTrackId).forceFPSSet == true) + { + sampItemsVector.reserve(m_metaInfo.at(segTrackId).imageInfoMap.size()); + for (const auto& image : m_metaInfo.at(segTrackId).imageInfoMap) + { + sampItemsVector.push_back(pair( + image.first.GetIndex(), static_cast(image.second.displayTime))); + } + } + else + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + break; +//#endif // DISABLE_UNCOVERED_CODE + } + default: + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + } + } + } + + sampItems = VarLenArray(sampItemsVector.size()); + uint32_t i = 0; + for (auto const& entry : sampItemsVector) + { + TStampID pair; + pair.timeStamp = entry.second; + pair.itemId = entry.first.GetIndex(); + sampItems.arrayElets[i++] = pair; + } + return ERROR_NONE; +} + +int32_t Mp4Reader::GetDecoderCodeType(uint32_t trackId, uint32_t itemId, FourCC& decoderCodeType) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + SegmentId segIndex; + int32_t result = GetSegIndex(trackIdPair, itemId, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + const auto segPropsIt = + m_initSegProps.at(initSegId).segPropMap.find(segIndex); + if (segPropsIt == m_initSegProps.at(initSegId).segPropMap.end()) + { + return OMAF_INVALID_ITEM_ID; + } + const auto& segProps = segPropsIt->second; + const auto decInfoIter = segProps.trackDecInfos.find(segTrackId.second); + if (decInfoIter == segProps.trackDecInfos.end()) + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + const auto& trackDecInfo = decInfoIter->second; + const auto& decCodeType = trackDecInfo.decoderCodeTypeMap; + + auto iter = decCodeType.find(ItemId(itemId)); + if (iter != decCodeType.end()) + { + decoderCodeType = iter->second.GetUInt32(); + return ERROR_NONE; + } + + const auto parameterSetIdIt = segProps.itemToParameterSetMap.find( + InitSegTrackIdPair(trackIdPair, ItemId(itemId) - GetTrackDecInfo(initSegId, segTrackId).itemIdBase)); + if (parameterSetIdIt == segProps.itemToParameterSetMap.end()) + { + return OMAF_INVALID_ITEM_ID; + } + const SmpDesIndex parameterSetId = parameterSetIdIt->second; + iter = decCodeType.find(ItemId(parameterSetId.GetIndex())); + if (iter != decCodeType.end()) + { + decoderCodeType = iter->second.GetUInt32(); + return ERROR_NONE; + } + return OMAF_INVALID_ITEM_ID; // or invalid context...? 
+} + + +int32_t Mp4Reader::GetDurOfSamp(uint32_t trackId, uint32_t sampleId, uint32_t& sampDur) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + SegmentId segIndex; + int32_t result = GetSegIndex(trackIdPair, sampleId, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + switch (ctxType) + { + case CtxType::TRACK: + { + break; + } + case CtxType::META: + default: + { + return OMAF_INVALID_MP4READER_CONTEXTID; + } + } + + ItemId itemId = ItemId(sampleId) - GetTrackDecInfo(initSegId, segTrackId).itemIdBase; + + sampDur = + (uint32_t)(uint64_t(GetTrackDecInfo(initSegId, segTrackId).samples.at(itemId.GetIndex()).sampleDuration) * + 1000) / + GetTrackBasicInfo(trackIdPair).timeScale; + return ERROR_NONE; +} + +int32_t Mp4Reader::GetAudioChnlProp(uint32_t trackId, uint32_t sampleId, ChnlProperty& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.chnlProperties, index, result); + + if (result == ERROR_NONE) + { + outProp.strStruct = propIn.strStruct; + outProp.layOut = propIn.layOut; + outProp.omitChnlMap = propIn.omitChnlMap; + outProp.objCnt = propIn.objCnt; + outProp.chnlCnt = propIn.chnlCnt; + outProp.chnlLayOuts = makeVarLenArray(propIn.chnlLayOuts); + } + + return result; +} + +int32_t Mp4Reader::GetSpatAudioProp(uint32_t trackId, + uint32_t sampleId, + SpatialAudioProperty& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.sa3dProperties, index, result); + + if (result == ERROR_NONE) + { + outProp.version = propIn.version; + outProp.ambisonicType = propIn.ambisonicType; + outProp.ambisonicOrder = propIn.ambisonicOrder; + outProp.ambisonicChnlSeq = propIn.ambisonicChnlSeq; + outProp.ambisonicNorml = propIn.ambisonicNorml; + outProp.chnlMap = makeVarLenArray(propIn.chnlMap); + } + + return result; +} + +int32_t Mp4Reader::GetSteScop3DProp(uint32_t trackId, + uint32_t sampleId, + OmniStereoScopic3D& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.st3dProperties, index, result); + + if (result == ERROR_NONE) + { + outProp = propIn; + } + + return result; +} + +int32_t Mp4Reader::GetSpheV1Prop(uint32_t trackId, + uint32_t sampleId, + SphericalVideoV1Property& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.sphericalV1Properties, index, result); + + if (result == ERROR_NONE) + { + outProp = propIn; + } + + return result; +} + +int32_t Mp4Reader::GetSpheV2Prop(uint32_t trackId, + uint32_t sampleId, + SphericalVideoV2Property& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.sv3dProperties, index, result); + + if (result == ERROR_NONE) + { + 
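+        // SphericalVideoV2Property (sv3d) is stored in the same layout the API exposes,
+        // so it can be copied out directly; properties such as rwpk and covi below need
+        // per-field conversion into VarLenArray-based structures instead.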
outProp = propIn; + } + + return result; +} + +int32_t Mp4Reader::GetRWPKProp(uint32_t trackId, + uint32_t sampleId, + RWPKProperty& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.rwpkProperties, index, result); + + if (result == ERROR_NONE) + { + outProp.constituentPictureMatching = propIn.constituentPicMatching; + outProp.packedPicHeight = propIn.packedPicHeight; + outProp.packedPicWidth = propIn.packedPicWidth; + outProp.projPicHeight = propIn.projPicHeight; + outProp.projPicWidth = propIn.projPicWidth; + outProp.regions = makeVarLenArray(propIn.regions); + } + + return result; +} + +int32_t Mp4Reader::GetCOVIInfoProp(uint32_t trackId, + uint32_t sampleId, + COVIInformation& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.coviProperties, index, result); + + if (result == ERROR_NONE) + { + outProp.coviShapeType = propIn.coviShapeType; + outProp.defaultViewIdc = (OmniViewIdc) propIn.defaultViewIdc; + outProp.viewIdcPresenceFlag = propIn.viewIdcPresenceFlag; + outProp.sphereRegions = makeVarLenArray(propIn.sphereRegions); + } + + return result; +} + +int32_t Mp4Reader::GetProjFrmtProp(uint32_t trackId, + uint32_t sampleId, + ProjFormat& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.pfrmProperties, index, result); + + if (result == ERROR_NONE) + { + outProp = propIn; + } + + return result; +} + +int32_t Mp4Reader::GetStereVideoProp(uint32_t trackId, + uint32_t sampleId, + VideoFramePackingType& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.stviProperties, index, result); + + if (result == ERROR_NONE) + { + outProp = propIn; + } + + return result; +} + +int32_t Mp4Reader::GetRotateProp(uint32_t trackId, uint32_t sampleId, Rotation& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.rotnProperties, index, result); + + if (result == ERROR_NONE) + { + outProp = propIn; + } + + return result; +} + + +int32_t Mp4Reader::GetScheTypesProp(uint32_t trackId, + uint32_t sampleId, + SchemeTypesProperty& outProp) const +{ + TrackBasicInfo basicTrackInfo; + SmpDesIndex index; + uint32_t result = SearchInfoForTrack(trackId, sampleId, basicTrackInfo, index); + auto propIn = GetSampProp(basicTrackInfo.schemeTypesProperties, index, result); + + if (result == ERROR_NONE) + { + outProp.mainScheme.type = propIn.mainScheme.GetSchemeType().GetUInt32(); + outProp.mainScheme.version = propIn.mainScheme.GetSchemeVersion(); + outProp.mainScheme.uri = makeVarLenArray(propIn.mainScheme.GetSchemeUri()); + + std::vector compatibleTypes; + for (auto& compatibleScheme : propIn.compatibleSchemes) + { + SchemeType compType{}; + compType.type = compatibleScheme.GetSchemeType().GetUInt32(); + compType.version = compatibleScheme.GetSchemeVersion(); + compType.uri = makeVarLenArray(compatibleScheme.GetSchemeUri()); + compatibleTypes.push_back(compType); + } + outProp.compatibleSchemeTypes = 
makeVarLenArray(compatibleTypes); + } + + return result; +} + +uint32_t Mp4Reader::SearchInfoForTrack(uint32_t trackId, + uint32_t sampleId, + TrackBasicInfo& basicTrackInfo, + SmpDesIndex& index) const +{ + if (IsInitErr()) + { + return OMAF_MP4READER_NOT_INITIALIZED; + } + + InitSegmentTrackId trackIdPair = MakeIdPair(trackId); + InitSegmentId initSegId = trackIdPair.first; + SegmentId segIndex; + int32_t result = GetSegIndex(trackIdPair, sampleId, segIndex); + if (result != ERROR_NONE) + { + return result; + } + SegmentTrackId segTrackId = make_pair(segIndex, trackIdPair.second); + + CtxType ctxType; + int error = GetCtxTypeError(trackIdPair, ctxType); + if (error) + { + return error; + } + + if (ctxType == CtxType::TRACK) + { + ItemId sampId; + auto& infoOfSamp = GetSampInfos(initSegId, segTrackId, sampId); + index = infoOfSamp.at((ItemId(sampleId) - sampId).GetIndex()).sampleDescriptionIndex; + basicTrackInfo = GetTrackBasicInfo(trackIdPair); + return ERROR_NONE; + } + + return OMAF_INVALID_MP4READER_CONTEXTID; +} + +VCD_MP4_END diff --git a/src/isolib/dash_parser/Mp4ReaderImpl.h b/src/isolib/dash_parser/Mp4ReaderImpl.h new file mode 100644 index 00000000..0c0d39bf --- /dev/null +++ b/src/isolib/dash_parser/Mp4ReaderImpl.h @@ -0,0 +1,1081 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4ReaderImpl.h +//! \brief: Mp4Reader class definition +//! \detail: Define detailed file reader operation class +//! + +#ifndef _MP4READERIMPL_H_ +#define _MP4READERIMPL_H_ + +#include "Mp4DataTypes.h" +#include "../atoms/ChannelLayoutAtom.h" +#include "../atoms/FormAllocator.h" +#include "../atoms/DecPts.h" +#include "../atoms/ElemStreamDescAtom.h" +#include "../atoms/TypeAtom.h" +#include "../atoms/BasicAudAtom.h" +#include "../atoms/BasicVideoAtom.h" +#include "../atoms/MetaAtom.h" +#include "../atoms/MovieAtom.h" +#include "../atoms/MovieFragAtom.h" +#include "Mp4Segment.h" +#include "Mp4StreamIO.h" +#include "../atoms/SegIndexAtom.h" + +#include +#include +#include +#include + +using namespace std; + +VCD_MP4_BEGIN + +class CleanAperture; +class AvcDecoderConfigurationRecord; +class HevcDecoderConfigurationRecord; + +//! +//! \class Mp4Reader +//! 
\brief Define the operation and needed data for mp4 segment files reading +//! + +class Mp4Reader +{ +public: + //! + //! \brief Constructor + //! + Mp4Reader(); + + //! + //! \brief Destructor + //! + virtual ~Mp4Reader() = default; + + //! + //! \brief Create one instance of Mp4Reader class + //! + //! \return Mp4Reader* + //! the pointer to the created instance + //! + static Mp4Reader* Create(); + + //! + //! \brief Destroy the specified instance of Mp4Reader class + //! + //! \param [in] mp4Reader + //! pointer to the specified instance + //! + //! \return void + //! + static void Destroy(Mp4Reader* mp4Reader); + +public: + + //! + //! \brief Initialize Mp4Reader according to specified + //! stream operation interface + //! + //! \param [in] stream + //! pointer to the specified stream operation + //! interface + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t Initialize(StreamIO* stream); + + //! + //! \brief Free resource of Mp4Reader + //! + //! \return void + //! + void Close(); + + //! + //! \brief Get the major brand from specified segment + //! of specified track + //! + //! \param [out] majorBrand + //! output major brand + //! \param [in] initSegIndex + //! initial segment index corresponding to + //! specified track + //! \param [in] segIndex + //! index of specified segment + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetMajorBrand(FourCC& majorBrand, + uint32_t initSegIndex = 0, + uint32_t segIndex = UINT32_MAX) const; + + //! + //! \brief Get the minor version from specified segment + //! of specified track + //! + //! \param [out] minorVersion + //! output minor version + //! \param [in] initSegIndex + //! initial segment index corresponding to + //! specified track + //! \param [in] segIndex + //! index of specified segment + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetMinorVersion(uint32_t& minorVersion, + uint32_t initSegIndex = 0, + uint32_t segIndex = UINT32_MAX) const; + + //! + //! \brief Get the compatible brands from specified segment + //! of specified track + //! + //! \param [out] compatBrands + //! output compatible brands + //! \param [in] initSegIndex + //! initial segment index corresponding to + //! specified track + //! \param [in] segIndex + //! index of specified segment + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetCompatibleBrands(VarLenArray& compatBrands, + uint32_t initSegIndex = 0, + uint32_t segIndex = UINT32_MAX) const; + + //! + //! \brief Get the track information for all tracks + //! after one segment file is parsed and Mp4Reader + //! is initialized + //! + //! \param [out] trackInfos + //! track information for all tracks + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetTrackInformations(VarLenArray& trackInfos) const; + + //! + //! \brief Get the track information for basic tracks + //! after one segment file is parsed and Mp4Reader + //! is initialized + //! + //! \param [out] trackInfos + //! track information for basic tracks + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetTrackInformation(VarLenArray& trackInfos) const; + + //! + //! \brief Get picture display width + //! + //! \param [in] trackId + //! index of specific track + //! \param [out] displayPicW + //! picture display width + //! + //! \return int32_t + //! 
ERROR_NONE if success, else failed reason + //! + int32_t GetDisplayWidth(uint32_t trackId, uint32_t& displayPicW) const; + + //! + //! \brief Get picture display height + //! + //! \param [in] trackId + //! index of specific track + //! \param [out] displayPicH + //! picture display height + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetDisplayHeight(uint32_t trackId, uint32_t& displayPicH) const; + + //! + //! \brief Get fixed point picture display width + //! + //! \param [in] trackId + //! index of specific track + //! \param [out] displayPicW + //! fixed point picture display width + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetDisplayWidthFP(uint32_t trackId, uint32_t& displayPicW) const; + + //! + //! \brief Get fixed point picture display height + //! + //! \param [in] trackId + //! index of specific track + //! \param [out] displayPicH + //! fixed point picture display height + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetDisplayHeightFP(uint32_t trackId, uint32_t& displayPicH) const; + + //! + //! \brief Get width of specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] itemId + //! index of specified sample + //! \param [out] width + //! width of specified sample in pixels + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetWidth(uint32_t trackId, uint32_t itemId, uint32_t& width) const; + + //! + //! \brief Get height of specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] itemId + //! index of specified sample + //! \param [out] height + //! height of specified sample in pixels + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetHeight(uint32_t trackId, uint32_t itemId, uint32_t& height) const; + + + int32_t GetDims(uint32_t trackId, uint32_t itemId,uint32_t& width, uint32_t& height) const; + //! + //! \brief Get playback duration of specified track, + //! and unit is second + //! + //! \param [in] trackId + //! index of specific track + //! \param [out] druInSecs + //! playback duration in second + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetPlaybackDurationInSecs(uint32_t trackId, double& durInSecs) const; + + //! + //! \brief Get samples list of specified sample frame type from + //! the specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] itemType + //! specified sample frame type, like reference frame type + //! \param [out] itemIds + //! samples list + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSampListByType(uint32_t trackId, SampleFrameType itemType, VarLenArray& itemIds) const; + + //! + //! \brief Get sample type of specified sample in + //! the specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] itemId + //! index of specified sample + //! \param [out] trackItemType + //! detailed sample type + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSampType(uint32_t trackId, uint32_t itemId, FourCC& trackItemType) const; + + //! + //! \brief Get complete data for specified sample in + //! the specified extractor track + //! + //! \param [in] trackId + //! index of specific extractor track + //! 
\param [in] itemIndex + //! index of specified sample + //! \param [out] buf + //! pointer to the allocated memory to store + //! the sample data + //! \param [out] bufSize + //! size of sample data + //! \param [in] strHrd + //! whether to insert NAL unit start codes into + //! sample data + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetExtractorTrackSampData(uint32_t trackId, + uint32_t itemIndex, + char* buf, + uint32_t& bufSize, + bool strHrd = true); + + //! + //! \brief Get complete data for specified sample in + //! the specified normal track + //! + //! \param [in] trackId + //! index of specific normal track + //! \param [in] itemId + //! index of specified sample + //! \param [out] buf + //! pointer to the allocated memory to store + //! the sample data + //! \param [out] bufSize + //! size of sample data + //! \param [in] strHrd + //! whether to insert NAL unit start codes into + //! sample data + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSampData(uint32_t trackId, + uint32_t itemId, + char* buf, + uint32_t& bufSize, + bool strHrd = true); + + //! + //! \brief Get track sample data offset and length for + //! the specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] itemIndex + //! index of specified sample + //! \param [out] sampOffset + //! output sample offset + //! \param [out] sampLen + //! output sample length + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSampOffset(uint32_t trackId, + uint32_t itemIndex, + uint64_t& sampOffset, + uint32_t& sampLen); + + //! + //! \brief Get media codec related specific information, + //! like SPS, PPS and so on, for specified sample in + //! specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] itemId + //! index of specified sample + //! \param [out] codecSpecInfos + //! output media codec related specific information + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetCodecSpecInfo(uint32_t trackId, + uint32_t itemId, + VarLenArray& codecSpecInfos) const; + + //! + //! \brief Get display time stamp of each sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [out] timeStamps + //! output time stamps for each sample + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetTrackTStamps(uint32_t trackId, + VarLenArray& timeStamps) const; + + //! + //! \brief Get display time stamp of the specified sample + //! in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] itemId + //! index of the specified sample + //! \param [out] timeStamps + //! output time stamps for the sample + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSampTStamps(uint32_t trackId, + uint32_t itemId, + VarLenArray& timeStamps) const; + + //! + //! \brief Get samples in decoding sequence in specified track, + //! gotten samples are presented by TStampID structure, + //! that is pair + //! + //! \param [in] trackId + //! index of specific track + //! \param [out] sampItems + //! output samples in decoding sequence + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSampInDecSeq(uint32_t trackId, + VarLenArray& sampItems) const; + + //! + //! 
\brief Get decoder code type for specified sample + //! in specified track, like "hvc1" and so on. + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] itemId + //! index of specified sample + //! \param [out] decoderCodeType + //! output decoder code type + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetDecoderCodeType(uint32_t trackId, + uint32_t itemId, + FourCC& decoderCodeType) const; + + //! + //! \brief Get duration for specified sample + //! in specified track, in milliseconds. + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] sampDur + //! output sample duration + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetDurOfSamp(uint32_t trackId, + uint32_t sampleId, + uint32_t& sampDur) const; + + //! + //! \brief Get audio channel layout box information for + //! specified sample in specified 'chnl' track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output channel property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetAudioChnlProp(uint32_t trackId, + uint32_t sampleId, + ChnlProperty& outProp) const; + + //! + //! \brief Get spatial audio box information for + //! specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output spatial audio property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSpatAudioProp(uint32_t trackId, + uint32_t sampleId, + SpatialAudioProperty& outProp) const; + + //! + //! \brief Get stereo scopic 3D information for spherical video for + //! specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output stereo scopic 3D property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSteScop3DProp(uint32_t trackId, + uint32_t sampleId, + OmniStereoScopic3D& outProp) const; + + //! + //! \brief Get spherical video V1 information for + //! specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output spherical video V1 property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSpheV1Prop(uint32_t trackId, + uint32_t sampleId, + SphericalVideoV1Property& outProp) const; + + //! + //! \brief Get spherical video V2 information for + //! specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output spherical video V2 property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSpheV2Prop(uint32_t trackId, + uint32_t sampleId, + SphericalVideoV2Property& outProp) const; + + //! + //! \brief Get region wise packing information for + //! specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! 
\param [out] outProp + //! output region wise packing property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetRWPKProp(uint32_t trackId, + uint32_t sampleId, + RWPKProperty& outProp) const; + //! + //! \brief Get content coverage information for + //! specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output content coverage property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetCOVIInfoProp(uint32_t trackId, + uint32_t sampleId, + COVIInformation& outProp) const; + //! + //! \brief Get projection format information for + //! specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output projection format property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetProjFrmtProp(uint32_t trackId, + uint32_t sampleId, + ProjFormat& outProp) const; + //! + //! \brief Get scheme type information for + //! specified sample in specified track + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output scheme type property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetScheTypesProp(uint32_t trackId, + uint32_t sampleId, + SchemeTypesProperty& outProp) const; + //! + //! \brief Get stereo video information for + //! specified sample in specified track, + //! only podv scheme is supported + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output stereo video property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetStereVideoProp(uint32_t trackId, + uint32_t sampleId, + VideoFramePackingType& outProp) const; + //! + //! \brief Get rotation information for + //! specified sample in specified track, + //! + //! \param [in] trackId + //! index of specific track + //! \param [in] sampleId + //! index of specified sample + //! \param [out] outProp + //! output podv rotation property + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetRotateProp(uint32_t trackId, + uint32_t sampleId, + Rotation& outProp) const; + +private: + uint32_t SearchInfoForTrack(uint32_t trackId, + uint32_t sampleId, + TrackBasicInfo& basicTrackInfo, + SmpDesIndex& index) const; + + template + Tval GetSampProp(std::map& propsMap, + SmpDesIndex& index, + uint32_t& result) const; + +public: + + //! + //! \brief Parse specified initial segment + //! + //! \param [in] strIO + //! pointer to specified initial segment handler + //! \param [in] initSegId + //! index of specified initial segment + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t ParseInitSeg(StreamIO* strIO, uint32_t initSegId); + + //! + //! \brief Disable specified initial segment + //! Invalidate the data buffer pointer to the + //! specified initial segment, then the data + //! from the segment can not be accessed any longer + //! + //! \param [in] initSegId + //! index of specified initial segment + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! 
+ int32_t DisableInitSeg(uint32_t initSegId); + + //! + //! \brief Parse specified segment for specified track + //! + //! \param [in] strIO + //! pointer to specified segment handler + //! \param [in] initSegId + //! index of specified initial segment, this index + //! is corresponding to track index + //! \param [in] segIndex + //! index of specified segment + //! \param [in] earliestPTSinTS + //! the earliest presentation time in timescale for + //! the specified sample + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t ParseSeg(StreamIO* strIO, + uint32_t initSegId, + uint32_t segIndex, + uint64_t earliestPTSinTS = UINT64_MAX); + + //! + //! \brief Disable specified segment for specified track + //! Disable the data buffer pointer to the specified + //! segment, then the data from the segment can not + //! be accessed any longer + //! + //! \param [in] initSegId + //! index of specified initial segment, which is + //! corresponding to track index + //! \param [in] segIndex + //! index of specified segment + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t DisableSeg(uint32_t initSegId, + uint32_t segIndex); + + //! + //! \brief Get segment index information for specified + //! track + //! + //! \param [in] initSegId + //! index of specified initial segment, which is + //! corresponding to track index + //! \param [out] segIndex + //! array of segmentation information structure + //! which hold segment index information + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! + int32_t GetSegIndex(uint32_t initSegId, + VarLenArray& segIndex); + + //! + //! \brief Parse segment index information for specified + //! stream + //! + //! \param [in] strIO + //! pointer to the specified stream handler + //! \param [out] segIndex + //! array of segmentation information structure + //! which hold parsed segment index information + //! + //! \return int32_t + //! ERROR_NONE if success, else failed reason + //! 
+ int32_t ParseSegIndex(StreamIO* strIO, + VarLenArray& segIndex); + +private: + std::map m_initSegProps; + UniquePtr m_fileStr; + + Sequence m_nextSeq = {}; + + enum class ReaderState + { + UNINITIALIZED, + INITIALIZING, + READY + }; + ReaderState m_readerSte; + + enum class CtxType + { + META, + TRACK, + FILE, + NOT_SET + }; + + struct CtxInfo + { + CtxType ctxType = CtxType::NOT_SET; + bool isCoverImg = false; + ItemId coverImgId = 0; + bool enableLoopPlay = false; + }; + std::map m_ctxInfoMap; + + friend class DashSegGroup; + friend class ConstDashSegGroup; + + DashSegGroup CreateDashSegs(InitSegmentId initSegId); + ConstDashSegGroup CreateDashSegs(InitSegmentId initSegId) const; + + void IsInited() const; + + int IsInitErr() const; + + CtxType GetCtxType(InitSegmentTrackId id) const; + + int GetCtxTypeError(const InitSegmentTrackId id, CtxType& ctxType) const; + + int32_t ReadStream(InitSegmentId initSegId, SegmentId segIndex); + + FileProperty GetFileProps() const; + + int32_t ReadAtomParams(SegmentIO& io, std::string& boxType, int64_t& boxSize); + int32_t ReadAtom(SegmentIO& io, Stream& bitstream); + int32_t SkipAtom(SegmentIO& io); + + int GetImgDims(uint32_t trackId, uint32_t itemId, uint32_t& width, uint32_t& height) const; + + int32_t GetCtxItems(InitSegmentTrackId segTrackId, IdVector& items) const; + + bool IsProtected(uint32_t trackId, uint32_t itemId) const; + + void GetAvcSpecData(const DataVector& rawData, DataVector& specData); + + void GetHevcSpecData(const DataVector& rawData, DataVector& specData); + + int32_t ParseAvcData(char* buf, uint32_t& bufSize); + + int32_t ParseHevcData(char* buf, uint32_t& bufSize); + + std::map m_metaMap; + + struct ImgInfo + { + std::string type = "invalid"; + uint32_t width = 0; + uint32_t height = 0; + double displayTime = 0; + }; + typedef std::map ImgInfoMap; + + struct ItemInfo + { + std::string type; + }; + typedef std::map ItemInfoMap; + + struct MetaAtomInfo + { + uint32_t dispMasterImgs = 0; + bool enableLoopPlay = false; + bool forceFPSSet = false; + float assignedFPS = 0.0; + ImgInfoMap imageInfoMap; + ItemInfoMap itemInfoMap; + }; + std::map m_metaInfo; + + TrackBasicInfo& GetTrackBasicInfo(InitSegmentTrackId initSegTrackId); + const TrackBasicInfo& GetTrackBasicInfo(InitSegmentTrackId initSegTrackId) const; + + TrackDecInfo& GetTrackDecInfo(InitSegmentId initSegId, + SegmentTrackId segTrackId); + const TrackDecInfo& GetTrackDecInfo(InitSegmentId initSegId, + SegmentTrackId segTrackId) const; + bool CanFindTrackDecInfo(InitSegmentId initSegId, + SegmentTrackId segTrackId) const; + + const SampleInfoVector& GetSampInfos(InitSegmentId initSegId, + SegmentTrackId segTrackId, + ItemId& itemIdBase) const; + + const ParameterSetMap* GetParameterSetMap(InitSegTrackIdPair id) const; //< returns null if not found + + bool FoundPrevSeg(InitSegmentId initSegId, + SegmentId curSegmentId, + SegmentId& precedingSegmentId) const; + + const TrackDecInfo* GetPrevTrackDecInfo(InitSegmentId initSegId, + SegmentTrackId segTrackId) const; + + void CfgSegSidxFallback(InitSegmentId initSegId, + SegmentTrackId segTrackId); + + void RefreshCompTimes(InitSegmentId initSegId, + SegmentId segIndex); + + ItemInfoMap ExtractItemInfoMap(const MetaAtom& metaAtom) const; + + void ProcessDecoderConfigProperties(const InitSegmentTrackId segTrackId); + + std::vector m_matrix; + + void RefreshDecCodeType(InitSegmentId initSegIndex, + SegmentTrackId segTrackId, + const SampleInfoVector& sampleInfo, + size_t prevSampInfoSize = 0); + + void 
RefreshItemToParamSet(ItemToParameterSetMap& itemToParameterSetMap, + InitSegmentTrackId initSegTrackId, + const SampleInfoVector& sampleInfo, + size_t prevSampInfoSize = 0); + + void AddSegSeq(InitSegmentId initSegIndex, + SegmentId segIndex, + Sequence sequence); + + TrackPropertiesMap FillTrackProps(InitSegmentId initSegIndex, + SegmentId segIndex, + MovieAtom& moovAtom); + + ItemId GetSuccedentItmId(InitSegmentId initSegIndex, + SegmentTrackId segTrackId) const; + + ItemId GetPrevItemId(InitSegmentId initSegIndex, + SegmentTrackId segTrackId) const; + + typedef std::map CtxIdPresentTSMap; + + void AddTrackProps(InitSegmentId initSegId, + SegmentId segIndex, + MovieFragmentAtom& moofAtom, + const CtxIdPresentTSMap& earliestPTSTS); + + void AddSampsToTrackDecInfo(TrackDecInfo& trackInfo, + const InitSegmentProperties& initSegProps, + const TrackBasicInfo& basicTrackInfo, + const TrackProperties& trackProps, + const uint64_t baseDataOffset, + const uint32_t sampDescId, + ItemId itemIdBase, + ItemId trackrunItemIdBase, + const TrackRunAtom* trackRunAtom); + + MoovProperties ExtractMoovProps(const MovieAtom& moovAtom) const; + + void FillSampEntryMap(TrackAtom* trackAtom, + InitSegmentId initSegId); + + void FillRinfAtomInfo(TrackBasicInfo& trackInfo, + unsigned int index, + const SampleEntryAtom& entry); + + TrackProperty GetTrackProps(TrackAtom* trackAtom) const; + + TypeToCtxIdsMap GetRefTrackIds(TrackAtom* trackAtom) const; + + TypeToIdsMap GetTrackGroupIds(TrackAtom* trackAtom) const; + + TypeToIdsMap GetSampGroupIds(TrackAtom* trackAtom) const; + + IdVector GetAlternateTrackIds(TrackAtom* trackAtom, + MovieAtom& moovAtom) const; + + pair ExtractTrackDecInfo(TrackAtom* trackAtom, + uint32_t movieTimescale) const; + + SampleInfoVector GenSampInfo(TrackAtom* trackAtom) const; + + int AddDecReferences(InitSegmentId initSegId, + SegmentTrackId segmentTrackId, + const DecodingOrderVector& sampItems, + DecodingOrderVector& output) const; + + void GenSegInAtom(const SegmentIndexAtom& sidxAtom, + SegmentIndex& segIndex, + int64_t dataOffsetAnchor); + + int32_t ConvertStrBytesToInt(SegmentIO& io, + uint32_t count, + int64_t& result); + + void LocateToOffset(SegmentIO& io, int64_t pos) const + { + if (io.strIO->TellOffset() != pos) + { + io.strIO->SeekOffset(pos); + } + } + + unsigned GenTrackId(InitSegmentTrackId id) const; + + InitSegmentTrackId MakeIdPair(unsigned id) const; + + int32_t GetSegIndex(InitSegmentTrackId initSegTrackId, + ItemId itemId, SegmentId& segIndex) const; + int32_t GetSegIndex(InitSegTrackIdPair id, SegmentId& segIndex) const; + + + int32_t GetSampDataInfo(uint32_t trackId, + uint32_t itemIndex, + const InitSegmentId& initSegId, + uint64_t& refSampLength, + uint64_t& refDataOffset); + + int32_t GetDepedentSampInfo(uint32_t trackId, + uint32_t itemIndex, + const InitSegmentId& initSegId, + uint8_t trackReference, + uint64_t& refSampLength, + uint64_t& refDataOffset); + + uint64_t ParseNalLen(char* buffer) const; + void WriteNalLen(uint64_t length, char* buffer) const; +}; + +template +inline Tval Mp4Reader::GetSampProp(std::map& propsMap, + SmpDesIndex& index, + uint32_t& result) const +{ + if (result != ERROR_NONE) + { + return Tval(); + } + + if (propsMap.count(index.GetIndex()) == 0) + { + result = OMAF_INVALID_SAMPLEDESCRIPTION_INDEX; + return Tval(); + } + + return propsMap.at(index); +} + +VCD_MP4_END; +#endif /* _MP4READERIMPL_H_ */ diff --git a/src/isolib/dash_parser/Mp4ReaderUtil.cpp b/src/isolib/dash_parser/Mp4ReaderUtil.cpp new file mode 100644 index 
00000000..4b2a1337 --- /dev/null +++ b/src/isolib/dash_parser/Mp4ReaderUtil.cpp @@ -0,0 +1,343 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4ReaderUtil.cpp +//! \brief: Mp4 reader basic utility functions implementation +//! + +#include "Mp4ReaderUtil.h" +#include "../atoms/AvcConfigAtom.h" +#include "../atoms/CleanApertureAtom.h" +#include "../atoms/FormAllocator.h" +#include "../atoms/HevcConfigAtom.h" + +using namespace std; + +VCD_MP4_BEGIN + +ChnlPropertyInternal GenChnl(const ChannelLayoutAtom& inChnlBox) +{ + ChnlPropertyInternal chnl{}; + chnl.strStruct = inChnlBox.GetStreamStructure(); + chnl.layOut = inChnlBox.GetDefinedLayout(); + chnl.omitChnlMap = inChnlBox.GetOmittedChannelsMap(); + chnl.objCnt = inChnlBox.GetObjectCount(); + chnl.chnlCnt = inChnlBox.GetChannelNumber(); + chnl.chnlLayOuts.clear(); + + if (chnl.chnlCnt > 0 && chnl.strStruct == 1 && chnl.layOut == 0) + { + std::vector layout = inChnlBox.GetChannelLayouts(); + for (uint16_t i = 0; i < layout.size(); i++) + { + ChannelLayout channelPosition; + channelPosition.speakerPosition = layout[i].speakerPosition; + channelPosition.azimuth = layout[i].azimuthAngle; + channelPosition.elevation = layout[i].elevationAngle; + chnl.chnlLayOuts.push_back(channelPosition); + } + } + + return chnl; +} + +SA3DPropertyInternal GenSA3D(const SpatialAudioAtom& inSpaAudioBox) +{ + SA3DPropertyInternal sa3d{}; + sa3d.version = inSpaAudioBox.GetVersion(); + sa3d.ambisonicType = inSpaAudioBox.GetAmbisonicType(); + sa3d.ambisonicOrder = inSpaAudioBox.GetAmbisonicOrder(); + sa3d.ambisonicChnlSeq = inSpaAudioBox.GetAmbisonicChannelOrdering(); + sa3d.ambisonicNorml = inSpaAudioBox.GetAmbisonicNormalization(); + sa3d.chnlMap.clear(); + + std::vector channelMap = inSpaAudioBox.GetChannelMap(); + for (uint16_t i = 0; i < channelMap.size(); i++) + { + sa3d.chnlMap.push_back(channelMap.at(i)); + } + + return sa3d; +} + +OmniStereoScopic3D Genst3d(const Stereoscopic3D* inStereo3DBox) +{ + OmniStereoScopic3D st3d(OmniStereoScopic3D::OMNI_MONOSCOPIC); + if (inStereo3DBox != nullptr) + { + Stereoscopic3D::StereoTypeV2 v2stereomode = inStereo3DBox->GetStereoMode(); + if (v2stereomode == 
Stereoscopic3D::StereoTypeV2::STEREO_TOPBOTTOM) + { + st3d = OmniStereoScopic3D::OMNI_STEREOSCOPIC_TOP_BOTTOM; + } + else if (v2stereomode == Stereoscopic3D::StereoTypeV2::STEREO_LEFTRIGHT) + { + st3d = OmniStereoScopic3D::OMNI_STEREOSCOPIC_LEFT_RIGHT; + } + else if (v2stereomode == Stereoscopic3D::StereoTypeV2::STEREO_STEREOCUSTOM) + { + st3d = OmniStereoScopic3D::OMNI_STEREOSCOPIC_STEREO_CUSTOM; + } + else + { + st3d = OmniStereoScopic3D::OMNI_MONOSCOPIC; + } + } + return st3d; +} + +OmniStereoScopic3D Genst3d(const SphericalVideoV1Atom& inStereo3DBox) +{ + OmniStereoScopic3D st3d(OmniStereoScopic3D::OMNI_MONOSCOPIC); + SphericalVideoV1Atom::StereoTypeV1 v1stereomode = inStereo3DBox.GetGeneralMetaData().stereoType; + if (v1stereomode == SphericalVideoV1Atom::StereoTypeV1::STEREO_TOP_BOTTOM) + { + st3d = OmniStereoScopic3D::OMNI_STEREOSCOPIC_TOP_BOTTOM; + } + else if (v1stereomode == SphericalVideoV1Atom::StereoTypeV1::STEREO_LEFT_RIGHT) + { + st3d = OmniStereoScopic3D::OMNI_STEREOSCOPIC_LEFT_RIGHT; + } + else // set UNDEFINED also as MONOSCOPIC (default in spec). + { + st3d = OmniStereoScopic3D::OMNI_MONOSCOPIC; + } + return st3d; +} + +SphericalVideoV1Property GenSphericalVideoV1Property(const SphericalVideoV1Atom& inSpheVideoBox) +{ + SphericalVideoV1Property outSpheV1Prop{}; + outSpheV1Prop.isSphe = true; // always true for V1.0 + outSpheV1Prop.isStitched = true; // always true for V1.0 + outSpheV1Prop.projType = OmniProjType::OMNI_ERP; + outSpheV1Prop.srcCnt = inSpheVideoBox.GetGeneralMetaData().sourceCount; + outSpheV1Prop.initView.fixedPntYaw = inSpheVideoBox.GetGeneralMetaData().initViewHead + << 16; + outSpheV1Prop.initView.fixedPntPitch = inSpheVideoBox.GetGeneralMetaData().initViewPitch + << 16; + outSpheV1Prop.initView.fixedPntRoll = inSpheVideoBox.GetGeneralMetaData().initViewRoll + << 16; + outSpheV1Prop.timestamp = inSpheVideoBox.GetGeneralMetaData().timestamp; + outSpheV1Prop.panoWidth = inSpheVideoBox.GetGeneralMetaData().fullPanoWidth; + outSpheV1Prop.panoHeight = inSpheVideoBox.GetGeneralMetaData().fullPanoHeight; + outSpheV1Prop.croppedWidth = + inSpheVideoBox.GetGeneralMetaData().croppedAreaImageWidth; + outSpheV1Prop.croppedHeight = + inSpheVideoBox.GetGeneralMetaData().croppedAreaImageHeight; + outSpheV1Prop.croppedLeftW = inSpheVideoBox.GetGeneralMetaData().croppedAreaLeft; + outSpheV1Prop.croppedTopH = inSpheVideoBox.GetGeneralMetaData().croppedAreaTop; + return outSpheV1Prop; +} + +SphericalVideoV2Property Gensv3d(const SphericalVideoV2Atom* inSpheVideoBox) +{ + SphericalVideoV2Property outSpheV2Prop{}; + if (inSpheVideoBox != nullptr) + { + outSpheV2Prop.rotateDegree.fixedPntYaw = inSpheVideoBox->GetProjectionAtom().GetProjectionHeaderAtom().GetPose().yaw; + outSpheV2Prop.rotateDegree.fixedPntPitch = inSpheVideoBox->GetProjectionAtom().GetProjectionHeaderAtom().GetPose().pitch; + outSpheV2Prop.rotateDegree.fixedPntRoll = inSpheVideoBox->GetProjectionAtom().GetProjectionHeaderAtom().GetPose().roll; + + Projection::ProjectFormat type = inSpheVideoBox->GetProjectionAtom().GetProjectFormat(); + + switch (type) + { + case Projection::ProjectFormat::CUBEMAP: + { + outSpheV2Prop.projType = OmniProjType::OMNI_CUBEMAP; + outSpheV2Prop.projection.cubemap.layout = + inSpheVideoBox->GetProjectionAtom().GetCubemapProjectionAtom().GetLayout(); + outSpheV2Prop.projection.cubemap.padding = + inSpheVideoBox->GetProjectionAtom().GetCubemapProjectionAtom().GetPadding(); + break; + } + case Projection::ProjectFormat::ERP: + { + + outSpheV2Prop.projType = OmniProjType::OMNI_ERP; + 
outSpheV2Prop.projection.equirectangular.topBoundFP = inSpheVideoBox->GetProjectionAtom() + .GetEquirectangularProjectionAtom() + .GetProjectionBounds() + .top_32FP; + outSpheV2Prop.projection.equirectangular.btmBoundFP = inSpheVideoBox->GetProjectionAtom() + .GetEquirectangularProjectionAtom() + .GetProjectionBounds() + .bottom_32FP; + outSpheV2Prop.projection.equirectangular.leftBoundFP = inSpheVideoBox->GetProjectionAtom() + .GetEquirectangularProjectionAtom() + .GetProjectionBounds() + .left_32FP; + outSpheV2Prop.projection.equirectangular.rightBoundFP = inSpheVideoBox->GetProjectionAtom() + .GetEquirectangularProjectionAtom() + .GetProjectionBounds() + .right_32FP; + break; + } + case Projection::ProjectFormat::MESH: + { + outSpheV2Prop.projType = OmniProjType::OMNI_MESH; + break; + } + default: + { + outSpheV2Prop.projType = OmniProjType::OMNI_UNKOWN; + break; + } + } + } + return outSpheV2Prop; +} + +RWPKPropertyInternal Genrwpk(const RegionWisePackingAtom& inRWPKBox) +{ + RWPKPropertyInternal outRWPKProp{}; + outRWPKProp.constituentPicMatching = inRWPKBox.GetConstituentPictureMatchingFlag(); + outRWPKProp.packedPicHeight = inRWPKBox.GetPackedPictureHeight(); + outRWPKProp.packedPicWidth = inRWPKBox.GetPackedPictureWidth(); + outRWPKProp.projPicHeight = inRWPKBox.GetProjPictureHeight(); + outRWPKProp.projPicWidth = inRWPKBox.GetProjPictureWidth(); + + for (auto& regionFromBox : inRWPKBox.GetRegions()) + { + RWPKRegion region{}; + region.guardBandFlag = regionFromBox->guardBandFlag; + region.packingType = (OmniRWPKType) regionFromBox->packingType; + + UniquePtr rwpkReg = std::move(regionFromBox->rectangularPacking); + if (region.packingType == OmniRWPKType::OMNI_RECTANGULAR) + { + RectRWPKRegion rectRegion{}; + + rectRegion.projRegWidth = rwpkReg->projRegWidth; + rectRegion.projRegHeight = rwpkReg->projRegHeight; + rectRegion.projRegTop = rwpkReg->projRegTop; + rectRegion.projRegLeft = rwpkReg->projRegLeft; + rectRegion.transformType = rwpkReg->transformType; + rectRegion.packedRegWidth = rwpkReg->packedRegWidth; + rectRegion.packedRegHeight = rwpkReg->packedRegHeight; + rectRegion.packedRegTop = rwpkReg->packedRegTop; + rectRegion.packedRegLeft = rwpkReg->packedRegLeft; + rectRegion.leftGbWidth = rwpkReg->leftGbWidth; + rectRegion.rightGbWidth = rwpkReg->rightGbWidth; + rectRegion.topGbHeight = rwpkReg->topGbHeight; + rectRegion.bottomGbHeight = rwpkReg->bottomGbHeight; + rectRegion.gbNotUsedForPredFlag = rwpkReg->gbNotUsedForPredFlag; + rectRegion.gbType0 = rwpkReg->gbType0; + rectRegion.gbType1 = rwpkReg->gbType1; + rectRegion.gbType2 = rwpkReg->gbType2; + rectRegion.gbType3 = rwpkReg->gbType3; + + region.region.rectReg = rectRegion; + } + + regionFromBox->rectangularPacking = std::move(rwpkReg); + outRWPKProp.regions.push_back(region); + } + + return outRWPKProp; +} + +COVIInformationInternal Gencovi(const CoverageInformationAtom& inCOVIBox) +{ + COVIInformationInternal outCOVIProp{}; + outCOVIProp.coviShapeType = (COVIShapeType) inCOVIBox.GetCoverageShapeMode(); + outCOVIProp.defaultViewIdc = (OmniViewIdc) inCOVIBox.GetDefaultViewIdc(); + outCOVIProp.viewIdcPresenceFlag = inCOVIBox.GetViewIdcPresenceFlag(); + + for (auto& regionFromBox : inCOVIBox.GetSphereRegions()) + { + COVIRegion region{}; + SphereRegion spheReg = regionFromBox->region; + + region.viewIdc = (OmniViewIdc) regionFromBox->viewIdc; + + region.centAzimuth = spheReg.centreAzimuth; + region.centElevation = spheReg.centreElevation; + region.centTilt = spheReg.centreTilt; + region.azimuthRange = spheReg.azimuthRange; 
+ region.elevationRange = spheReg.elevationRange; + region.interpolate = spheReg.interpolate; + + outCOVIProp.sphereRegions.push_back(region); + } + + return outCOVIProp; +} + +bool IsImageType(FourCCInt inType) +{ + // static const Set IMAGE_TYPES = {"avc1", "hvc1", "hev1", "grid", "iovl", "iden"}; + static const std::set IMAGE_TYPES = {"avc1", "hvc1", "hev1", "grid", "iovl", "iden"}; + + return (IMAGE_TYPES.find(inType) != IMAGE_TYPES.end()); +} + +ParameterSetMap GenDecoderParameterSetMap(const AvcDecoderConfigurationRecord& avcRrd) +{ + std::vector avcSPS; + std::vector avcPPS; + avcRrd.GetOneParameterSet(avcSPS, AvcNalDefs::SPS); + avcRrd.GetOneParameterSet(avcPPS, AvcNalDefs::PPS); + + ParameterSetMap paramsMap; + paramsMap.insert(pair(MediaCodecInfoType::AVC_SPS, move(avcSPS))); + paramsMap.insert(pair(MediaCodecInfoType::AVC_PPS, move(avcPPS))); + + return paramsMap; +} + +ParameterSetMap GenDecoderParameterSetMap(const HevcDecoderConfigurationRecord& hevcRrd) +{ + std::vector hevcSPS; + std::vector hevcPPS; + std::vector hevcVPS; + hevcRrd.GetOneParameterSet(hevcSPS, HevcNalDefs::SPS); + hevcRrd.GetOneParameterSet(hevcPPS, HevcNalDefs::PPS); + hevcRrd.GetOneParameterSet(hevcVPS, HevcNalDefs::VPS); + + ParameterSetMap paramsMap; + paramsMap.insert(pair(MediaCodecInfoType::HEVC_SPS, move(hevcSPS))); + paramsMap.insert(pair(MediaCodecInfoType::HEVC_PPS, move(hevcPPS))); + paramsMap.insert(pair(MediaCodecInfoType::HEVC_VPS, move(hevcVPS))); + + return paramsMap; +} + +ParameterSetMap GenDecoderParameterSetMap(const ElementaryStreamDescriptorAtom& eleDesRrd) +{ + std::vector vcodecSpecInfo; + ParameterSetMap paramsMap; + if (eleDesRrd.GetOneParameterSet(vcodecSpecInfo)) + { + paramsMap.insert(pair(MediaCodecInfoType::AudioSpecificConfig, + move(vcodecSpecInfo))); + } + return paramsMap; +} + +VCD_MP4_END diff --git a/src/isolib/dash_parser/Mp4ReaderUtil.h b/src/isolib/dash_parser/Mp4ReaderUtil.h new file mode 100644 index 00000000..65cc7e54 --- /dev/null +++ b/src/isolib/dash_parser/Mp4ReaderUtil.h @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4ReaderUtil.h +//! \brief: Mp4 reader basic utility functions definition +//! 
\detail: Define basic utility functions in mp4 reader +//! + +#ifndef _MP4READERUTIL_H_ +#define _MP4READERUTIL_H_ + +#include +#include +#include + +#include "Mp4DataTypes.h" +#include "Mp4ReaderImpl.h" +#include "../atoms/UriMetaSampEntryAtom.h" + +using namespace std; + +VCD_MP4_BEGIN + +ChnlPropertyInternal GenChnl(const ChannelLayoutAtom& inChnlBox); + +SA3DPropertyInternal GenSA3D(const SpatialAudioAtom& inSpaAudioBox); + +OmniStereoScopic3D Genst3d(const Stereoscopic3D* inStereo3DBox); + +OmniStereoScopic3D Genst3d(const SphericalVideoV1Atom& inStereo3DBox); + +SphericalVideoV1Property GenSphericalVideoV1Property(const SphericalVideoV1Atom& inSpheVideoBox); + +SphericalVideoV2Property Gensv3d(const SphericalVideoV2Atom* inSpheVideoBox); + +RWPKPropertyInternal Genrwpk(const RegionWisePackingAtom& inRWPKBox); + +COVIInformationInternal Gencovi(const CoverageInformationAtom& inCOVIBox); + +bool IsImageType(FourCCInt inType); + +ParameterSetMap GenDecoderParameterSetMap(const AvcDecoderConfigurationRecord& avcRrd); + +ParameterSetMap GenDecoderParameterSetMap(const HevcDecoderConfigurationRecord& hevcRrd); + +ParameterSetMap GenDecoderParameterSetMap(const ElementaryStreamDescriptorAtom& eleDesRrd); + +template class InContainer, + template class OutContainer = InContainer, + typename OutType = InType> +OutContainer map(const InContainer& input, function func) +{ + OutContainer output; + transform(input.begin(), input.end(), back_inserter(output), func); + return output; +} + +VCD_MP4_END; +#endif // _MP4READERUTIL_H_ diff --git a/src/isolib/dash_parser/Mp4Segment.cpp b/src/isolib/dash_parser/Mp4Segment.cpp new file mode 100644 index 00000000..fa4016f8 --- /dev/null +++ b/src/isolib/dash_parser/Mp4Segment.cpp @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4Segment.cpp +//! \brief: DashSegGroup related class implementation +//! 
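The generic map() helper declared at the end of Mp4ReaderUtil.h above appears with its template parameter list stripped in this listing. Based on its body (std::transform into a back_inserter), a plausible reconstruction and a usage example look like the following; the parameter names, defaults, and the Labels() wrapper are assumptions for illustration, not the original declaration.

```cpp
// Hedged reconstruction of the map() helper; not the original declaration.
#include <algorithm>
#include <functional>
#include <iterator>
#include <string>
#include <vector>

template <typename InType,
          template <typename...> class InContainer,
          template <typename...> class OutContainer = InContainer,
          typename OutType = InType>
OutContainer<OutType> map(const InContainer<InType>& input,
                          std::function<OutType(const InType&)> func)
{
    OutContainer<OutType> output;
    std::transform(input.begin(), input.end(), std::back_inserter(output), func);
    return output;
}

// Example: {1, 2, 3} -> {"1", "2", "3"}
std::vector<std::string> Labels(const std::vector<int>& ids)
{
    return map<int, std::vector, std::vector, std::string>(
        ids, [](const int& v) { return std::to_string(v); });
}
```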
+ +#include "Mp4Segment.h" +#include "Mp4ReaderImpl.h" + +VCD_MP4_BEGIN + +DashSegGroup::DashSegGroup(Mp4Reader& impl, InitSegmentId initSegmentId) + : m_impl(impl) + , m_initSegId(initSegmentId) +{ +} + +ConstDashSegGroup::ConstDashSegGroup(const Mp4Reader& impl, InitSegmentId initSegmentId) + : m_impl(impl) + , m_initSegId(initSegmentId) +{ +} + +DashSegGroup::SegmentIter DashSegGroup::begin() +{ + auto& seqToSeg = m_impl.m_initSegProps.at(m_initSegId).seqToSeg; + return SegmentIter(m_impl, m_initSegId, seqToSeg, seqToSeg.begin()); +} + +DashSegGroup::SegmentIter DashSegGroup::end() +{ + auto& seqToSeg = m_impl.m_initSegProps.at(m_initSegId).seqToSeg; + return SegmentIter(m_impl, m_initSegId, seqToSeg, seqToSeg.end()); +} + +DashSegGroup::SegmentConstIter DashSegGroup::begin() const +{ + auto& seqToSeg = m_impl.m_initSegProps.at(m_initSegId).seqToSeg; + return SegmentConstIter(m_impl, m_initSegId, seqToSeg, seqToSeg.begin()); +} + +DashSegGroup::SegmentConstIter DashSegGroup::end() const +{ + auto& seqToSeg = m_impl.m_initSegProps.at(m_initSegId).seqToSeg; + return SegmentConstIter(m_impl, m_initSegId, seqToSeg, seqToSeg.end()); +} + +ConstDashSegGroup::ConstIter ConstDashSegGroup::begin() const +{ + auto& seqToSeg = m_impl.m_initSegProps.at(m_initSegId).seqToSeg; + return ConstIter(m_impl, m_initSegId, seqToSeg, seqToSeg.begin()); +} + +ConstDashSegGroup::ConstIter ConstDashSegGroup::end() const +{ + auto& seqToSeg = m_impl.m_initSegProps.at(m_initSegId).seqToSeg; + return ConstIter(m_impl, m_initSegId, seqToSeg, seqToSeg.end()); +} + +DashSegGroup::SegmentIter::SegmentIter(Mp4Reader& impl, + InitSegmentId initSegmentId, + SeqToSegMap& seqToSeg, + SeqToSegMap::iterator theiterator) + : m_impl(impl) + , m_initSegId(initSegmentId) + , m_seqToSeg(seqToSeg) + , m_iter(theiterator) +{ +} + +SegmentProperties& DashSegGroup::SegmentIter::operator*() const +{ + return m_impl.m_initSegProps.at(m_initSegId).segPropMap.at(m_iter->second); +} + +SegmentProperties* DashSegGroup::SegmentIter::operator->() const +{ + return &m_impl.m_initSegProps.at(m_initSegId).segPropMap.at(m_iter->second); +} + +bool DashSegGroup::SegmentIter::operator!=(const DashSegGroup::SegmentIter& other) const +{ + return &m_impl != &other.m_impl || m_iter != other.m_iter; +} + +bool DashSegGroup::SegmentConstIter::operator!=(const DashSegGroup::SegmentConstIter& other) const +{ + return &m_impl != &other.m_impl || m_iter != other.m_iter; +} + +DashSegGroup::SegmentIter& DashSegGroup::SegmentIter::operator++() +{ + SegmentId curSegmentId = m_iter->second; + do + { + ++m_iter; + } while (m_iter != m_seqToSeg.end() && m_iter->second == curSegmentId); + return *this; +} + +DashSegGroup::SegmentConstIter::SegmentConstIter(const Mp4Reader& impl, + const InitSegmentId initSegmentId, + const SeqToSegMap& seqToSeg, + SeqToSegMap::const_iterator iterator) + : m_impl(impl) + , m_initSegId(initSegmentId) + , m_seqToSeg(seqToSeg) + , m_iter(iterator) +{ +} + +const SegmentProperties& DashSegGroup::SegmentConstIter::operator*() const +{ + return m_impl.m_initSegProps.at(m_initSegId).segPropMap.at(m_iter->second); +} + +const SegmentProperties* DashSegGroup::SegmentConstIter::operator->() const +{ + return &m_impl.m_initSegProps.at(m_initSegId).segPropMap.at(m_iter->second); +} + +DashSegGroup::SegmentConstIter& DashSegGroup::SegmentConstIter::operator++() +{ + SegmentId curSegmentId = m_iter->second; + do + { + ++m_iter; + } while (m_iter != m_seqToSeg.end() && m_iter->second == curSegmentId); + return *this; +} + +VCD_MP4_END diff --git 
a/src/isolib/dash_parser/Mp4Segment.h b/src/isolib/dash_parser/Mp4Segment.h new file mode 100644 index 00000000..eb27dafe --- /dev/null +++ b/src/isolib/dash_parser/Mp4Segment.h @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4Segment.h +//! \brief: DashSegGroup related class definition +//! \detail: Define segments related operation class +//! + +#ifndef _MP4SEGMENTS_H_ +#define _MP4SEGMENTS_H_ + +#include "Mp4DataTypes.h" + +VCD_MP4_BEGIN + +class Mp4Reader; + +class DashSegGroup +{ +public: + DashSegGroup(Mp4Reader& impl, InitSegmentId initSegmentId); + + class SegmentIter + { + public: + SegmentIter(Mp4Reader& impl, + InitSegmentId initSegmentId, + SeqToSegMap& sequenceToSegment, + SeqToSegMap::iterator); + SegmentProperties& operator*() const; + SegmentProperties* operator->() const; + SegmentIter& operator++(); + Mp4Reader& m_impl; + InitSegmentId m_initSegId; + SeqToSegMap& m_seqToSeg; + SeqToSegMap::iterator m_iter; + + bool operator!=(const SegmentIter& other) const; + }; + + class SegmentConstIter + { + public: + SegmentConstIter(const Mp4Reader& impl, + const InitSegmentId initSegmentId, + const SeqToSegMap& seqToSeg, + SeqToSegMap::const_iterator); + const SegmentProperties& operator*() const; + const SegmentProperties* operator->() const; + SegmentConstIter& operator++(); + const Mp4Reader& m_impl; + const InitSegmentId m_initSegId; + const SeqToSegMap& m_seqToSeg; + SeqToSegMap::const_iterator m_iter; + + bool operator!=(const SegmentConstIter& other) const; + }; + + SegmentIter begin(); + SegmentConstIter begin() const; + SegmentIter end(); + SegmentConstIter end() const; + +private: + Mp4Reader& m_impl; + InitSegmentId m_initSegId; +}; + +class ConstDashSegGroup +{ +public: + ConstDashSegGroup(const Mp4Reader& impl, InitSegmentId initSegmentId); + + typedef DashSegGroup::SegmentConstIter Iter; + typedef DashSegGroup::SegmentConstIter ConstIter; + + ConstIter begin() const; + ConstIter end() const; + +private: + const Mp4Reader& m_impl; + InitSegmentId m_initSegId; +}; + +VCD_MP4_END; +#endif // _MP4SEGMENTS_H_ diff --git a/src/isolib/dash_parser/Mp4StreamIO.cpp b/src/isolib/dash_parser/Mp4StreamIO.cpp new file mode 
100644 index 00000000..024adaa3 --- /dev/null +++ b/src/isolib/dash_parser/Mp4StreamIO.cpp @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4StreamIO.cpp +//! \brief: StreamIO class implementation +//! + +#include "Mp4StreamIO.h" +#include +#include "../atoms/FormAllocator.h" + +using namespace std; + +VCD_MP4_BEGIN + +StreamIOInternal::StreamIOInternal(StreamIO* stream) + : m_stream(stream) + , m_error(false) + , m_eof(false) +{ + m_error = !stream || !stream->SeekAbsoluteOffset(0); +} + +/* +StreamIOInternal& StreamIOInternal::operator=(StreamIOInternal&& other) +{ + m_error = other.m_error; + m_eof = other.m_eof; + m_stream = std::move(other.m_stream); + return *this; +} +*/ +StreamIOInternal::~StreamIOInternal() +{ + if(m_stream) + { + delete m_stream; + m_stream = NULL; + } + // nothing +} + +void StreamIOInternal::ReadStream(char* buffer, StreamIO::offset_t size_) +{ + StreamIO::offset_t got = m_stream->ReadStream(buffer, size_); + if (got < size_) + { + m_eof = true; + m_error = true; + } +} + +int StreamIOInternal::GetOneByte() +{ + char ch; + StreamIO::offset_t got = m_stream->ReadStream(&ch, sizeof(ch)); + if (got) + { + return static_cast(ch); + } + else + { + m_eof = true; + return 0; + } +} + +bool StreamIOInternal::PeekEOS() +{ + char buffer; + auto was = m_stream->TellOffset(); + if (m_stream->ReadStream(&buffer, sizeof(buffer)) == 0) + { + return true; + } + else + { + m_stream->SeekAbsoluteOffset(was); + return false; + } +} + +void StreamIOInternal::SeekOffset(StreamIO::offset_t offset) +{ + if (!m_stream->SeekAbsoluteOffset(offset)) + { + m_eof = true; + m_error = true; + } +} + +StreamIO::offset_t StreamIOInternal::TellOffset() +{ + return m_stream->TellOffset(); +} + +StreamIO::offset_t StreamIOInternal::GetStreamSize() +{ + return m_stream->GetStreamSize(); +} + +void StreamIOInternal::ClearStatus() +{ + m_eof = false; + m_error = false; +} + +bool StreamIOInternal::IsStreamGood() const +{ + return !m_error; +} + +bool StreamIOInternal::IsReachEOS() const +{ + return m_eof; +} + +VCD_MP4_END diff --git a/src/isolib/dash_parser/Mp4StreamIO.h b/src/isolib/dash_parser/Mp4StreamIO.h new file mode 100644 index 
00000000..8b7f8b91 --- /dev/null +++ b/src/isolib/dash_parser/Mp4StreamIO.h @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Mp4StreamIO.h +//! \brief: StreamIO class definition +//! \detail: Define upper layer file operation above StreamIO +//! + +#ifndef _MP4STREAMIO_H_ +#define _MP4STREAMIO_H_ + +#include +#include "../include/Common.h" +#include "../atoms/FormAllocator.h" + +VCD_MP4_BEGIN + +class StreamIO +{ +public: + typedef int64_t offset_t; + + /** Size of an indeterminately sized source, ie. a network stream */ + static const offset_t IndeterminateSize = 0x7fffffffffffffffll; + + StreamIO() {}; + + virtual ~StreamIO() {}; + + virtual offset_t ReadStream(char* buffer, offset_t size) = 0; + + virtual bool SeekAbsoluteOffset(offset_t offset) = 0; + + virtual offset_t TellOffset() = 0; + + virtual offset_t GetStreamSize() = 0; +}; + +class StreamIOInternal +{ +public: + StreamIOInternal(StreamIO* stream = nullptr); + StreamIOInternal(const StreamIOInternal&) = default; + StreamIOInternal& operator=(const StreamIOInternal&) = default; + //StreamIOInternal& operator=(StreamIOInternal&&); + ~StreamIOInternal(); + + void ReadStream(char* buffer, StreamIO::offset_t size); + + int GetOneByte(); + + + void SeekOffset(StreamIO::offset_t offset); + + StreamIO::offset_t TellOffset(); + + StreamIO::offset_t GetStreamSize(); + + bool PeekEOS(); + + bool IsStreamGood() const; + + bool IsReachEOS() const; + + void ClearStatus(); + +private: + StreamIO* m_stream; + bool m_error; + bool m_eof; +}; + +VCD_MP4_END; +#endif // _MP4STREAMIO_H_ diff --git a/src/isolib/dash_writer/AcquireTrackData.cpp b/src/isolib/dash_writer/AcquireTrackData.cpp new file mode 100644 index 00000000..06810b25 --- /dev/null +++ b/src/isolib/dash_writer/AcquireTrackData.cpp @@ -0,0 +1,273 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. 
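
The abstract `StreamIO` interface declared above is what the DASH parser reads segment bytes through, and `StreamIOInternal` wraps it (and, per its destructor, takes ownership of the pointer). As an illustration only, not part of the library, a file-backed implementation built on `std::ifstream` could look like the sketch below; the class name `FileStreamIO` is an assumption, and namespace qualification is omitted.

```cpp
#include <fstream>

// Illustrative only: a file-backed StreamIO. The class name FileStreamIO is
// not part of the library; offset_t is inherited from StreamIO.
class FileStreamIO : public StreamIO
{
public:
    explicit FileStreamIO(const char* path) : m_file(path, std::ios::binary) {}

    offset_t ReadStream(char* buffer, offset_t size) override
    {
        m_file.read(buffer, static_cast<std::streamsize>(size));
        return static_cast<offset_t>(m_file.gcount());      // bytes actually read
    }

    bool SeekAbsoluteOffset(offset_t offset) override
    {
        m_file.clear();                                      // drop a possible EOF state first
        m_file.seekg(static_cast<std::streamoff>(offset));
        return static_cast<bool>(m_file);
    }

    offset_t TellOffset() override { return static_cast<offset_t>(m_file.tellg()); }

    offset_t GetStreamSize() override
    {
        const auto pos = m_file.tellg();
        m_file.seekg(0, std::ios::end);
        const auto size = m_file.tellg();
        m_file.seekg(pos);
        return static_cast<offset_t>(size);
    }

private:
    std::ifstream m_file;
};

// StreamIOInternal deletes the pointer in its destructor, so ownership is handed over:
// StreamIOInternal io(new FileStreamIO("segment_1.m4s"));
```
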
+ * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AcquireTrackData.cpp +//! \brief: AcquireTrackData class implementation +//! + +#include +#include + +#include "FrameWrapper.h" +#include "AcquireTrackData.h" +#include "MovieAtom.h" +#include "BoxBlockAccess.h" +#include "TrackAtom.h" +#include "Utils.h" + +using namespace std; + +VCD_MP4_BEGIN + +SamplePTS GetPTS(const DecodePts& aDecodePts, uint32_t aTimeScale) +{ + SamplePTS pts; + for (auto timeSample : aDecodePts.GetTimeTS()) + { + pts.insert(make_pair(FractS64{timeSample.first, static_cast(aTimeScale)}, timeSample.second)); + } + return pts; +} + +struct AcquireTrackData::Acquirer +{ + const BoxBlockAccess& mp4; + vector frameInfo; + vector frameBlock; + FrameBuf GetFramePackedData(size_t aFrameIndex) const; + size_t GetFramePackedSize(size_t aFrameIndex) const; +}; + +AcquireTrackData::GetDataOfFrameFromTrack::GetDataOfFrameFromTrack(shared_ptr aAcquirer, + size_t aFrameIndex) + : m_acquirer(aAcquirer) + , mFrameIndex(aFrameIndex) +{ +} + +AcquireTrackData::GetDataOfFrameFromTrack::~GetDataOfFrameFromTrack() +{ +} + +FrameBuf AcquireTrackData::GetDataOfFrameFromTrack::Get() const +{ + return m_acquirer->GetFramePackedData(mFrameIndex); +} + +size_t AcquireTrackData::GetDataOfFrameFromTrack::GetDataSize() const +{ + return m_acquirer->GetFramePackedSize(mFrameIndex); +} + +AcquireTrackData::GetDataOfFrameFromTrack* AcquireTrackData::GetDataOfFrameFromTrack::Clone() const +{ + return new GetDataOfFrameFromTrack(m_acquirer, mFrameIndex); +} + +FrameBuf AcquireTrackData::Acquirer::GetFramePackedData(size_t aFrameIndex) const +{ + if (aFrameIndex >= frameBlock.size()) + { + ISO_LOG(LOG_ERROR, "Frame index exceeds size !\n"); + throw exception(); + } + const auto storage = mp4.GetData(frameBlock.at(aFrameIndex)); + FrameBuf gotFrame = FrameBuf(storage.begin(), storage.end()); + return gotFrame; +} + +size_t AcquireTrackData::Acquirer::GetFramePackedSize(size_t aFrameIndex) const +{ + if (aFrameIndex >= frameBlock.size()) + { + ISO_LOG(LOG_ERROR, "Frame index exceeds size !\n"); + throw exception(); + } + size_t packedSize = frameBlock.at(aFrameIndex).size; + return packedSize; +} + +AcquireTrackData::AcquireTrackData(AcquireTrackData&& aOther) + : m_acquirer(move(aOther.m_acquirer)) + , m_trackMeta(move(aOther.m_trackMeta)) +{ +} + +AcquireTrackData::AcquireTrackData( + const BoxBlockAccess& mp4Accessor, + const TrackAtom& trackBox, + const uint32_t timeScale) + : m_acquirer(new Acquirer{mp4Accessor, {}, {}}) +{ + const std::string& handlerName = 
trackBox.GetMediaAtom().GetHandlerAtom().GetName(); + const MediaHeaderAtom& mdhdBox = trackBox.GetMediaAtom().GetMediaHeaderAtom(); + const SampleTableAtom& stblBox = trackBox.GetMediaAtom().GetMediaInformationAtom().GetSampleTableAtom(); + const TrackHeaderAtom& trackHeaderBox = trackBox.GetTrackHeaderAtom(); + const TimeToSampleAtom& timeToSampleBox = stblBox.GetTimeToSampleAtom(); + shared_ptr compositionOffsetBox = stblBox.GetCompositionOffsetAtom(); + shared_ptr compositionToDecodeBox = stblBox.GetCompositionToDecodeAtom(); + shared_ptr editBox = trackBox.GetEditAtom(); + const EditListAtom* editListBox = editBox ? editBox->GetEditListAtom() : nullptr; + const SampleToChunkAtom& sampleToChunkBox = stblBox.GetSampleToChunkAtom(); + const auto& chunkOffsets = stblBox.GetChunkOffsetAtom().GetChunkOffsets(); + const SampleSizeAtom& sampleSizeBox = stblBox.GetSampleSizeAtom(); + auto syncSampleBox = stblBox.GetSyncSampleAtom(); + const set syncSamples = + syncSampleBox + ? ContMapSet([](uint32_t x) { return x - 1; }, syncSampleBox->GetSyncSampleIds()) + : set(); + auto sampleSizes = sampleSizeBox.GetEntrySize(); + + m_trackMeta.trackId = trackHeaderBox.GetTrackID(); + m_trackMeta.timescale = FractU64(1, mdhdBox.GetTimeScale()); + m_trackMeta.type = handlerName == "VideoHandler" + ? TypeOfMedia::Video + : handlerName == "SoundHandler" + ? TypeOfMedia::Audio + : handlerName == "DataHandler" ? TypeOfMedia::Data : TypeOfMedia::Other; + + DecodePts decodePts; + decodePts.SetAtom(&timeToSampleBox); + if (compositionOffsetBox) + { + decodePts.SetAtom(&*compositionOffsetBox); + } + if (compositionToDecodeBox) + { + decodePts.SetAtom(&*compositionToDecodeBox); + } + if (editListBox) + { + decodePts.SetAtom(editListBox, timeScale, mdhdBox.GetTimeScale()); + } + + decodePts.Unravel(); + + SamplePTS timeToSample = GetPTS(decodePts, (uint32_t) m_trackMeta.timescale.m_den); + multimap sampleToTimes; + for (auto tTs : timeToSample) + { + sampleToTimes.insert(make_pair(tTs.second, tTs.first)); + } + + uint32_t chunkIndex = 0; + uint32_t prevChunkIndex = 0; + + uint64_t dataOffset = 0; + for (uint32_t sampleIndex = 0; sampleIndex < sampleSizeBox.GetSampleNum(); ++sampleIndex) + { + auto ctsRange = sampleToTimes.equal_range(sampleIndex); + FrameCts cts; + auto ctsIt = ctsRange.first; + while (ctsIt != ctsRange.second) + { + cts.push_back(ctsIt->second); + ++ctsIt; + } + if (cts.size() != 1) + { + ISO_LOG(LOG_ERROR, "Invalid CTS size !\n"); + throw exception(); + } + if (!sampleToChunkBox.GetSampleChunkIndex(sampleIndex, chunkIndex)) + { + ISO_LOG(LOG_ERROR, "Failed to get sample chunk index !\n"); + throw exception(); + } + auto followingCts = timeToSample.find(*cts.begin()); + if (followingCts == timeToSample.end()) + { + ISO_LOG(LOG_ERROR, "Failed to find the beginning CTS !\n"); + throw exception(); + } + ++followingCts; + FrameDuration frameDur; + if (followingCts != timeToSample.end()) + { + frameDur = followingCts->first.cast() - cts.begin()->cast(); + } + else + { + frameDur = FrameDuration(); + } + + if (chunkIndex != prevChunkIndex) + { + dataOffset = chunkOffsets.at(chunkIndex - 1); + prevChunkIndex = chunkIndex; + } + bool idrFrame = syncSamples.count(sampleIndex) != 0; + SampleFlags sampleFlags{}; + sampleFlags.flags.sample_is_non_sync_sample = !idrFrame; + + FrameInfo frameInfo{cts, frameDur, idrFrame, {sampleFlags.flagsAsUInt}, {}}; + dataOffset += sampleSizes[sampleIndex]; + m_acquirer->frameInfo.push_back(frameInfo); + m_acquirer->frameBlock.push_back(DataBlock(dataOffset, 
sampleSizes[sampleIndex])); + } +} + +AcquireTrackData::~AcquireTrackData() +{ +} + +FrameBuf AcquireTrackData::GetFramePackedData(size_t aFrameIndex) const +{ + assert(aFrameIndex < m_acquirer->frameBlock.size()); + const auto frameBlock = m_acquirer->frameBlock.at(aFrameIndex); + const auto storage = m_acquirer->mp4.GetData(frameBlock); + return FrameBuf(storage.begin(), storage.end()); +} + +Frame AcquireTrackData::GetFrame(size_t aFrameIndex) const +{ + assert(aFrameIndex < m_acquirer->frameInfo.size()); + const auto frameInfo = m_acquirer->frameInfo.at(aFrameIndex); + const auto frameBlock = m_acquirer->frameBlock.at(aFrameIndex); + const auto storage = m_acquirer->mp4.GetData(frameBlock); + return Frame{frameInfo, FrameBuf(storage.begin(), storage.end())}; +} + +Frames AcquireTrackData::GetFrames() const +{ + Frames frames; + for (size_t frameIndex = 0; frameIndex < m_acquirer->frameInfo.size(); ++frameIndex) + { + const auto frameInfo = m_acquirer->frameInfo.at(frameIndex); + auto acquire = + unique_ptr(new GetDataOfFrameFromTrack(m_acquirer, frameIndex)); + frames.push_back(FrameWrapper{move(acquire), frameInfo}); + } + return frames; +} + +TrackMeta AcquireTrackData::GetTrackMeta() const +{ + return m_trackMeta; +} + +VCD_MP4_END diff --git a/src/isolib/dash_writer/AcquireTrackData.h b/src/isolib/dash_writer/AcquireTrackData.h new file mode 100644 index 00000000..8365b192 --- /dev/null +++ b/src/isolib/dash_writer/AcquireTrackData.h @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: AcquireTrackData.h +//! \brief: AcquireTrackData class definition and related data type +//! \detail: Define track related operation and data type +//! 
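
Putting the implementation above to work is straightforward once a `TrackAtom` has been located in the parsed moov. A minimal sketch, assuming `mp4Access`, `trackAtom` and `movieTimescale` are obtained elsewhere:

```cpp
// Sketch only: 'mp4Access', 'trackAtom' and 'movieTimescale' are assumed to
// come from parsing the moov box elsewhere; error handling is omitted.
AcquireTrackData acquirer(mp4Access, trackAtom, movieTimescale);

TrackMeta meta   = acquirer.GetTrackMeta();   // track id, timescale, media type
Frames    frames = acquirer.GetFrames();      // lazy wrappers, one per sample

size_t totalBytes = 0;
for (const FrameWrapper& fw : frames)
{
    totalBytes += fw.GetSize();               // sample size, payload not loaded yet
}
```
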
+ +#ifndef _TRACK_H_ +#define _TRACK_H_ + +#include "Frame.h" +#include "FrameWrapper.h" +#include "../include/Index.h" +#include "Fraction.h" +#include "../atoms/DecPts.h" + +using namespace std; + +VCD_MP4_BEGIN + +using PTSTime = FractS64; +using SamplePTS = std::map; + +class TrackAtom; + +class BoxBlockAccess; + +typedef Index TrackId; + +class AcquireTrackData; + +typedef list Frames; + +enum class TypeOfMedia +{ + Video, + Audio, + Data, + Other +}; + +struct BrandSpec +{ + string majorBrand; + uint32_t minorVersion; + vector compatibleBrands; +}; + +struct TrackMeta +{ + TrackId trackId; + FractU64 timescale; + TypeOfMedia type; + DataItem trackType; +}; + +class AcquireTrackData +{ +public: + AcquireTrackData(const BoxBlockAccess& mp4Accessor, const TrackAtom& trackBox, const uint32_t timeScale); + + AcquireTrackData(const AcquireTrackData& other) = delete; + AcquireTrackData(AcquireTrackData&& other); + AcquireTrackData& operator=(const AcquireTrackData& other) = delete; + AcquireTrackData& operator=(AcquireTrackData&& other) = delete; + + virtual ~AcquireTrackData(); + + Frame GetFrame(size_t aFrameIndex) const; + FrameBuf GetFramePackedData(size_t aFrameIndex) const; + + Frames GetFrames() const; + + TrackMeta GetTrackMeta() const; + +private: + struct Acquirer; + + class GetDataOfFrameFromTrack : public GetDataOfFrame + { + public: + GetDataOfFrameFromTrack(shared_ptr aAcquirer, size_t aFrameIndex); + + ~GetDataOfFrameFromTrack(); + + FrameBuf Get() const override; + size_t GetDataSize() const override; + + GetDataOfFrameFromTrack* Clone() const override; + + private: + shared_ptr m_acquirer; + size_t mFrameIndex; + }; + + friend class GetDataOfFrameFromTrack; + + shared_ptr m_acquirer; + TrackMeta m_trackMeta; +}; + +VCD_MP4_END; +#endif // _TRACK_H_ diff --git a/src/isolib/dash_writer/BoxBlockAccess.cpp b/src/isolib/dash_writer/BoxBlockAccess.cpp new file mode 100644 index 00000000..8a802be5 --- /dev/null +++ b/src/isolib/dash_writer/BoxBlockAccess.cpp @@ -0,0 +1,184 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BoxBlockAccess.cpp +//! \brief: BoxBlockAccess class implementation +//! 
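
Note that `TrackMeta::timescale` is stored as the fraction 1/mdhd-timescale, i.e. seconds per tick, which makes conversions to wall-clock time a single multiplication. A small illustrative helper (not part of the library):

```cpp
// Illustrative helper: convert a duration in track ticks to seconds.
// TrackMeta::timescale holds FractU64(1, mdhdTimescale), i.e. seconds per tick.
double TicksToSeconds(uint64_t ticks, const TrackMeta& meta)
{
    return static_cast<double>(ticks) * meta.timescale.asDouble();
}

// e.g. 90000 ticks in a 90 kHz track -> 1.0 second
```
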
+ +#include +#include "BoxBlockAccess.h" + +using namespace std; + +VCD_MP4_BEGIN + +struct BoxInfo +{ + FourCCInt fourcc; + bool hasSubBoxes; +}; + +DataBlock::DataBlock(size_t aBlockOffset, size_t aBlockSize) + : offset(aBlockOffset) + , size(aBlockSize) +{ +} + +std::vector DataBlock::GetData(istream& aStream) const +{ + std::vector buffer(size); + aStream.seekg(streamoff(offset)); + aStream.read(reinterpret_cast(&buffer[0]), streamsize(size)); + return buffer; +} + +Stream DataBlock::GetStream(istream& aStream) const +{ + std::vector buffer(size); + aStream.seekg(streamoff(offset)); + aStream.read(reinterpret_cast(&buffer[0]), streamsize(size)); + if (!aStream) + { + ISO_LOG(LOG_ERROR, "Unexpected EOS !\n"); + throw exception(); + } + return Stream(buffer); +} + +map GenBoxInfos() +{ + const BoxInfo boxInfo = {FourCCInt("ftyp"), false}; + map constructedMap; + constructedMap.insert(make_pair(FourCCInt("ftyp"), boxInfo)); + return constructedMap; +} + +const map boxInfos = GenBoxInfos(); + +uint32_t ReadU32(istream& aStream) +{ + uint32_t v = 0; + for (int c = 0; c < 4; ++c) + { + v = (v << 8) | static_cast(aStream.get()); + } + if (!aStream.good()) + { + ISO_LOG(LOG_ERROR, "Unexpected EOS !\n"); + throw exception(); + } + return v; +} + +BoxBlock::BoxBlock(FourCCInt aFourcc, size_t aBlockOffset, size_t aBlockSize) + : DataBlock(aBlockOffset, aBlockSize) + , fourcc(aFourcc) + , indexBuilt(false) +{ +} + +BoxIndex BoxBlockAccess::GenIndex(istream& aStream, const BoxBlock* aParent) +{ + BoxIndex index; + + streamoff endLimit = 0; + if (aParent) + { + endLimit = streamoff(aParent->offset + aParent->size); + } + + size_t offset = 0; + aStream.get(); + if (aStream) + { + aStream.unget(); + } + while (aStream && (!aParent || aStream.tellg() < endLimit)) + { + size_t size = size_t(ReadU32(aStream)); + FourCCInt fourcc = FourCCInt(ReadU32(aStream)); + aStream.seekg(aStream.tellg() + streamoff(size - 8)); + + index[fourcc].push_back(BoxBlock(fourcc, offset, size)); + offset += size; + aStream.get(); + if (aStream) + { + aStream.unget(); + } + } + + aStream.clear(); + + return index; +} + +BoxBlockAccess::BoxBlockAccess(istream& aStream) + : m_stream(aStream) + , m_index(GenIndex(aStream, nullptr)) +{ +} + +BoxBlockAccess::~BoxBlockAccess() +{ +} + +BoxBlock BoxBlockAccess::GetBoxBlock(FourCCInt aFourcc) const +{ + map>::const_iterator iter = m_index.find(aFourcc); + if (iter == m_index.end()) + { + ISO_LOG(LOG_ERROR, "Couldn't find Boxes List!\n"); + throw exception(); + } + + list boxList = iter->second; + if (boxList.size() == 0) + { + ISO_LOG(LOG_ERROR, "Couldn't find Box !\n"); + throw exception(); + } + + return *((iter->second).begin()); +} + +std::vector BoxBlockAccess::GetData(const DataBlock& aBlock) const +{ + return aBlock.GetData(m_stream); +} + +Stream BoxBlockAccess::GetStream(const DataBlock& aBlock) const +{ + return aBlock.GetStream(m_stream); +} + +Stream BoxBlockAccess::GetStream(FourCCInt aFourcc) const +{ + return GetBoxBlock(aFourcc).GetStream(m_stream); +} + +VCD_MP4_END diff --git a/src/isolib/dash_writer/BoxBlockAccess.h b/src/isolib/dash_writer/BoxBlockAccess.h new file mode 100644 index 00000000..8fffff78 --- /dev/null +++ b/src/isolib/dash_writer/BoxBlockAccess.h @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
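
Using the top-level index that `BoxBlockAccess` builds above might look like the following sketch; the `uint8_t` element type is an assumption (the template argument was elided in the diff), and error handling is omitted.

```cpp
// Sketch: index the top-level boxes of a file and fetch the raw 'ftyp' box.
std::ifstream file("input.mp4", std::ios::binary);
BoxBlockAccess access(file);

BoxBlock ftyp = access.GetBoxBlock(FourCCInt("ftyp"));
std::vector<uint8_t> raw = access.GetData(ftyp);     // whole box, header included

Stream bs = access.GetStream(FourCCInt("ftyp"));     // same bytes as a parseable Stream
```
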
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BoxBlockAccess.h +//! \brief: BoxBlockAccess class definition +//! \detail: Define the mp4 file access operation, +//! + +#ifndef _MP4ACCES_H_ +#define _MP4ACCES_H_ + +#include +#include +#include +#include +#include +#include "Stream.h" +#include "FourCCInt.h" +#include "FormAllocator.h" + +using namespace std; + +VCD_MP4_BEGIN + +struct DataBlock +{ + DataBlock() + : offset(0) + , size(0) + { + } + DataBlock(size_t aBlockOffset, size_t aBlockSize); + + size_t offset; + size_t size; + + std::vector GetData(istream& aStream) const; + Stream GetStream(istream& aStream) const; +}; + +struct BoxBlock; +typedef map> BoxIndex; + +struct BoxBlock : public DataBlock +{ + BoxBlock(FourCCInt aFourcc, size_t aBlockOffset, size_t aBlockSize); + + FourCCInt fourcc; + + bool operator<(const BoxBlock& other) + { + return fourcc < other.fourcc + ? true + : fourcc > other.fourcc + ? false + : offset < other.offset + ? true + : offset > other.offset + ? false + : size < other.size ? true + : size > other.size ? false : false; + } + + bool indexBuilt; + BoxIndex index; +}; + +class BoxBlockAccess +{ +public: + BoxBlockAccess(istream& aStream); + + BoxBlock GetBoxBlock(FourCCInt aFourcc) const; + std::vector GetData(const DataBlock& aBlock) const; + Stream GetStream(const DataBlock& aBlock) const; + Stream GetStream(FourCCInt aFourcc) const; + template + void ParseBoxBlock(BoxType& aBox) const; + + virtual ~BoxBlockAccess(); + +private: + static BoxIndex GenIndex(istream& aStream, const BoxBlock* aParent); + + istream& m_stream; + BoxIndex m_index; +}; + +template +void BoxBlockAccess::ParseBoxBlock(BoxType& aBox) const +{ + Stream bs = GetStream(aBox.getType()); + aBox.parseBox(bs); +} + +VCD_MP4_END; +#endif // _MP4ACCESS_H_ diff --git a/src/isolib/dash_writer/BoxWrapper.h b/src/isolib/dash_writer/BoxWrapper.h new file mode 100644 index 00000000..b894ef6b --- /dev/null +++ b/src/isolib/dash_writer/BoxWrapper.h @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
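
`ParseBoxBlock()` ties the index to the atom classes: it looks a box up by the atom's own four-character code and feeds its bytes to the atom's parser. A hedged sketch, assuming an atom type (here named `FileTypeAtom`, which is an assumption) exposes `getType()`/`parseBox()` as the template requires:

```cpp
// Sketch only: the atom type name FileTypeAtom and its getType()/parseBox()
// members are assumptions; ParseBoxBlock() requires exactly that interface.
BoxBlockAccess access(file);

FileTypeAtom ftypAtom;
access.ParseBoxBlock(ftypAtom);   // finds "ftyp", wraps it in a Stream, parses it
```
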
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: BoxWrapper.h +//! \brief: Upper layer box definition +//! \detail: Define upper layer box structures +//! + +#ifndef _BOXDEF_H_ +#define _BOXDEF_H_ + +#include + +#include "FormAllocator.h" +#include "TypeAtom.h" +#include "HandlerAtom.h" +#include "MediaHeaderAtom.h" +#include "MovieAtom.h" +#include "ProjRelatedAtom.h" +#include "TrackHeaderAtom.h" + +#include "SegmentWriter.h" + +using namespace std; + +VCD_MP4_BEGIN + +struct FileTypeBoxWrapper +{ + UniquePtr fileTypeBox; + FileTypeBoxWrapper(UniquePtr&& aFileTypeBox) + : fileTypeBox(move(aFileTypeBox)) + { + } +}; + +struct MovieBoxWrapper +{ + UniquePtr movieBox; + MovieBoxWrapper(UniquePtr&& aMovieBox) + : movieBox(move(aMovieBox)) + { + } +}; + +struct MediaHeaderBoxWrapper +{ + UniquePtr mediaHeaderBox; + MediaHeaderBoxWrapper(UniquePtr&& aMediaHeaderBox) + : mediaHeaderBox(move(aMediaHeaderBox)) + { + } +}; + +struct HandlerBoxWrapper +{ + UniquePtr handlerBox; + HandlerBoxWrapper(UniquePtr&& aHandlerBox) + : handlerBox(move(aHandlerBox)) + { + } +}; + +struct TrackHeaderBoxWrapper +{ + UniquePtr trackHeaderBox; + TrackHeaderBoxWrapper(UniquePtr&& aTrackHeaderBox) + : trackHeaderBox(move(aTrackHeaderBox)) + { + } +}; + +struct SampleEntryBoxWrapper +{ + UniquePtr sampleEntryBox; + SampleEntryBoxWrapper(UniquePtr&& aSampleEntryBox) + : sampleEntryBox(move(aSampleEntryBox)) + { + } +}; + +struct RegionBlock +{ + UniquePtr region; + RegionBlock(UniquePtr&& aRegion) + : region(move(aRegion)) + { + } +}; + +VCD_MP4_END; +#endif // _BOXDEF_H_ diff --git a/src/isolib/dash_writer/CMakeLists.txt b/src/isolib/dash_writer/CMakeLists.txt new file mode 100644 index 00000000..65d75661 --- /dev/null +++ b/src/isolib/dash_writer/CMakeLists.txt @@ -0,0 +1,37 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) + +option(USE_ANDROID_NDK + "Use android ndk" + OFF +) + +PROJECT(dashwriter) + +AUX_SOURCE_DIRECTORY(../atoms DIR_ATOMS_SRCS) +AUX_SOURCE_DIRECTORY(../common DIR_COMMON_SRCS) +AUX_SOURCE_DIRECTORY(. 
DIR_DASHWRITER_SRCS) + +if(NOT USE_ANDROID_NDK) +ADD_DEFINITIONS("-g -c -fPIC -lglog -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 + -z noexecstack -z relro -z now -fstack-protector-strong + -fPIE -fPIC -pie -O2 -D_FORTIFY_SOURCE=2 -Wformat + -Wformat-security -Wl,-S -Wall -Werror") +else() +ADD_DEFINITIONS("-g -c -fPIC -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 + -fPIE -fPIC -O2 -D_FORTIFY_SOURCE=2 -Wformat + -Wformat-security -Wall") +endif() + +INCLUDE_DIRECTORIES(/usr/local/include ../include ../atoms ../common) + +LINK_DIRECTORIES(/usr/local/lib ../atoms ../common) + +set(DIR_DASHWRITER_SRCS + ${DIR_DASHWRITER_SRCS} + ${DIR_ATOMS_SRCS} + ${DIR_COMMON_SRCS} + ) + +ADD_LIBRARY(dashwriter STATIC ${DIR_DASHWRITER_SRCS}) + +TARGET_LINK_LIBRARIES(dashwriter glog) diff --git a/src/isolib/dash_writer/DataItem.h b/src/isolib/dash_writer/DataItem.h new file mode 100644 index 00000000..921effcf --- /dev/null +++ b/src/isolib/dash_writer/DataItem.h @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DataItem.h +//! \brief: DataItem class definition +//! \detail: DataItem class is the basic operation class +//! 
+ +#ifndef _DATAITEM_H_ +#define _DATAITEM_H_ + +#include +#include "../include/Common.h" + +using namespace std; + +VCD_MP4_BEGIN + +class EmptyData +{ +}; + +extern EmptyData none; + +template +class DataItem +{ +public: + DataItem(); + DataItem(EmptyData); + DataItem(const DataItem&); + DataItem(DataItem&&); + + DataItem(const T&); + DataItem(T&&); + + DataItem& operator=(const DataItem&); + DataItem& operator=(DataItem&&); + + ~DataItem(); + + bool operator==(const DataItem& other) const; + bool operator!=(const DataItem& other) const; + bool operator<=(const DataItem& other) const; + bool operator>=(const DataItem& other) const; + bool operator<(const DataItem& other) const; + bool operator>(const DataItem& other) const; + + explicit operator bool() const; + + T& operator*(); + const T& operator*() const; + T& get(); + const T& get() const; + T* operator->(); + const T* operator->() const; + +private: + unique_ptr m_value; +}; + +template +DataItem GenDataItem(const T& value); + +template +DataItem GenDataItem(T&& value); + +template +First CoalesceData(First value, Rest... rest); + +#include "DataItem.icc" + +VCD_MP4_END; +#endif diff --git a/src/isolib/dash_writer/DataItem.icc b/src/isolib/dash_writer/DataItem.icc new file mode 100644 index 00000000..37337294 --- /dev/null +++ b/src/isolib/dash_writer/DataItem.icc @@ -0,0 +1,229 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: DataItem.icc +//! \brief: DataItem class implementation +//! 
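
`DataItem<T>` behaves like a heap-backed optional: default construction (or `EmptyData`/`none`) gives an empty item, `GenDataItem()` wraps a value, and `CoalesceData()` returns the first engaged argument. A short usage sketch:

```cpp
// Sketch: basic DataItem usage.
DataItem<uint32_t> a;                                  // empty
DataItem<uint32_t> b = GenDataItem<uint32_t>(42u);     // engaged

if (b)                      // explicit operator bool
{
    uint32_t v = *b;        // or b.get()
    (void) v;
}

DataItem<uint32_t> first = CoalesceData(a, b);         // yields b, since a is empty
```
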
+ +#ifndef _DATAITEM_ICC_ +#define _DATAITEM_ICC_ + +#include "DataItem.h" + +using namespace std; + +//VCD_MP4_BEGIN + +//NoneType none; + +template +DataItem::DataItem() +{ + // nothing +} + +template +DataItem::DataItem(EmptyData) +{ + // nothing +} + +template +DataItem::~DataItem() +{ + // nothing +} + +template +bool DataItem::operator==(const DataItem& other) const +{ + bool a = !!m_value.get(); + bool b = !!other.m_value.get(); + return (!a && !b) || (a && b && *m_value == *other.m_value); +} + +template +bool DataItem::operator!=(const DataItem& other) const +{ + return !(*this == other); +} + +template +bool DataItem::operator<(const DataItem& other) const +{ + bool a = !!m_value.get(); + bool b = !!other.m_value.get(); + return (a < b) ? true : (a > b) ? false : (!a && !b) ? false : (*m_value < *other.m_value); +} + +template +bool DataItem::operator>(const DataItem& other) const +{ + bool a = !!m_value.get(); + bool b = !!other.m_value.get(); + return (a < b) ? false : (a > b) ? true : (!a && !b) ? false : (*m_value > *other.m_value); +} + +template +bool DataItem::operator<=(const DataItem& other) const +{ + bool a = !!m_value.get(); + bool b = !!other.m_value.get(); + return (a < b) ? true : (a > b) ? false : (!a && !b) ? true : (*m_value <= *other.m_value); +} + +template +bool DataItem::operator>=(const DataItem& other) const +{ + bool a = !!m_value.get(); + bool b = !!other.m_value.get(); + return (a < b) ? false : (a > b) ? true : (!a && !b) ? true : (*m_value >= *other.m_value); +} + +template +DataItem::DataItem(const DataItem& other) + : m_value(other.m_value ? new T(*other.m_value) : nullptr) +{ + // nothing +} + +template +DataItem::DataItem(DataItem&& other) + : m_value(move(other.m_value)) +{ + // nothing +} + +template +DataItem& DataItem::operator=(const DataItem& other) +{ + if (this != &other) + { + m_value.reset(other.m_value ? new T(*other.m_value) : nullptr); + } + return *this; +} + +template +DataItem& DataItem::operator=(DataItem&& other) +{ + if (this != &other) + { + m_value = move(other.m_value); + } + return *this; +} + +template +DataItem::DataItem(const T& other) +{ + m_value.reset(new T(other)); +} + +template +DataItem::DataItem(T&& other) +{ + m_value.reset(new T(move(other))); +} + +template +DataItem::operator bool() const +{ + return !!m_value.get(); +} + +template +T& DataItem::operator*() +{ + return *m_value; +} + +template +const T& DataItem::operator*() const +{ + return *m_value; +} + +template +T& DataItem::get() +{ + return *m_value; +} + +template +const T& DataItem::get() const +{ + return *m_value; +} + +template +T* DataItem::operator->() +{ + return m_value.get(); +} + +template +const T* DataItem::operator->() const +{ + return m_value.get(); +} + +template +DataItem GenDataItem(const T& value) +{ + return DataItem(value); +} + +template +DataItem GenDataItem(T&& value) +{ + return DataItem(move(value)); +} + +// base case +template +First CoalesceData(First value) +{ + return value; +} + +// general case +template +First CoalesceData(First value, Rest... rest) +{ + if (value) + { + return value; + } + else + { + return CoalesceData(rest...); + } +} + +//VCD_MP4_END +#endif diff --git a/src/isolib/dash_writer/Fraction.h b/src/isolib/dash_writer/Fraction.h new file mode 100644 index 00000000..655cc4a7 --- /dev/null +++ b/src/isolib/dash_writer/Fraction.h @@ -0,0 +1,351 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. 
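
Worth noting from the implementation above: copying a `DataItem` allocates a fresh copy of the held value (`new T(*other.m_value)`), so copies are fully independent, while moving just transfers the pointer. A quick sketch:

```cpp
// Sketch: DataItem copies are deep, so mutating one does not affect the other.
DataItem<std::string> codec = GenDataItem(std::string("hvc1"));
DataItem<std::string> other = codec;   // deep copy
*other = "avc1";
// *codec is still "hvc1"; *other is "avc1"
```
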
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Fraction.h +//! \brief: Fraction class definition +//! \detail: Define the basic class to represent rational number +//! + +#ifndef _FRACTION_H_ +#define _FRACTION_H_ + +#include +#include +#include +#include +#include +#include "../include/Common.h" + +using namespace std; + +VCD_MP4_BEGIN + +template +T GetGCD(T first, T second) +{ + T third; + while (first) + { + third = first; + first = second % first; + second = third; + } + + return second; +} + +template +T GetLCM(T first, T second) +{ + return first / GetGCD(first, second) * second; +} + +struct InvalidFraction +{ +}; + +template +struct Fraction +{ +public: + typedef T value; + + Fraction(); + Fraction(T aNum, T aDen); + Fraction(InvalidFraction); + ~Fraction(); + + Fraction GetMinimum() const; + Fraction per1() const; + + template + U cast() const; + + double asDouble() const; + + T m_num, m_den; +}; + +typedef Fraction FractU64; +typedef Fraction FractS64; + +template +Fraction::Fraction() + : m_num(0) + , m_den(1) +{ +} + +#if defined(_MSC_VER) +#pragma warning(push) +#pragma warning(disable : 4146) +#endif + +template +Fraction::Fraction(T aNum, T aDen) + : m_num(aNum) + , m_den(aDen) +{ + if (m_den < 0) + { + m_den = -m_den; + m_num = -m_num; + } + assert(m_den != 0); +} + +#if defined(_MSC_VER) +#pragma warning(pop) +#endif + +template +template +U Fraction::cast() const +{ + return U(static_cast(m_num), static_cast(m_den)); +} + +template +Fraction::Fraction(InvalidFraction) + : m_num(0) + , m_den(0) +{ +} + +template +Fraction::~Fraction() +{ +} + +template +double Fraction::asDouble() const +{ + return double(m_num) / m_den; +} + +template +void shareDenominator(Fraction& x, Fraction& y) +{ + if (x.m_den != y.m_den) + { + T cm = GetLCM(x.m_den, y.m_den); + T mulX = cm / x.m_den; + T mulY = cm / y.m_den; + + x.m_num *= mulX; + x.m_den *= mulX; + + y.m_num *= mulY; + y.m_den *= mulY; + assert(x.m_den == y.m_den); + } +} + +template +void shareDenominators(Iterator begin, Iterator end) +{ + if (distance(begin, end) >= 2) + { + Iterator it = begin; + auto cm = (**it).m_den; + ++it; + for (; it != end; ++it) + { + cm = GetLCM(cm, (**it).m_den); + } + + for (it = begin; it != end; ++it) + { + auto mul = cm / 
(**it).m_den; + (**it).m_num *= mul; + (**it).m_den *= mul; + } + } +} + +template +Fraction& operator+=(Fraction& self, Fraction other) +{ + shareDenominator(self, other); + self.m_num += other.m_num; + return self; +} + +template +Fraction& operator-=(Fraction& self, Fraction other) +{ + shareDenominator(self, other); + self.m_num -= other.m_num; + return self; +} + +template +bool operator==(Fraction x, Fraction y) +{ + if (x.m_den == y.m_den) + { + return x.m_num == y.m_num; + } + else + { + shareDenominator(x, y); + return x.m_num == y.m_num; + } +} + +template +bool operator<=(Fraction x, Fraction y) +{ + if (x.m_den == y.m_den) + { + return x.m_num <= y.m_num; + } + else + { + shareDenominator(x, y); + return x.m_num <= y.m_num; + } +} + +template +bool operator>=(Fraction x, Fraction y) +{ + if (x.m_den == y.m_den) + { + return x.m_num >= y.m_num; + } + else + { + shareDenominator(x, y); + return x.m_num >= y.m_num; + } +} + +template +bool operator!=(Fraction x, Fraction y) +{ + if (x.m_den == y.m_den) + { + return x.m_num != y.m_num; + } + else + { + shareDenominator(x, y); + return x.m_num != y.m_num; + } +} + +template +bool operator<(Fraction x, Fraction y) +{ + if (x.m_den == y.m_den) + { + return x.m_num < y.m_num; + } + else + { + shareDenominator(x, y); + return x.m_num < y.m_num; + } +} + +template +bool operator>(Fraction x, Fraction y) +{ + if (x.m_den == y.m_den) + { + return x.m_num > y.m_num; + } + else + { + shareDenominator(x, y); + return x.m_num > y.m_num; + } +} + +template +Fraction operator*(Fraction x, Fraction y) +{ + return Fraction(x.m_num * y.m_num, x.m_den * y.m_den).GetMinimum(); +} + +template +Fraction operator/(Fraction x, Fraction y) +{ + return x * y.per1().GetMinimum(); +} + +template +Fraction operator+(Fraction x, Fraction y) +{ + shareDenominator(x, y); + return Fraction(x.m_num + y.m_num, x.m_den).GetMinimum(); +} + +template +Fraction operator-(Fraction x, Fraction y) +{ + shareDenominator(x, y); + return Fraction(x.m_num - y.m_num, x.m_den).GetMinimum(); +} + +template +Fraction operator-(Fraction x) +{ + return Fraction(-x.m_num, x.m_den); +} + +template +Fraction Fraction::GetMinimum() const +{ + Fraction r(*this); + if (r.m_num == 0) + { + return Fraction(0, 1); + } + else + { + T cd = GetGCD(m_num, m_den); + return Fraction(m_num / cd, m_den / cd); + } +} + +template +Fraction Fraction::per1() const +{ + return Fraction(m_den, m_num); +} + +template +ostream& operator<<(ostream& stream, Fraction x) +{ + return stream << x.m_num << "/" << x.m_den; +} + +VCD_MP4_END; + +#endif // _FRACTION_H_ diff --git a/src/isolib/dash_writer/Frame.h b/src/isolib/dash_writer/Frame.h new file mode 100644 index 00000000..fe005ef3 --- /dev/null +++ b/src/isolib/dash_writer/Frame.h @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. 
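
`Fraction<T>` keeps timing exact by staying in rational arithmetic: the binary operators bring both operands to a common denominator (`shareDenominator`) and `GetMinimum()` reduces the result by the GCD. A small worked sketch with `FractU64`:

```cpp
// Sketch: exact rational timing with FractU64.
FractU64 frameDur(1001, 30000);          // one 29.97 fps frame, in seconds
FractU64 timescale(90000, 1);            // 90 kHz media timescale, ticks per second

FractU64 ticksPerFrame = frameDur * timescale;   // 90090000/30000 -> 3003/1 after GetMinimum()
FractU64 twoFrames     = frameDur + frameDur;    // 2002/30000 -> 1001/15000 after GetMinimum()

double seconds = frameDur.asDouble();            // ~0.033367
```
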
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Frame.h +//! \brief: Frame related information definition +//! \detail: Define related information about frame, like +//! frame rate, frame CTS and so on. +//! + +#ifndef _FRAME_H_ +#define _FRAME_H_ + +#include +#include +#include +#include +#include + +#include "../include/Common.h" +#include "DataItem.h" +#include "Fraction.h" + +using namespace std; + +VCD_MP4_BEGIN + +typedef vector FrameBuf; + +typedef FractS64 FrameTime; +typedef FractU64 FrameDuration; +typedef FractU64 FrameRate; +typedef list FrameCts; +typedef FrameTime FrameDts; + +struct FrameInfo +{ + FrameCts cts; + FrameDuration duration; + bool isIDR; + FlagsOfSample sampleFlags; + + DataItem dts; + + FrameInfo() = default; +}; + +struct Frame +{ + FrameInfo frameInfo; + FrameBuf frameBuf; +}; + +VCD_MP4_END; +#endif // _FRAME_H_ diff --git a/src/isolib/dash_writer/FrameWrapper.cpp b/src/isolib/dash_writer/FrameWrapper.cpp new file mode 100644 index 00000000..4c263883 --- /dev/null +++ b/src/isolib/dash_writer/FrameWrapper.cpp @@ -0,0 +1,114 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: FrameWrapper.cpp +//! \brief: FrameWrapper class implementation +//! 
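
`FrameInfo` carries what the writer needs per sample: composition timestamp(s), duration, the IDR flag and the fragment sample flags. Following the same aggregate initialization used in AcquireTrackData.cpp, filling one in by hand could look like this sketch (`SampleFlags` is the atoms-layer flags union used there):

```cpp
// Sketch: describe a single IDR frame at t = 0 with a 1/30 s duration.
SampleFlags flags{};
flags.flags.sample_is_non_sync_sample = 0;       // it is a sync (IDR) sample

FrameInfo info{ FrameCts{ FrameTime(0, 1) },     // composition timestamp(s)
                FrameDuration(1, 30),            // duration in seconds
                true,                            // isIDR
                { flags.flagsAsUInt },           // FlagsOfSample
                {} };                            // dts left unset
```
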
+ +#include "FrameWrapper.h" +#include "AcquireTrackData.h" + +using namespace std; + +VCD_MP4_BEGIN + +GetDataOfFrame::GetDataOfFrame() +{ +} + +GetDataOfFrame::~GetDataOfFrame() +{ +} + +FrameWrapper::FrameWrapper(unique_ptr&& aAcquire, FrameInfo aFrameInfo) + : m_acquire(move(aAcquire)) + , m_frameInfo(aFrameInfo) +{ +} + +FrameWrapper::FrameWrapper(const FrameWrapper& aOther) + : m_acquire(aOther.m_acquire->Clone()) + , m_frameInfo(aOther.m_frameInfo) +{ +} + +FrameWrapper::FrameWrapper(FrameWrapper&& aOther) + : m_acquire(move(aOther.m_acquire)) + , m_frameInfo(move(aOther.m_frameInfo)) +{ + assert(aOther.mValid); + aOther.mValid = false; +} + +FrameWrapper& FrameWrapper::operator=(const FrameWrapper& aOther) +{ + m_acquire.reset(aOther.m_acquire->Clone()); + m_frameInfo = aOther.m_frameInfo; + return *this; +} + +FrameWrapper& FrameWrapper::operator=(FrameWrapper&& aOther) +{ + assert(aOther.mValid); + m_acquire = move(aOther.m_acquire); + m_frameInfo = move(aOther.m_frameInfo); + aOther.mValid = false; + return *this; +} + +FrameWrapper::~FrameWrapper() +{ +} + +Frame FrameWrapper::operator*() const +{ + assert(mValid); + return {m_frameInfo, m_acquire->Get()}; +} + +unique_ptr FrameWrapper::operator->() const +{ + assert(mValid); + return unique_ptr{new Frame{m_frameInfo, m_acquire->Get()}}; +} + +size_t FrameWrapper::GetSize() const +{ + return m_acquire->GetDataSize(); +} + +FrameInfo FrameWrapper::GetFrameInfo() const +{ + return m_frameInfo; +} + +void FrameWrapper::SetFrameInfo(const FrameInfo& aFrameInfo) +{ + m_frameInfo = aFrameInfo; +} + +VCD_MP4_END diff --git a/src/isolib/dash_writer/FrameWrapper.h b/src/isolib/dash_writer/FrameWrapper.h new file mode 100644 index 00000000..7875a7f6 --- /dev/null +++ b/src/isolib/dash_writer/FrameWrapper.h @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: FrameWrapper.h +//! \brief: Frame proxy class definition +//! \detail: Define the class to wrap frame +//! 
+ +#ifndef _FRAMEWRAPPER_H_ +#define _FRAMEWRAPPER_H_ + +#include +#include "Frame.h" + +using namespace std; + +VCD_MP4_BEGIN + +class GetDataOfFrame +{ +public: + GetDataOfFrame(); + virtual ~GetDataOfFrame(); + + GetDataOfFrame(const GetDataOfFrame& other) = delete; + GetDataOfFrame& operator=(const GetDataOfFrame&) = delete; + + virtual size_t GetDataSize() const = 0; + virtual FrameBuf Get() const = 0; + + virtual GetDataOfFrame* Clone() const = 0; +}; + +class FrameWrapper +{ +public: + FrameWrapper(unique_ptr&& aAcquire, FrameInfo aFrameInfo); + FrameWrapper(const FrameWrapper& aOther); + FrameWrapper(FrameWrapper&& aOther); + FrameWrapper& operator=(const FrameWrapper& aOther); + FrameWrapper& operator=(FrameWrapper&& aOther); + ~FrameWrapper(); + Frame operator*() const; + unique_ptr operator->() const; + FrameInfo GetFrameInfo() const; + void SetFrameInfo(const FrameInfo& aFrameInfo); + size_t GetSize() const; + +private: + unique_ptr m_acquire; + FrameInfo m_frameInfo; + + bool mValid = true; +}; + +VCD_MP4_END; +#endif // _FRAMEWRAPPER_H_ diff --git a/src/isolib/dash_writer/SegmentWriter.cpp b/src/isolib/dash_writer/SegmentWriter.cpp new file mode 100644 index 00000000..0bd36d00 --- /dev/null +++ b/src/isolib/dash_writer/SegmentWriter.cpp @@ -0,0 +1,1457 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SegmentWriter.cpp +//! \brief: Writer related operation implementation +//! 
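
`GetDataOfFrame` is the extension point that lets `FrameWrapper` carry data from sources other than an MP4 track. As an illustration only (the class name `MemoryFrameData` is not part of the library), an in-memory implementation could look like:

```cpp
#include <utility>   // std::move

// Illustrative only: frames held directly in memory, e.g. straight from an encoder.
class MemoryFrameData : public GetDataOfFrame
{
public:
    explicit MemoryFrameData(FrameBuf data) : m_data(std::move(data)) {}

    FrameBuf Get() const override          { return m_data; }
    size_t   GetDataSize() const override  { return m_data.size(); }
    MemoryFrameData* Clone() const override { return new MemoryFrameData(m_data); }

private:
    FrameBuf m_data;
};

// Wrapping it for the writer:
// FrameWrapper fw(std::unique_ptr<GetDataOfFrame>(new MemoryFrameData(buf)), info);
```
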
+ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "Frame.h" +#include "AvcConfigAtom.h" +#include "Utils.h" + +#include "BoxBlockAccess.h" + +#include "AvcSampEntry.h" +#include "TypeAtom.h" +#include "HevcConfigAtom.h" +#include "HevcSampEntry.h" +#include "InitViewOrientationSampEntry.h" +#include "MediaDataAtom.h" +#include "MovieAtom.h" +#include "MovieFragAtom.h" +#include "Mp4AudSampEntryAtom.h" +#include "SegIndexAtom.h" +#include "UriMetaSampEntryAtom.h" +#include "UserDataAtom.h" + +#include "AcquireTrackData.h" +#include "BoxWrapper.h" +#include "SegmentWriter.h" + +using namespace std; + +VCD_MP4_BEGIN + +using TimeInterval = pair; + +typedef vector TrackIds; + +struct Mp4MoofInfo +{ + TrackInfo trackInfo; + int32_t moofToDataOffset; +}; + +typedef map Mp4MoofInfos; + +void WriteMoof(ostream& outStr, + const TrackIds& trackIndex, + const Segment& oneSeg, + const Mp4MoofInfos& moofInfos, + const map& framesMap) +{ + std::vector sampleDefaults; + for (auto trackId : trackIndex) + { + sampleDefaults.push_back(SampleDefaults{ + trackId.GetIndex(), + 1, + 0, + 0, + {0} + }); + } + MovieFragmentAtom moof(sampleDefaults); + moof.GetMovieFragmentHeaderAtom().SetSequenceNumber(oneSeg.sequenceId.GetIndex()); + for (auto trackId : trackIndex) + { + if (moofInfos.find(trackId) == moofInfos.end()) + { + ISO_LOG(LOG_ERROR, "Failed to find moof info for designated track !\n"); + throw exception(); + } + const auto& segmentMoofInfo = moofInfos.find(trackId)->second; + const auto& trackMeta = segmentMoofInfo.trackInfo.trackMeta; + auto traf = MakeUnique(sampleDefaults); + auto trun = MakeUnique( + uint8_t(1), 0u | TrackRunAtom::TrackRunFlags::pSampleDuration | + TrackRunAtom::TrackRunFlags::pSampleSize | + TrackRunAtom::TrackRunFlags::pSampleCompTimeOffsets | + TrackRunAtom::TrackRunFlags::pSampleFlags); + auto tfdt = + MakeUnique(); + + tfdt->SetBaseMediaDecodeTime( + uint64_t((segmentMoofInfo.trackInfo.tBegin.cast() / trackMeta.timescale).asDouble())); + assert(segmentMoofInfo.trackInfo.tBegin >= FrameTime(0, 1)); + traf->GetTrackFragmentHeaderAtom().SetTrackId(trackMeta.trackId.GetIndex()); + traf->GetTrackFragmentHeaderAtom().SetFlags(0 | TrackFragmentHeaderAtom::IsBaseMoof); + + trun->SetDataOffset(segmentMoofInfo.moofToDataOffset); + + FrameTime time = segmentMoofInfo.trackInfo.tBegin; + + if (framesMap.find(trackId) == framesMap.end()) + { + ISO_LOG(LOG_ERROR, "Can't find frame with designated trackId !\n"); + throw exception(); + } + + for (const auto& frame : framesMap.find(trackId)->second) + { + uint64_t frameSize = frame.GetSize(); + FrameInfo frameInfo = frame.GetFrameInfo(); + + TrackRunAtom::SampleDetails s; + s.version1.pDuration = + uint32_t((frameInfo.duration / trackMeta.timescale).asDouble()); + s.version1.pSize = uint32_t(frameSize); + s.version1.pFlags = {frameInfo.sampleFlags.flagsAsUInt}; + s.version1.pCompTimeOffset = int32_t( + ((frameInfo.cts.front() - time) / trackMeta.timescale.cast()).asDouble()); + trun->AddSampleDetails(s); + + time += frameInfo.duration.cast(); + } + trun->SetSampleNum((uint32_t) framesMap.find(trackId)->second.size()); + + traf->SetTrackFragmentDecodeTimeAtom(move(tfdt)); + traf->AddTrackRunAtom(move(trun)); + moof.AddTrackFragmentAtom(move(traf)); + } + + Stream bs; + moof.ToStream(bs); + auto data = bs.GetStorage(); + outStr.write(reinterpret_cast(&data[0]), streamsize(data.size())); +} + +void FlushStream(Stream& inBS, ostream& outStr) +{ + auto data = inBS.GetStorage(); + 
outStr.write(reinterpret_cast(&data[0]), streamsize(data.size())); + inBS.Clear(); +} + +void CopyAtom(const Atom& srcAtom, Atom& dstAtom) +{ + Stream bs; + const_cast(srcAtom).ToStream(bs); + bs.Reset(); + dstAtom.FromStream(bs); +} + +template +UniquePtr CloneAtom(const T& srcAtom) +{ + UniquePtr dstAtom(new T); + Stream bs; + const_cast(srcAtom).ToStream(bs); + bs.Reset(); + dstAtom->FromStream(bs); + return dstAtom; +} + +void FillTrackHeaderAtom(TrackHeaderAtom& theaAtom, const FileInfo& fileInfo) +{ + uint64_t creationTime = fileInfo.creationTime; + uint64_t modificationTime = fileInfo.modificationTime; + FractU64 duration = fileInfo.duration.GetMinimum(); + + theaAtom.SetFlags(1 | 3); + theaAtom.SetCreationTime(creationTime); + theaAtom.SetModificationTime(modificationTime); + theaAtom.SetDuration(uint32_t(duration.m_num)); +} + +void SetReferredTracks(TrackReferenceAtom& trefAtom, + const map>& trackReferences) +{ + for (auto& tref : trackReferences) + { + VectorT trackIds; + for (auto& track : tref.second) + { + trackIds.push_back(track.GetIndex()); + } + TrackReferenceTypeAtom trefTypeAtom(FourCCInt{std::string{tref.first.begin(), tref.first.end()}}); + trefTypeAtom.SetTrackIds(trackIds); + trefAtom.AddAtom(trefTypeAtom); + } +} + +InitialSegment::InitialSegment() = default; + +InitialSegment::InitialSegment(const InitialSegment& initSeg) +{ + moov = NULL; + if (initSeg.moov) + { + moov.reset(new MovieBoxWrapper{CloneAtom(*initSeg.moov->movieBox)}); + } + + ftyp = NULL; + if (initSeg.ftyp) + { + ftyp.reset(new FileTypeBoxWrapper{CloneAtom(*initSeg.ftyp->fileTypeBox)}); + } +} + +InitialSegment::~InitialSegment() = default; + +InitialSegment& InitialSegment::operator=(const InitialSegment& initSeg) +{ + if (initSeg.moov) + { + moov.reset(new MovieBoxWrapper{CloneAtom(*initSeg.moov->movieBox)}); + } + else + { + moov.reset(); + } + return *this; +} + +InitialSegment& InitialSegment::operator=(InitialSegment&& initSeg) +{ + moov = move(initSeg.moov); + return *this; +} + +void WriteSegmentHeader(ostream& outStr) +{ + SegmentTypeAtom stypAtom; + Stream tempBS; + + stypAtom.SetMajorBrand("msdh"); + stypAtom.AddCompatibleBrand("msdh"); + stypAtom.AddCompatibleBrand("msix"); + stypAtom.ToStream(tempBS); + + FlushStream(tempBS, outStr); +} + +void WriteSampleData(ostream& outStr, const Segment& oneSeg) +{ + TrackIds trackIds = Keys(oneSeg.tracks); + map frameMap; + + map::const_iterator iter = oneSeg.tracks.begin(); + for ( ; iter != oneSeg.tracks.end(); iter++) + { + frameMap.insert(make_pair(iter->first, iter->second.frames)); + } + + auto moofOffset = outStr.tellp(); + Mp4MoofInfos segMoofInfos; + + vector::iterator iter1 = trackIds.begin(); + for ( ; iter1 != trackIds.end(); iter1++) + { + Mp4MoofInfo moofInfo = {oneSeg.tracks.find(*iter1)->second.trackInfo, 0}; + segMoofInfos.insert(make_pair(*iter1, move(moofInfo))); + } + + WriteMoof(outStr, trackIds, oneSeg, segMoofInfos, frameMap); + + vector mdatHrd; + mdatHrd.push_back(0); + mdatHrd.push_back(0); + mdatHrd.push_back(0); + mdatHrd.push_back(0); + mdatHrd.push_back(uint8_t('m')); + mdatHrd.push_back(uint8_t('d')); + mdatHrd.push_back(uint8_t('a')); + mdatHrd.push_back(uint8_t('t')); + auto mdatOffset = outStr.tellp(); + outStr.write(reinterpret_cast(&mdatHrd[0]), streamsize(mdatHrd.size())); + uint64_t mdatSize = mdatHrd.size(); + vector::iterator iter2 = trackIds.begin(); + for ( ; iter2 != trackIds.end(); iter2++) + { + Mp4MoofInfo moofInfo = {oneSeg.tracks.find(*iter2)->second.trackInfo, + int32_t(outStr.tellp() - 
streamoff(moofOffset))}; + segMoofInfos[*iter2] = move(moofInfo); + if (frameMap.find(*iter2) == frameMap.end()) + { + ISO_LOG(LOG_ERROR, "Failed to find frame with designated track Id !\n"); + throw exception(); + } + for (const auto& frame : frameMap.find(*iter2)->second) + { + const auto& frameData = *frame; + const auto& data = frameData.frameBuf; + outStr.write(reinterpret_cast(&data[0]), streamsize(data.size())); + mdatSize += data.size(); + } + } + streampos afterMdat = outStr.tellp(); + + mdatHrd[0] = uint8_t((mdatSize >> 24) & 0xff); + mdatHrd[1] = uint8_t((mdatSize >> 16) & 0xff); + mdatHrd[2] = uint8_t((mdatSize >> 8) & 0xff); + mdatHrd[3] = uint8_t((mdatSize >> 0) & 0xff); + outStr.seekp(mdatOffset); + outStr.write(reinterpret_cast(&mdatHrd[0]), streamsize(4)); + + outStr.seekp(moofOffset); + WriteMoof(outStr, trackIds, oneSeg, segMoofInfos, frameMap); + outStr.seekp(afterMdat); +} + +void WriteInitSegment(ostream& outStr, const InitialSegment& initSegment) +{ + Stream stream; + + initSegment.ftyp->fileTypeBox->ToStream(stream); + + initSegment.moov->movieBox->ToStream(stream); + + auto data = stream.GetStorage(); + outStr.write(reinterpret_cast(&data[0]), streamsize(data.size())); +} + +TrackDescription::TrackDescription() = default; +TrackDescription::TrackDescription(TrackDescription&& other) + : trackMeta(move(other.trackMeta)) + , sampleEntryBoxes(move(other.sampleEntryBoxes)) + , mediaHeaderBox(move(other.mediaHeaderBox)) + , handlerBox(move(other.handlerBox)) + , trackHeaderBox(move(other.trackHeaderBox)) + , trackReferences(move(other.trackReferences)) + , alternateGroup(move(other.alternateGroup)) +{ +} + +TrackDescription::TrackDescription(TrackMeta inTrackMeta, + list> smpEtyBoxList, + unique_ptr&& mheaBox, + unique_ptr&& hdlrBox, + unique_ptr&& theaBox) + : trackMeta(inTrackMeta) + , sampleEntryBoxes(move(smpEtyBoxList)) + , mediaHeaderBox(move(mheaBox)) + , handlerBox(move(hdlrBox)) + , trackHeaderBox(move(theaBox)) +{ +} + +TrackDescription::~TrackDescription() = default; + +OmniSampleEntry::OmniSampleEntry() = default; +OmniSampleEntry::~OmniSampleEntry() = default; + +uint8_t RwpkRectRegion::packingType() const +{ + return 0; +} + +unique_ptr +RwpkRectRegion::GenRegion() const +{ + auto outReg = MakeUnique(); + outReg->packingType = (RegionWisePackingAtom::PackingType) packingType(); + outReg->rectangularPacking = MakeUnique(); + outReg->rectangularPacking->projRegWidth = projRegWidth; + outReg->rectangularPacking->projRegHeight = projRegHeight; + outReg->rectangularPacking->projRegTop = projRegTop; + outReg->rectangularPacking->projRegLeft = projRegLeft; + outReg->rectangularPacking->packedRegWidth = packedRegWidth; + outReg->rectangularPacking->packedRegHeight = packedRegHeight; + outReg->rectangularPacking->packedRegTop = packedRegTop; + outReg->rectangularPacking->packedRegLeft = packedRegLeft; + outReg->rectangularPacking->transformType = transformType; + + outReg->guardBandFlag = false; + if (rwpkGuardBand) + { + outReg->guardBandFlag = true; + outReg->rectangularPacking->gbNotUsedForPredFlag = rwpkGuardBand->gbNotUsedForPredFlag; + outReg->rectangularPacking->topGbHeight = rwpkGuardBand->topGbHeight; + outReg->rectangularPacking->leftGbWidth = rwpkGuardBand->leftGbWidth; + outReg->rectangularPacking->bottomGbHeight = rwpkGuardBand->bottomGbHeight; + outReg->rectangularPacking->rightGbWidth = rwpkGuardBand->rightGbWidth; + outReg->rectangularPacking->gbType0 = rwpkGuardBand->gbType0; + outReg->rectangularPacking->gbType1 = rwpkGuardBand->gbType1; + 
outReg->rectangularPacking->gbType2 = rwpkGuardBand->gbType2; + outReg->rectangularPacking->gbType3 = rwpkGuardBand->gbType3; + } + + return MakeUnique( + StaticCast(move(outReg))); +} + +uint32_t VideoSampleEntry::GetWidthFP() const +{ + uint32_t fixedPntW = uint32_t(width) << 16; + return fixedPntW; +} + +uint32_t VideoSampleEntry::GetHeightFP() const +{ + uint32_t fixedPntH = uint32_t(height) << 16; + return fixedPntH; +} + +void VideoSampleEntry::GenPovdBoxes(unique_ptr& box) const +{ + if (projFmt) + { + auto rinfAtom = MakeUnique(); + + rinfAtom->SetOriginalFormat(box->sampleEntryBox->GetType()); + + auto schemeType = MakeUnique(); + schemeType->SetSchemeType("podv"); + rinfAtom->AddSchemeTypeAtom(move(schemeType)); + + auto povdAtom = MakeUnique(); + + povdAtom->GetProjectionFormatAtom().SetProjectFormat( + (ProjectionFormatAtom::ProjectFormat) *projFmt); + + if (rwpk) + { + auto rwpkAtom = MakeUnique(); + rwpkAtom->SetProjPictureWidth(rwpk->projPicWidth); + rwpkAtom->SetProjPictureHeight(rwpk->projPicHeight); + rwpkAtom->SetPackedPictureWidth(rwpk->packedPicWidth); + rwpkAtom->SetPackedPictureHeight(rwpk->packedPicHeight); + rwpkAtom->SetConstituentPictureMatchingFlag(rwpk->constituenPicMatching); + + for (auto& region : rwpk->regions) + { + rwpkAtom->AddRegion(move(region->GenRegion()->region)); + } + + povdAtom->SetRegionWisePackingAtom(move(rwpkAtom)); + } + + if (covi) + { + auto coviAtom = MakeUnique(); + coviAtom->SetCoverageShapeMode((CoverageInformationAtom::CoverageShapeMode) covi->coverageShape); + coviAtom->SetViewIdcPresenceFlag(covi->viewIdcPresenceFlag); + coviAtom->SetDefaultViewIdc((ViewMode) covi->defaultViewIdc); + + for (auto& region : covi->sphereRegions) + { + auto sphereRegion = MakeUnique(); + sphereRegion->viewIdc = (ViewMode) region->viewIdc; + sphereRegion->region.centreAzimuth = region->centAzimuth; + sphereRegion->region.centreElevation = region->centElevation; + sphereRegion->region.centreTilt = region->centTilt; + sphereRegion->region.azimuthRange = region->azimuthRange; + sphereRegion->region.elevationRange = region->elevationRange; + sphereRegion->region.interpolate = region->interpolate; + + coviAtom->AddSphereRegion(move(sphereRegion)); + } + + povdAtom->SetCoverageInformationAtom(move(coviAtom)); + } + + for (auto& compatibleScheme : compatibleSchemes) + { + auto compatSchemeAtom = MakeUnique(); + compatSchemeAtom->SetSchemeType(FourCCInt(compatibleScheme.type.c_str())); + compatSchemeAtom->SetSchemeVersion(compatibleScheme.version); + compatSchemeAtom->SetSchemeUri(compatibleScheme.uri.c_str()); + rinfAtom->AddCompatibleSchemeTypeAtom(move(compatSchemeAtom)); + } + + if (stvi) + { + auto stviAtom = MakeUnique(); + stviAtom->SetStereoScheme(StereoVideoAtom::SchemeSpec::POVD); + StereoVideoAtom::StereoIndicationType stereoIndicationType; + stereoIndicationType.typePOVD.useQuincunxSampling = 0; + stereoIndicationType.typePOVD.compositionType = (StereoVideoAtom::POVDFrameCompType) *stvi; + stviAtom->SetStereoIndicationType(stereoIndicationType); + rinfAtom->AddStereoVideoAtom(move(stviAtom)); + } + + if (rotn) + { + auto rotnAtom = MakeUnique(); + rotnAtom->SetRotation({rotn->yaw, rotn->pitch, rotn->roll}); + povdAtom->SetRotationAtom(move(rotnAtom)); + } + + rinfAtom->AddProjectedOmniVideoAtom(move(povdAtom)); + + box->sampleEntryBox->SetType("resv"); + box->sampleEntryBox->AddRestrictedSchemeInfoAtom(move(rinfAtom)); + } +} + +unique_ptr AvcVideoSampleEntry::GenHandlerBox() const +{ + auto handlerBox = MakeUnique(MakeUnique()); + 
handlerBox->handlerBox->SetHandlerType("vide"); + handlerBox->handlerBox->SetName("VideoHandler"); + return handlerBox; +} + +unique_ptr AvcVideoSampleEntry::GenSampleEntryBox() const +{ + auto box = MakeUnique(); + + box->SetDataReferenceIndex(1); + + AvcConfigurationAtom& cfg = box->GetAvcConfigurationAtom(); + + AvcDecoderConfigurationRecord decCfg = cfg.GetConfiguration(); + + if (decCfg.ConfigSPS({sps.begin(), sps.end()})) + { + decCfg.AddNalUnit({sps.begin(), sps.end()}, AvcNalDefs::SPS); + decCfg.AddNalUnit({pps.begin(), pps.end()}, AvcNalDefs::PPS); + cfg.SetConfiguration(decCfg); + box->SetWidth(width); + box->SetHeight(height); + + auto wrappedSampleEntry = + MakeUnique(StaticCast(move(box))); + GenPovdBoxes(wrappedSampleEntry); + return wrappedSampleEntry; + } + else + { + return NULL; + } +} + +unique_ptr HevcVideoSampleEntry::GenHandlerBox() const +{ + auto handlerBox = MakeUnique(MakeUnique()); + handlerBox->handlerBox->SetHandlerType("vide"); + handlerBox->handlerBox->SetName("VideoHandler"); + return handlerBox; +} + +unique_ptr HevcVideoSampleEntry::GenSampleEntryBox() const +{ + auto box = MakeUnique(); + + HevcConfigurationAtom& cfg = box->GetHevcConfigurationAtom(); + + box->SetDataReferenceIndex(1); + + HevcDecoderConfigurationRecord decCfg = cfg.GetConfiguration(); + + decCfg.ConfigSPS({sps.begin(), sps.end()}, frameRate); + uint8_t arrayCompleteness = 1; + decCfg.AddNalUnit({vps.begin(), vps.end()}, HevcNalDefs::VPS, arrayCompleteness); + decCfg.AddNalUnit({sps.begin(), sps.end()}, HevcNalDefs::SPS, arrayCompleteness); + decCfg.AddNalUnit({pps.begin(), pps.end()}, HevcNalDefs::PPS, arrayCompleteness); + cfg.SetConfiguration(decCfg); + box->SetWidth(width); + box->SetHeight(height); + + box->SetType(sampleEntryType.item); + + auto wrappedSampleEntry = + MakeUnique(StaticCast(move(box))); + GenPovdBoxes(wrappedSampleEntry); + return wrappedSampleEntry; +} + +FrameBuf HevcExtractorSampleConstructor::GenFrameData() const +{ + Stream sampleStream; + + sampleStream.Write8(trackId); + sampleStream.Write8(sampleOffset); + + sampleStream.Write32(dataOffset); + sampleStream.Write32(dataLength); + + return FrameBuf(sampleStream.GetStorage().begin(), sampleStream.GetStorage().end()); +} + +FrameBuf HevcExtractorInlineConstructor::GenFrameData() const +{ + FrameBuf frmBuf; + + frmBuf.push_back(inlineData.size()); + frmBuf.insert(frmBuf.end(), inlineData.begin(), inlineData.end()); + return frmBuf; +} + +FrameBuf HevcExtractor::GenFrameData() const +{ + FrameBuf frmBuf; + + if (!sampleConstructor && !inlineConstructor) + { + ISO_LOG(LOG_ERROR, "Both of sample constructor and inline constructor are NULL !\n"); + throw exception(); + } + if (!!inlineConstructor) + { + frmBuf.push_back(2); + auto inlineBuf = inlineConstructor->GenFrameData(); + frmBuf.insert(frmBuf.end(), inlineBuf.begin(), inlineBuf.end()); + } + if (!!sampleConstructor) + { + frmBuf.push_back(0); + auto sampBuf = sampleConstructor->GenFrameData(); + frmBuf.insert(frmBuf.end(), sampBuf.begin(), sampBuf.end()); + } + + return frmBuf; +} + +FrameBuf HevcExtractorTrackPackedData::GenFrameData() const +{ + FrameBuf frmBuf; + + frmBuf.push_back(0); + frmBuf.push_back(0); + frmBuf.push_back(0); + frmBuf.push_back(0); + + const uint16_t forbiddenZero = (0 << 15) & 0b1000000000000000; + const uint16_t nalUnitType = (49 << 9) & 0b0111111000000000; + const uint16_t nuhLayerId = (0 << 3) & 0b0000000111111000; + const uint16_t nuhTemporalId = (nuhTemporalIdPlus1 << 0) & 0b0000000000000111; + + uint16_t naluHrd = forbiddenZero | 
nalUnitType | nuhLayerId | nuhTemporalId; + + frmBuf.push_back(naluHrd >> 8); + frmBuf.push_back(naluHrd & 0xff); + + vector::const_iterator iter = samples.begin(); + for ( ; iter != samples.end(); iter++) + { + auto genedFrmBuf = iter->GenFrameData(); + frmBuf.insert(frmBuf.end(), genedFrmBuf.begin(), genedFrmBuf.end()); + } + + uint32_t bufSize = frmBuf.size() - 4; + frmBuf[0] = (bufSize >> 24) & 0xff; + frmBuf[1] = (bufSize >> 16) & 0xff; + frmBuf[2] = (bufSize >> 8) & 0xff; + frmBuf[3] = (bufSize >> 0) & 0xff; + + return frmBuf; +} + +unique_ptr MP4AudioSampleEntry::GenSampleEntryBox() const +{ + ISO_LOG(LOG_INFO, "Gen sample entry box for MP4AudioSampleEntry \n"); + auto audioAtom = MakeUnique(); + ElementaryStreamDescriptorAtom& esdAtom = audioAtom->GetESDAtom(); + ElementaryStreamDescriptorAtom::ES_Params esParams{}; + audioAtom->SetSampleSize(sizeOfSample); + audioAtom->SetChannelCount(cntOfChannels); + audioAtom->SetSampleRate(rateOfSample); + audioAtom->SetDataReferenceIndex(1); + esParams.descrFlag = 3; + esParams.flags = esIdOfDepends ? 0x80 : 0; + esParams.id = idOfES; + esParams.depend = esIdOfDepends; + if (strUrl.size()) + { + esParams.URLlen = static_cast(strUrl.size()); + esParams.URL = {strUrl.begin(), strUrl.end()}; + } + + esParams.decConfig.flag = 4; + esParams.decConfig.strType = 0x05; + esParams.decConfig.idc = 0x40; + esParams.decConfig.bufferSizeDB = sizeOfBuf; + esParams.decConfig.avgBitrate = avgBitrate; + esParams.decConfig.maxBitrate = maxBitrate; + esParams.decConfig.info.flag = 5; + esParams.decConfig.info.size = static_cast(decSpecificInfo.size()); + esParams.decConfig.info.info.resize(decSpecificInfo.size()); + for (size_t i = 0; i < decSpecificInfo.size(); ++i) + { + esParams.decConfig.info.info[i] = static_cast(decSpecificInfo[i]); + } + + esdAtom.SetESDescriptor(esParams); + + if (isNonDiegetic) + { + audioAtom->SetNonDiegeticAudioAtom(NonDiegeticAudioAtom()); + } + if (ambisonicItem) + { + const auto& cfgOfAmb = *ambisonicItem; + SpatialAudioAtom spaAudioAtom; + spaAudioAtom.SetAmbisonicType(cfgOfAmb.type); + spaAudioAtom.SetAmbisonicOrder(cfgOfAmb.order); + spaAudioAtom.SetAmbisonicChannelOrdering(cfgOfAmb.channelOrdering); + spaAudioAtom.SetAmbisonicNormalization(cfgOfAmb.normalization); + spaAudioAtom.SetChannelMap(vector{cfgOfAmb.channelMap.begin(), cfgOfAmb.channelMap.end()}); + audioAtom->SetSpatialAudioAtom(spaAudioAtom); + } + + if (chnLayoutItem) + { + const ChannelLayout& chlLayout = *chnLayoutItem; + ChannelLayoutAtom chlLayoutAtom; + + chlLayoutAtom.SetChannelNumber(cntOfChannels); + if (chlLayout.streamStructure & 1) + { + chlLayoutAtom.SetDefinedLayout(static_cast(chlLayout.layout)); + if (chlLayout.layout == 0) + { + for (ChannelPosition oneChlPosition : chlLayout.positions) + { + ChannelLayoutAtom::ChannelLayout oneLayOut{}; + oneLayOut.speakerPosition = static_cast(oneChlPosition.speakerPosition); + oneLayOut.azimuthAngle = static_cast(oneChlPosition.azimuth); + oneLayOut.elevationAngle = static_cast(oneChlPosition.elevation); + chlLayoutAtom.AddChannelLayout(oneLayOut); + } + } + else + { + uint64_t chlsMap{0}; + for (auto omitted : chlLayout.omitted) + { + chlsMap = chlsMap | (1ull << omitted); + } + chlLayoutAtom.SetOmittedChannelsMap(chlsMap); + } + } + if (chlLayout.streamStructure & 2) // object structured + { + chlLayoutAtom.SetObjectCount(static_cast(chlLayout.objectCount)); + } + + audioAtom->SetChannelLayoutAtom(chlLayoutAtom); + } + + return MakeUnique(StaticCast(move(audioAtom))); +} + +unique_ptr 
MP4AudioSampleEntry::GenHandlerBox() const +{ + unique_ptr handlerBox = + MakeUnique(MakeUnique()); + handlerBox->handlerBox->SetHandlerType("soun"); + handlerBox->handlerBox->SetName("SoundHandler"); + return handlerBox; +} + +uint32_t MP4AudioSampleEntry::GetWidthFP() const +{ + return 0; +} + +uint32_t MP4AudioSampleEntry::GetHeightFP() const +{ + return 0; +} + +TrackDescription::TrackDescription(TrackMeta inTrackMeta, + FileInfo inFileInfo, + const OmniSampleEntry& inSmpEty) + : trackMeta(inTrackMeta) +{ + sampleEntryBoxes.push_back(inSmpEty.GenSampleEntryBox()); + UniquePtr box = MakeUnique(); + //GenMediaHeaderAtom(box, inFileInfo, inTrackMeta.timescale); + FractU64 timeScale = inTrackMeta.timescale; + uint64_t creationTime = inFileInfo.creationTime; + uint64_t modificationTime = inFileInfo.modificationTime; + FractU64 duration = inFileInfo.duration; + box->SetTimeScale(uint32_t(timeScale.per1().asDouble())); + box->SetCreationTime(creationTime); + box->SetModificationTime(modificationTime); + box->SetDuration(uint32_t((duration / timeScale).asDouble())); + mediaHeaderBox = move(MakeUnique(move(box)));//GenMediaHeaderAtom(inFileInfo, inTrackMeta.timescale); + + handlerBox = move(inSmpEty.GenHandlerBox()); + + UniquePtr thead = MakeUnique(); + trackHeaderBox = move(MakeUnique(move(thead))); + trackHeaderBox->trackHeaderBox->SetTrackID(inTrackMeta.trackId.GetIndex()); + trackHeaderBox->trackHeaderBox->SetWidth(inSmpEty.GetWidthFP()); + trackHeaderBox->trackHeaderBox->SetHeight(inSmpEty.GetHeightFP()); + FillTrackHeaderAtom(*trackHeaderBox->trackHeaderBox, inFileInfo); +} + +void FillMovieHeaderAtom(MovieHeaderAtom& mheaAtom, + const MovieDescription& inMovieDes, + FractU64 timeScale) +{ + uint64_t creationTime = inMovieDes.creationTime; + uint64_t modificationTime = inMovieDes.modificationTime; + FractU64 duration = inMovieDes.duration; + mheaAtom.SetTimeScale(uint32_t(timeScale.per1().asDouble())); + mheaAtom.SetCreationTime(creationTime); + mheaAtom.SetModificationTime(modificationTime); + mheaAtom.SetDuration(uint32_t((duration / timeScale).asDouble())); +} + +void UpdateMediaInfoAtom(MediaInformationAtom& minfAtom, const TrackDescription& inTrackDes) +{ + switch (inTrackDes.trackMeta.type) + { + case TypeOfMedia::Audio: + { + minfAtom.SetMediaType(MediaInformationAtom::MediaType::Sound); + break; + } + case TypeOfMedia::Video: + { + minfAtom.SetMediaType(MediaInformationAtom::MediaType::Video); + break; + } + default: + minfAtom.SetMediaType(MediaInformationAtom::MediaType::Null); + } +} + +InitialSegment GenInitSegment(const TrackDescriptionsMap& inTrackDes, + const MovieDescription& inMovieDes, + const bool isFraged) +{ + InitialSegment initSeg; + + vector tscalesVec; + vector tscalesRefVec; + map::const_iterator iter1 = inTrackDes.begin(); + for ( ; iter1 != inTrackDes.end(); iter1++) + { + tscalesVec.push_back(iter1->second.trackMeta.timescale); + } + vector::iterator iter2 = tscalesVec.begin(); + for ( ; iter2 != tscalesVec.end(); iter2++) + { + tscalesRefVec.push_back(&(*iter2)); + } + + shareDenominators(tscalesRefVec.begin(), tscalesRefVec.end()); + sort(tscalesVec.begin(), tscalesVec.end()); + + auto moovAtom = MakeUnique(); + + UniquePtr movieExtendsBoxOut(new MovieExtendsAtom()); + auto ftypAtom = MakeUnique(); + if (inMovieDes.fileType) + { + ftypAtom->SetMajorBrand(inMovieDes.fileType->majorBrand.c_str()); + ftypAtom->SetMinorVersion(inMovieDes.fileType->minorVersion); + for (auto& compatBrand : inMovieDes.fileType->compatibleBrands) + { + 
ftypAtom->AddCompatibleBrand(compatBrand.c_str()); + } + } + else + { + ftypAtom->SetMajorBrand("isom"); + ftypAtom->SetMinorVersion(512); + ftypAtom->AddCompatibleBrand("isom"); + ftypAtom->AddCompatibleBrand("iso2"); + ftypAtom->AddCompatibleBrand("mp41"); + ftypAtom->AddCompatibleBrand("mp42"); + } + + FillMovieHeaderAtom(moovAtom->GetMovieHeaderAtom(), inMovieDes, {1, 1000}); + + TrackId maxTrackId; + if (inTrackDes.size()) + { + maxTrackId = inTrackDes.begin()->first; + } + + map::const_iterator iter3 = inTrackDes.begin(); + for ( ; iter3 != inTrackDes.end(); iter3++) + { + const TrackId trackId = iter3->first; + const TrackDescription& trackDes = iter3->second; + UniquePtr trackAtom = MakeUnique(); + MediaAtom& mediaAtom = trackAtom->GetMediaAtom(); + MediaInformationAtom& mediaInfoAtom = mediaAtom.GetMediaInformationAtom(); + SampleTableAtom& stblAtom = mediaInfoAtom.GetSampleTableAtom(); + maxTrackId = max(maxTrackId, trackId); + + UpdateMediaInfoAtom(mediaInfoAtom, trackDes); + + CopyAtom(*trackDes.trackHeaderBox->trackHeaderBox, trackAtom->GetTrackHeaderAtom()); + + CopyAtom(*trackDes.mediaHeaderBox->mediaHeaderBox, mediaAtom.GetMediaHeaderAtom()); + + CopyAtom(*trackDes.handlerBox->handlerBox, mediaAtom.GetHandlerAtom()); + + if (trackDes.trackMeta.trackType) + { + trackAtom->SetHasTrackTypeAtom(true); + trackAtom->GetTrackTypeAtom().SetMajorBrand(trackDes.trackMeta.trackType->majorBrand.c_str()); + trackAtom->GetTrackTypeAtom().SetMinorVersion(trackDes.trackMeta.trackType->minorVersion); + for (const auto& brand : trackDes.trackMeta.trackType->compatibleBrands) + { + trackAtom->GetTrackTypeAtom().AddCompatibleBrand(brand.c_str()); + } + } + + if (trackDes.trackReferences.size()) + { + trackAtom->SetHasTrackReferences(true); + SetReferredTracks(trackAtom->GetTrackReferenceAtom(), trackDes.trackReferences); + } + + if (trackDes.trackMeta.type == TypeOfMedia::Audio) + { + trackAtom->GetTrackHeaderAtom().SetVolume(0x0100); + } + + if (trackDes.alternateGroup) + { + trackAtom->GetTrackHeaderAtom().SetAlternateGroup(*trackDes.alternateGroup); + } + + if (trackDes.vrType) + { + SphericalVideoV1Atom::GeneralMetaData data{}; + + data.isSpherical = true; + data.isStitched = true; + data.stitchedSW = ""; + data.projectionFormat = SphericalVideoV1Atom::ProjectFormat::ERP; + + switch (*trackDes.vrType) + { + case OmniMediaType::OMNI_Mono: + { + data.stereoType = SphericalVideoV1Atom::StereoTypeV1::MONO_TYPE; + } + break; + case OmniMediaType::OMNI_StereoLR: + { + data.stereoType = SphericalVideoV1Atom::StereoTypeV1::STEREO_LEFT_RIGHT; + } + break; + case OmniMediaType::OMNI_StereoTB: + { + data.stereoType = SphericalVideoV1Atom::StereoTypeV1::STEREO_TOP_BOTTOM; + } + break; + } + trackAtom->GetSphericalVideoV1Atom().SetGeneralMetaData(data); + trackAtom->SetHasSphericalVideoV1Atom(true); + } + + shared_ptr dataEntryBox(new DataEntryUrlAtom(DataEntryUrlAtom::Contained)); + mediaInfoAtom.GetDataInformationAtom().AddDataEntryAtom(dataEntryBox); + + SampleDescriptionAtom& sampleDescriptionBox = stblAtom.GetSampleDescriptionAtom(); + for (auto& sampleEntryBox : trackDes.sampleEntryBoxes) + { + UniquePtr sampleEntryCopy(sampleEntryBox->sampleEntryBox->Clone()); + sampleDescriptionBox.AddSampleEntry(move(sampleEntryCopy)); + } + + UniquePtr trackExtendsBoxOut(new TrackExtendsAtom()); + SampleDefaults sampleDefaults = { + trackDes.trackMeta.trackId.GetIndex(), + 1, + 0, + 0, + {0} + }; + trackExtendsBoxOut->SetFragmentSampleDefaults(sampleDefaults); + 
movieExtendsBoxOut->AddTrackExtendsAtom(move(trackExtendsBoxOut)); + moovAtom->AddTrackAtom(move(trackAtom)); + } + moovAtom->GetMovieHeaderAtom().SetNextTrackID(maxTrackId.GetIndex() + 1); + + if (isFraged) + { + moovAtom->AddMovieExtendsAtom(move(movieExtendsBoxOut)); + } + + initSeg.moov.reset(new MovieBoxWrapper{move(moovAtom)}); + initSeg.ftyp.reset(new FileTypeBoxWrapper{move(ftypAtom)}); + + return initSeg; +} + +TimeInterval GetFrameTimeInterval(const list& frameList) +{ + FrameTime time0; + FrameTime time1; + + bool first = true; + for (auto& frame : frameList) + { + for (const auto& cts : frame.GetFrameInfo().cts) + { + auto frameT0 = cts; + auto frameT1 = cts + frame.GetFrameInfo().duration.cast(); + + if (first || frameT0 < time0) + { + time0 = frameT0; + } + if (first || frameT1 > time1) + { + time1 = frameT1; + } + first = false; + } + } + + assert(!first); + return {time0, time1}; +} + +FrameTime GetDtsCtsInterval(const list& frameList) +{ + FrameTime delta; + for (auto& frame : frameList) + { + const FrameInfo& info = frame.GetFrameInfo(); + if (info.dts && info.cts.size()) + { + delta = max(delta, *info.dts - info.cts.front()); + } + } + return delta; +} + +TimeInterval GetCtsInterval(const list& frameList) +{ + FrameTime time0; + FrameTime time1; + + bool first = true; + for (auto& frame : frameList) + { + const FrameInfo& info = frame.GetFrameInfo(); + if (info.cts.size()) + { + for (const auto& cts : info.cts) + { + auto frameT0 = cts; + auto frameT1 = cts + info.duration.cast(); + + if (first || frameT0 < time0) + { + time0 = frameT0; + } + if (first || frameT1 > time1) + { + time1 = frameT1; + } + first = false; + } + } + else + { + if (auto dts = info.dts) + { + auto frameT0 = *dts; + auto frameT1 = *dts + info.duration.cast(); + if (first || frameT0 < time0) + { + time0 = frameT0; + } + if (first || frameT1 > time1) + { + time1 = frameT1; + } + first = false; + } + } + } + + assert(!first); + return {time0, time1}; +} + +TimeInterval ExtendInterval(TimeInterval timeFirst, TimeInterval timeSecond) +{ + return {min(timeFirst.first, timeSecond.first), max(timeFirst.second, timeSecond.second)}; +} + +void SegmentWriter::Impl::TrackState::FeedEOS() +{ + if (!isEnd) + { + isEnd = true; + if (frames.size()) + { + SubSegment subsegmentful{move(frames)}; + SubSegments.push_back(subsegmentful); + } + FullSubSegments.push_back(move(SubSegments)); + subSegDur = {0, 1}; + segDur = {0, 1}; + } +} + +void SegmentWriter::Impl::TrackState::FeedOneFrame(FrameWrapper oneFrame) +{ + assert(!isEnd); + + if (hasSubSeg && oneFrame.GetFrameInfo().isIDR) + { + hasSubSeg = false; + subSegDur -= + CoalesceData(imple->m_config.subsegmentDuration, imple->m_config.segmentDuration)->cast(); + if (insertSubSegNum >= imple->m_config.skipSubsegments) + { + SubSegment subsegmentful{move(frames)}; + SubSegments.push_back(subsegmentful); + insertSubSegNum = 0; + } + else + { + ++insertSubSegNum; + } + + while (segDur >= imple->m_config.segmentDuration.cast()) + { + FullSubSegments.push_back(move(SubSegments)); + segDur -= imple->m_config.segmentDuration.cast(); + } + } + + frames.push_back(oneFrame); + segDur += oneFrame.GetFrameInfo().duration.cast(); + subSegDur += oneFrame.GetFrameInfo().duration.cast(); + + hasSubSeg = + hasSubSeg || + subSegDur >= + CoalesceData(imple->m_config.subsegmentDuration, imple->m_config.segmentDuration)->cast(); +} + +list SegmentWriter::Impl::TrackState::TakeSegment() +{ + list segmentFrames; + if (FullSubSegments.size() > 0u) + { + SubSegment subsegmentful{}; + auto 
completeSegment = move(FullSubSegments.front()); + FullSubSegments.pop_front(); + + for (SubSegment& subsegment : completeSegment) + { + segmentFrames.push_back(move(subsegment.frames)); + } + } + return segmentFrames; +} + +bool SegmentWriter::Impl::TrackState::IsFinished() const +{ + bool isCompleted = (isEnd && (frames.size() == 0u) + && (SubSegments.size() == 0) + && (FullSubSegments.size() == 0)); + return isCompleted; +} + +bool SegmentWriter::Impl::TrackState::IsIncomplete() const +{ + bool isFinished = (frames.size() > 0u); + return isFinished; +} + +bool SegmentWriter::Impl::TrackState::CanTakeSegment() const +{ + bool isSegReady = (FullSubSegments.size() > 0u); + return isSegReady; +} + +SegmentWriter::SegmentWriter(SegmentWriterCfg inCfg) + : m_impl(new Impl()) + , m_sidxWriter(new SidxWriter) +{ + m_impl->m_config = inCfg; +} + +SegmentWriter::~SegmentWriter() +{ +} + +void SegmentWriter::AddTrack(TrackId trackIndex, TrackMeta inTrackMeta) +{ + assert(trackIndex == inTrackMeta.trackId); + (void) trackIndex; + AddTrack(inTrackMeta); +} + +void SegmentWriter::AddTrack(TrackMeta inTrackMeta) +{ + m_impl->m_trackSte[inTrackMeta.trackId] = Impl::TrackState(m_impl.get()); + m_impl->m_trackSte[inTrackMeta.trackId].trackMeta = inTrackMeta; +} + +SegmentWriter::Action SegmentWriter::FeedEOS(TrackId trackIndex) +{ + m_impl->m_trackSte[trackIndex].FeedEOS(); + return m_impl->AllTracksReadyForSegment() ? Action::ExtractSegment : Action::KeepFeeding; +} + +bool SegmentWriter::Impl::AnyTrackIncomplete() const +{ + bool incomplete = false; + for (auto& trackIdAndTrackFrames : m_imple->m_trackSte) + { + auto& stateOfTrack = trackIdAndTrackFrames.second; + incomplete = incomplete || stateOfTrack.IsIncomplete(); + } + return incomplete; +} + +bool SegmentWriter::Impl::AllTracksReadyForSegment() const +{ + bool ready = true; + for (auto& trackIdAndTrackState : m_imple->m_trackSte) + { + auto& stateOfTrack = trackIdAndTrackState.second; + ready = ready && (stateOfTrack.isEnd || stateOfTrack.CanTakeSegment()); + } + return ready; +} + +bool SegmentWriter::Impl::AllTracksFinished() const +{ + bool ready = true; + for (auto& trackIdAndTrackState : m_imple->m_trackSte) + { + auto& stateOfTrack = trackIdAndTrackState.second; + ready = ready && stateOfTrack.IsFinished(); + } + return ready; +} + +SegmentWriter::Action SegmentWriter::FeedOneFrame(TrackId trackIndex, FrameWrapper oneFrame) +{ + assert(!m_impl->m_trackSte[trackIndex].isEnd); + + if (!m_impl->m_trackSte.count(trackIndex)) + { + ISO_LOG(LOG_ERROR, "Track is not correct !\n"); + throw exception(); + } + + m_impl->m_trackSte.at(trackIndex).FeedOneFrame(oneFrame); + + return m_impl->AllTracksReadyForSegment() ? 
Action::ExtractSegment : Action::KeepFeeding; +} + +list SegmentWriter::ExtractSubSegments() +{ + using SegGroup = list; + using FrmGroup = list; + SegGroup segGroup; + while (m_impl->AllTracksReadyForSegment() && !m_impl->AllTracksFinished()) + { + map trackSegMap; + map::iterator iter1 = (m_impl->m_trackSte).begin(); + for ( ; iter1 != (m_impl->m_trackSte).end(); iter1++) + { + TrackId trackId = iter1->first; + Impl::TrackState& stateOfTrack = iter1->second; + trackSegMap[trackId] = stateOfTrack.TakeSegment(); + } + + SegmentList subSegGroup; + map::iterator iter2 = trackSegMap.begin(); + for ( ; iter2 != trackSegMap.end(); iter2++) + { + TrackId trackId = iter2->first; + FrmGroup& frames = iter2->second; + list::iterator iter3 = subSegGroup.begin(); + SegmentWriter::Impl::TrackState& stateOfTrack = m_impl->m_trackSte.at(trackId); + + if (m_impl->m_isFirstSeg && frames.size()) + { + Frames firstFrmGroup = *frames.begin(); + list::iterator iter4 = firstFrmGroup.begin(); + for ( ; iter4 != firstFrmGroup.end(); iter4++) + { + FrameInfo info = iter4->GetFrameInfo(); + for (auto cts : info.cts) + { + if (info.dts) + { + stateOfTrack.trackOffset = max(stateOfTrack.trackOffset, *info.dts - cts); + } + + stateOfTrack.trackOffset = max(stateOfTrack.trackOffset, -cts); + } + } + } + + FrmGroup::iterator iter5 = frames.begin(); + for ( ; iter5 != frames.end(); iter5++) + { + if (iter3 == subSegGroup.end()) + { + subSegGroup.push_back({}); + iter3 = subSegGroup.end(); + --iter3; + } + if (iter3 == subSegGroup.end()) + { + ISO_LOG(LOG_ERROR, "Failed to get sub segment group !\n"); + throw exception(); + } + TrackOfSegment& trackOfSegment = (*iter3).tracks[trackId]; + trackOfSegment.frames = move(*iter5); + + if (stateOfTrack.trackOffset.m_num) + { + for (auto& frame : trackOfSegment.frames) + { + FrameInfo frameInfo = frame.GetFrameInfo(); + for (auto& x : frameInfo.cts) + { + x += stateOfTrack.trackOffset; + } + if (frameInfo.dts) + { + *frameInfo.dts += stateOfTrack.trackOffset; + } + frame.SetFrameInfo(frameInfo); + } + } + + ++iter3; + trackOfSegment.trackInfo.trackMeta = m_impl->m_trackSte.at(trackId).trackMeta; + auto dtsCtsOffset = GetDtsCtsInterval(trackOfSegment.frames); + if (auto dts = trackOfSegment.frames.front().GetFrameInfo().dts) + { + trackOfSegment.trackInfo.tBegin = *dts; + } + else + { + trackOfSegment.trackInfo.tBegin = GetCtsInterval(trackOfSegment.frames).first - dtsCtsOffset; + } + trackOfSegment.trackInfo.dtsCtsOffset = dtsCtsOffset; + } + } + + list::iterator iter6 = subSegGroup.begin(); + for ( ; iter6 != subSegGroup.end(); iter6++) + { + TimeInterval segTimeInterval; + InvertTrue firstSegmentSpan; + for (auto trackIdSegment : iter6->tracks) + { + TrackOfSegment& trackOfSegment = trackIdSegment.second; + + auto timeSpan = GetFrameTimeInterval(trackOfSegment.frames); + if (firstSegmentSpan()) + { + segTimeInterval = timeSpan; + } + else + { + segTimeInterval = ExtendInterval(timeSpan, segTimeInterval); + } + } + + iter6->sequenceId = m_impl->m_seqId; + iter6->tBegin = segTimeInterval.first; + iter6->duration = (segTimeInterval.second - segTimeInterval.first).cast(); + ++m_impl->m_seqId; + } + + segGroup.push_back(subSegGroup); + m_impl->m_isFirstSeg = false; + } + return segGroup; +} + +SegmentList SegmentWriter::ExtractSegments() +{ + list subsegments = ExtractSubSegments(); + SegmentList segments; + for (auto& segment : subsegments) + { + segments.insert(segments.end(), segment.begin(), segment.end()); + } + return segments; +} + +void 
SegmentWriter::SetWriteSegmentHeader(bool toWriteHdr) +{ + m_needWriteSegmentHeader = toWriteHdr; +} + +void SegmentWriter::WriteInitSegment(ostream& outStr, const InitialSegment& initSeg) +{ + WriteInitSegment(outStr, initSeg); +} + +void SegmentWriter::WriteSubSegments(ostream& outStr, const list subSegList) +{ + if (m_needWriteSegmentHeader) + { + WriteSegmentHeader(outStr); + } + for (auto& subsegment : subSegList) + { + m_sidxWriter->AddSubSeg(subsegment); + } + auto sidxInfo = m_sidxWriter->WriteSidx(outStr, {}); + for (auto& subsegment : subSegList) + { + auto before = outStr.tellp(); + WriteSampleData(outStr, subsegment); + auto after = outStr.tellp(); + m_sidxWriter->AddSubSegSize(after - before); + } + if (sidxInfo) + { + m_sidxWriter->WriteSidx(outStr, sidxInfo->position); + } + +} + +void SegmentWriter::WriteSegment(ostream& outStr, const Segment oneSeg) +{ + WriteSubSegments(outStr, {oneSeg}); +} + +VCD_MP4_END diff --git a/src/isolib/dash_writer/SegmentWriter.h b/src/isolib/dash_writer/SegmentWriter.h new file mode 100644 index 00000000..84eb6dbe --- /dev/null +++ b/src/isolib/dash_writer/SegmentWriter.h @@ -0,0 +1,551 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: SegmentWriter.h +//! \brief: Segmenter related API definition +//! \detail: Define segment related information, like track of segment +//! and so on. +//! 
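
Taken together, the writer above follows a feed-then-extract pattern: frames are pushed per track until `FeedOneFrame`/`FeedEOS` report `Action::ExtractSegment`, the accumulated segments are pulled out with `ExtractSegments()`, and each one is serialized with `WriteSegment()` (optionally prefixed by a `styp` header, with sub-segments routed through the `SidxWriter` hooks before the `moof`/`mdat` payload is written by `WriteSampleData`). The init segment is produced separately with `GenInitSegment` and the free `WriteInitSegment` function. The sketch below is a condensed usage illustration, not part of the patch: the caller-side setup (`videoTrackMeta`, `inputFrames`, the output streams, and the track/movie descriptions in the trailing comment) is assumed, and the `FractU64{num, den}` brace initialization mirrors how the source constructs fractions elsewhere.

```cpp
// Illustrative usage sketch only (not part of the patch).
SegmentWriterCfg cfg;
cfg.segmentDuration = FractU64{2, 1};          // target ~2 s media segments (assumed ctor form)
SegmentWriter writer(cfg);
writer.AddTrack(videoTrackMeta);               // TrackMeta with trackId, timescale, type

for (const FrameWrapper& frame : inputFrames)  // frames fed in decode order
{
    if (writer.FeedOneFrame(videoTrackMeta.trackId, frame) ==
        SegmentWriter::Action::ExtractSegment)
    {
        for (const Segment& seg : writer.ExtractSegments())
            writer.WriteSegment(segmentStream, seg);   // styp (optional) + moof + mdat
    }
}

writer.FeedEOS(videoTrackMeta.trackId);        // flush the trailing partial segment
for (const Segment& seg : writer.ExtractSegments())
    writer.WriteSegment(segmentStream, seg);

// The init segment is generated and written separately via the free functions
// declared in this header:
//   InitialSegment init = GenInitSegment(trackDescriptions, movieDescription, true);
//   WriteInitSegment(initStream, init);
```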
+ +#ifndef _SEGMENTERAPI_H_ +#define _SEGMENTERAPI_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "Frame.h" +#include "DataItem.h" +#include "AcquireTrackData.h" + +using namespace std; + +VCD_MP4_BEGIN + +class MovieHeaderAtom; + +struct TrackInfo +{ + FrameTime tBegin; + TrackMeta trackMeta; + FrameTime dtsCtsOffset; +}; + +struct TrackOfSegment +{ + TrackInfo trackInfo; + Frames frames; +}; + +typedef map TrackSegmentMap; + +typedef IndexCalculation SequenceId; + +struct Segment +{ + TrackSegmentMap tracks; + SequenceId sequenceId; + FrameTime tBegin; + FractU64 duration; +}; + +typedef list SegmentList; + +struct FramesForTrack +{ + TrackMeta trackMeta; + Frames frames; + + FramesForTrack() + { + } + + FramesForTrack(TrackMeta aTrackMeta, Frames aFrames) + : trackMeta(aTrackMeta) + , frames(aFrames) + { + } + + FramesForTrack(FramesForTrack&& aOther) + : trackMeta(move(aOther.trackMeta)) + , frames(move(aOther.frames)) + { + } + + FramesForTrack(const FramesForTrack& aOther) = default; + FramesForTrack& operator=(const FramesForTrack&) = default; +}; + +typedef vector TrackFrames; + +struct FileTypeBoxWrapper; +struct MovieBoxWrapper; +struct MediaHeaderBoxWrapper; +struct HandlerBoxWrapper; +struct TrackHeaderBoxWrapper; +struct SampleEntryBoxWrapper; +struct RegionBlock; + +struct InitialSegment +{ + InitialSegment(); + InitialSegment(const InitialSegment& initSeg); + InitialSegment& operator=(const InitialSegment& initSeg); + InitialSegment& operator=(InitialSegment&& initSeg); + ~InitialSegment(); + unique_ptr moov; + unique_ptr ftyp; +}; + +struct FileInfo +{ + uint64_t creationTime; + uint64_t modificationTime; + FractU64 duration; +}; + +struct OmniSampleEntry +{ + OmniSampleEntry(); + virtual ~OmniSampleEntry(); + virtual unique_ptr GenSampleEntryBox() const = 0; + virtual unique_ptr GenHandlerBox() const = 0; + + virtual uint32_t GetWidthFP() const = 0; + + virtual uint32_t GetHeightFP() const = 0; +}; + +enum class OmniMediaType +{ + OMNI_Mono, + OMNI_StereoLR, + OMNI_StereoTB +}; + +struct RWPKGuardBand +{ + uint8_t leftGbWidth; + uint8_t rightGbWidth; + uint8_t topGbHeight; + uint8_t bottomGbHeight; + + bool gbNotUsedForPredFlag; + + uint8_t gbType0; + uint8_t gbType1; + uint8_t gbType2; + uint8_t gbType3; +}; + +struct RWPKRegion +{ + virtual ~RWPKRegion() = default; + virtual uint8_t packingType() const = 0; + virtual unique_ptr GenRegion() const = 0; +}; + +struct RwpkRectRegion : public RWPKRegion +{ + uint8_t packingType() const override; + unique_ptr GenRegion() const override; + + uint32_t projRegWidth; + uint32_t projRegHeight; + uint32_t projRegTop; + uint32_t projRegLeft; + + uint8_t transformType; + + uint16_t packedRegWidth; + uint16_t packedRegHeight; + uint16_t packedRegTop; + uint16_t packedRegLeft; + + DataItem rwpkGuardBand; +}; + +struct RegionWisePacking +{ + bool constituenPicMatching; + uint32_t projPicWidth; + uint32_t projPicHeight; + uint16_t packedPicWidth; + uint16_t packedPicHeight; + + vector> regions; +}; + +struct CoverageInformation +{ + COVIShapeType coverageShape; + bool viewIdcPresenceFlag; + OmniViewIdc defaultViewIdc; + vector> sphereRegions; +}; + +struct Mp4SchemeType +{ + string type; + uint32_t version; + string uri; +}; + +struct VideoSampleEntry : public OmniSampleEntry +{ + uint16_t width; + uint16_t height; + + uint32_t GetWidthFP() const override; + uint32_t GetHeightFP() const override; + + DataItem projFmt; + DataItem rwpk; + DataItem covi; + DataItem stvi; + DataItem 
rotn; + vector compatibleSchemes; + +protected: + void GenPovdBoxes(unique_ptr& box) const; +}; + +struct AvcVideoSampleEntry : public VideoSampleEntry +{ + vector sps; + vector pps; + + unique_ptr GenSampleEntryBox() const override; + unique_ptr GenHandlerBox() const override; +}; + +struct HevcVideoSampleEntry : public VideoSampleEntry +{ + FourCC sampleEntryType = "hvc1"; + + float frameRate; + + vector sps; + vector pps; + vector vps; + + unique_ptr GenSampleEntryBox() const override; + unique_ptr GenHandlerBox() const override; +}; + +struct HevcExtractorSampleConstructor +{ + int8_t trackId; + int8_t sampleOffset; + uint32_t dataOffset; + uint32_t dataLength; + + FrameBuf GenFrameData() const; +}; + +struct HevcExtractorInlineConstructor +{ + vector inlineData; + + FrameBuf GenFrameData() const; +}; + +struct HevcExtractor +{ + DataItem sampleConstructor; + DataItem inlineConstructor; + + FrameBuf GenFrameData() const; +}; + +struct HevcExtractorTrackPackedData +{ + uint8_t nuhTemporalIdPlus1; + vector samples; + + FrameBuf GenFrameData() const; +}; + +struct Ambisonic +{ + uint8_t type; + uint32_t order; + uint8_t channelOrdering; + uint8_t normalization; + vector channelMap; +}; + +struct ChannelPosition +{ + int speakerPosition = 0; + int azimuth = 0; + int elevation = 0; + + ChannelPosition() = default; +}; + +struct ChannelLayout +{ + int streamStructure = 0; + int layout = 0; + vector positions; + set omitted; + int objectCount = 0; + + ChannelLayout() = default; +}; + +struct MP4AudioSampleEntry : public OmniSampleEntry +{ + uint16_t sizeOfSample; + uint16_t cntOfChannels; + uint32_t rateOfSample; + uint16_t idOfES; + uint16_t esIdOfDepends; + string strUrl; + uint32_t sizeOfBuf; + uint32_t maxBitrate; + uint32_t avgBitrate; + string decSpecificInfo; // tag 5 + + bool isNonDiegetic; + DataItem ambisonicItem; + DataItem chnLayoutItem; + + unique_ptr GenSampleEntryBox() const override; + unique_ptr GenHandlerBox() const override; + uint32_t GetWidthFP() const override; + uint32_t GetHeightFP() const override; + + MP4AudioSampleEntry() = default; +}; + +struct TrackDescription +{ + TrackDescription(); + TrackDescription(TrackDescription&&); + TrackDescription(TrackMeta, FileInfo, const OmniSampleEntry&); + TrackDescription(TrackMeta, + list>, + unique_ptr&&, + unique_ptr&&, + unique_ptr&&); + TrackDescription& operator=(const TrackDescription&) = default; + ~TrackDescription(); + + TrackMeta trackMeta; + list> sampleEntryBoxes; + unique_ptr mediaHeaderBox; + unique_ptr handlerBox; + unique_ptr trackHeaderBox; + map> trackReferences; + DataItem alternateGroup; + + DataItem vrType; +}; + +typedef map TrackDescriptionsMap; + +struct SegmentCfg +{ + FractU64 duration; + SequenceId baseSequenceId; +}; + +struct MovieDescription +{ + uint64_t creationTime; + uint64_t modificationTime; + FractU64 duration; + vector matrix; + DataItem fileType; +}; + +InitialSegment GenInitSegment(const TrackDescriptionsMap& inTrackDes, + const MovieDescription& inMovieDes, + const bool isFraged); + +void WriteSegmentHeader(ostream& outStr); +void WriteInitSegment(ostream& outStr, const InitialSegment& initSegment); +void WriteSampleData(ostream& outStr, const Segment& oneSeg); + +struct SegmentWriterCfg +{ + bool checkIDR = false; + FractU64 segmentDuration; + DataItem subsegmentDuration; + size_t skipSubsegments = 0; +}; + +struct SidxInfo +{ + ostream::pos_type position; + size_t size; +}; + +class SidxWriter +{ +public: + SidxWriter() = default; + ~SidxWriter() = default; + + void AddSubSeg(Segment) 
+ { + } + + void SetFirstSubSegOffset(streampos) + { + } + + void AddSubSegSize(streampos) + { + } + + DataItem WriteSidx(ostream&, DataItem) + { + return {}; + } + + void SetOutput(ostream*) + { + } +}; + +class SegmentWriter +{ +public: + SegmentWriter(SegmentWriterCfg inCfg); + + virtual ~SegmentWriter(); + + enum class Action + { + KeepFeeding, + ExtractSegment + }; + + void AddTrack(TrackId trackIndex, TrackMeta inTrackMeta); + + void AddTrack(TrackMeta inTrackMeta); + + Action FeedOneFrame(TrackId trackIndex, FrameWrapper oneFrame); + Action FeedEOS(TrackId aTrackId); + + void SetWriteSegmentHeader(bool toWriteHdr); + void WriteInitSegment(ostream& outStr, const InitialSegment& initSegment); + void WriteSegment(ostream& outStr, const Segment oneSeg); + void WriteSubSegments(ostream& outStr, const list subSegList); + + list ExtractSubSegments(); + SegmentList ExtractSegments(); + +private: + struct Impl; + + unique_ptr m_impl; + + unique_ptr m_sidxWriter; + + bool m_needWriteSegmentHeader = true; +}; + +struct SegmentWriter::Impl +{ + struct TrackState + { + Impl* imple; + TrackMeta trackMeta; + Frames frames; + + struct SubSegment + { + Frames frames; + }; + + list SubSegments; + + list> FullSubSegments; + + FrameTime segDur; + FrameTime subSegDur; + + bool isEnd = false; + + bool hasSubSeg = false; + + size_t insertSubSegNum = 0; + + FrameTime trackOffset; + + TrackState(Impl* aImpl = nullptr) + : imple(aImpl) + { + } + + void FeedOneFrame(FrameWrapper oneFrame); + + void FeedEOS(); + + bool IsFinished() const; + + bool IsIncomplete() const; + + bool CanTakeSegment() const; + + list TakeSegment(); + }; + + bool AllTracksFinished() const; + + bool AllTracksReadyForSegment() const; + + bool AnyTrackIncomplete() const; + + Impl* const m_imple; + SegmentWriterCfg m_config; + map m_trackSte; + FrameTime m_segBeginT; + bool m_isFirstSeg = true; + + FrameTime m_offset; + + SequenceId m_seqId; + + Impl() + : m_imple(this) + { + } +}; + +VCD_MP4_END; +#endif // _SEGMENTERAPI_H_ diff --git a/src/isolib/dash_writer/Utils.h b/src/isolib/dash_writer/Utils.h new file mode 100644 index 00000000..a2863e14 --- /dev/null +++ b/src/isolib/dash_writer/Utils.h @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Utils.h +//! \brief: Basic data type utility operation +//! + +#ifndef _UTILS_H_ +#define _UTILS_H_ + +#include +#include +#include +#include +#include + +#include "Atom.h" +#include "Stream.h" +#include "AvcConfigAtom.h" + +using namespace std; + +VCD_MP4_BEGIN + +template +unique_ptr MakeUnique(Args... args) +{ + return unique_ptr(new T(forward(args)...)); +} + +template +unique_ptr StaticCast(unique_ptr&& aPtr) +{ + return unique_ptr{static_cast(aPtr.release())}; +} + +template +auto ContMapSet(Function map, const Container& container) -> set +{ + set set; + for (const auto& x : container) + { + set.insert(map(x)); + } + return set; +} + +template +auto Keys(const Container& cont) -> vector +{ + vector xs; + + for (const auto& x : cont) + { + xs.push_back(x.first); + } + + return xs; +} + +class InvertTrue +{ +public: + bool operator()() + { + if (m_first) + { + m_first = false; + return true; + } + else + { + return false; + } + } + +private: + bool m_first = true; +}; + +VCD_MP4_END; +#endif //_UTILS_H_ diff --git a/src/isolib/include/Common.h b/src/isolib/include/Common.h new file mode 100644 index 00000000..c59c183e --- /dev/null +++ b/src/isolib/include/Common.h @@ -0,0 +1,216 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Common.h +//! \brief: Include the common system and data type header files that needed +//! 
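
The small helpers in `Utils.h` carry much of the boilerplate in `SegmentWriter.cpp`: `MakeUnique`/`StaticCast` build and re-type atom pointers, `Keys` collects the track IDs out of a segment's track map, and `InvertTrue` is a one-shot latch that `ExtractSubSegments` uses to seed the per-segment time interval from the first track. The snippet below illustrates that latch pattern only; `perTrackSpans` is a hypothetical container, and `TimeInterval`/`ExtendInterval` are the file-local helpers defined in `SegmentWriter.cpp`.

```cpp
// Sketch only: InvertTrue returns true exactly once, so the first track's span
// seeds the interval and every later track merely widens it.
InvertTrue firstSpan;
TimeInterval segmentSpan{};
for (const TimeInterval& trackSpan : perTrackSpans)   // hypothetical input
{
    if (firstSpan())
        segmentSpan = trackSpan;                              // first pass: take as-is
    else
        segmentSpan = ExtendInterval(segmentSpan, trackSpan); // widen to the union
}
```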
+ +#ifndef _DASHCOMMON_H_ +#define _DASHCOMMON_H_ + +#include "../../utils/ns_def.h" +#include "../../utils/error.h" +#include "../../utils/GlogWrapper.h" +#include "../common/ISOLog.h" + +#include +#include +#include + +VCD_MP4_BEGIN + +#define MP4_BUILD_VERSION "v1.0.0" + +#define DELETE_MEMORY(x) \ + if (x) \ + { \ + delete x; \ + x = NULL; \ + } + +#define DELETE_ARRAY(x) \ + if (x) \ + { \ + delete[] x; \ + x = NULL; \ + } + +#define MEMBER_SETANDGET_FUNC_WITH_OPTION(Type, Member, MemberName, Option) \ +public: \ + void Set##MemberName(Type v) \ + { \ + Member = v; \ + } \ + Type Get##MemberName() Option \ + { \ + return Member; \ + } \ + +struct FourCC +{ + char item[5]; + inline FourCC() + : item{} + { + } + inline FourCC(uint32_t value) + { + item[0] = char((value >> 24) & 0xff); + item[1] = char((value >> 16) & 0xff); + item[2] = char((value >> 8) & 0xff); + item[3] = char((value >> 0) & 0xff); + item[4] = '\0'; + } + inline FourCC(const char* str) + { + item[0] = str[0]; + item[1] = str[1]; + item[2] = str[2]; + item[3] = str[3]; + item[4] = '\0'; + } + inline FourCC(const FourCC& fourcc) + { + item[0] = fourcc.item[0]; + item[1] = fourcc.item[1]; + item[2] = fourcc.item[2]; + item[3] = fourcc.item[3]; + item[4] = '\0'; + } + inline FourCC& operator=(const FourCC& other) + { + item[0] = other.item[0]; + item[1] = other.item[1]; + item[2] = other.item[2]; + item[3] = other.item[3]; + item[4] = '\0'; + return *this; + } + inline bool operator==(const FourCC& other) const + { + return (item[0] == other.item[0]) && (item[1] == other.item[1]) && (item[2] == other.item[2]) && + (item[3] == other.item[3]); + } + inline bool operator!=(const FourCC& other) const + { + return (item[0] != other.item[0]) || (item[1] != other.item[1]) || (item[2] != other.item[2]) || + (item[3] != other.item[3]); + } + inline bool operator<(const FourCC& other) const + { + return (item[0] < other.item[0]) + ? true + : (item[0] > other.item[0]) + ? false + : (item[1] < other.item[1]) + ? true + : (item[1] > other.item[1]) + ? false + : (item[2] < other.item[2]) + ? true + : (item[2] > other.item[2]) + ? false + : (item[3] < other.item[3]) + ? true + : (item[3] > other.item[3]) ? 
false : false; + } + inline bool operator<=(const FourCC& other) const + { + return *this == other || *this < other; + } + inline bool operator>=(const FourCC& other) const + { + return !(*this < other); + } + inline bool operator>(const FourCC& other) const + { + return !(*this <= other); + } +}; + +enum class OmniViewIdc : uint8_t +{ + OMNI_MONOSCOPIC = 0, + OMNI_LEFT = 1, + OMNI_RIGHT = 2, + OMNI_LEFT_AND_RIGHT = 3, + OMNI_INVALID = 0xff +}; + +enum class COVIShapeType : uint8_t +{ + FOUR_GREAT_CIRCLES = 0, + TWO_AZIMUTH_AND_TWO_ELEVATION_CIRCLES +}; + +struct COVIRegion +{ + OmniViewIdc viewIdc; + int32_t centAzimuth; + int32_t centElevation; + int32_t centTilt; + uint32_t azimuthRange; + uint32_t elevationRange; + bool interpolate; +}; + +enum class OmniProjFormat +{ + OMNI_ERP = 0, + OMNI_Cubemap, + OMNI_Planar +}; + +enum class VideoFramePackingType : uint8_t +{ + OMNI_TOPBOTTOM = 3, + OMNI_SIDEBYSIDE = 4, + OMNI_TEMPINTERLEAVING = 5, + OMNI_MONOSCOPIC = 0x8f +}; + +struct Rotation +{ + int32_t yaw; + int32_t pitch; + int32_t roll; +}; + +struct FlagTypesForSample +{ + uint32_t reserved : 4, is_leading : 2, sample_depends_on : 2, sample_is_depended_on : 2, + sample_has_redundancy : 2, sample_padding_value : 3, sample_is_non_sync_sample : 1, + sample_degradation_priority : 16; +}; + +union FlagsOfSample { + uint32_t flagsAsUInt; + FlagTypesForSample flags; +}; + +VCD_MP4_END; +#endif /* _DASHCOMMON_H_ */ diff --git a/src/isolib/include/Index.h b/src/isolib/include/Index.h new file mode 100644 index 00000000..f15ef7c2 --- /dev/null +++ b/src/isolib/include/Index.h @@ -0,0 +1,195 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +//! +//! \file: Index.h +//! \brief: Index class definition +//! \detail: Define the basic operation for index related numerical value +//! 
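
`Common.h`'s `FourCC` wraps four-character codes (brands, box types) in a NUL-terminated, fully ordered value type, so a code can be built from either a string literal or the packed 32-bit value and used directly as a map key; likewise, the `FlagsOfSample` union overlays the ISO sample-flags bitfield on the raw 32-bit value that `WriteMoof` copies into each `trun` sample entry via `flagsAsUInt`. The snippet below is illustrative only and not part of the patch.

```cpp
#include <cassert>
#include <map>

// Illustrative only: a FourCC built from a literal and from the equivalent
// packed big-endian value compare equal, and operator< makes it map-key-able.
void FourCCExample()
{
    FourCC brand("hvc1");
    FourCC packed(0x68766331u);        // 'h' 'v' 'c' '1'
    assert(brand == packed);

    std::map<FourCC, int> atomCount;   // ordered by the lexicographic operator<
    ++atomCount[FourCC("moof")];
    ++atomCount[FourCC("mdat")];
}
```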
+ +#ifndef _IDBASE_H_ +#define _IDBASE_H_ + +#include +#include "Common.h" + +VCD_MP4_BEGIN + +template +class Index +{ +public: + Index() + : m_index() + { + } + Index(T id) + : m_index(id) + { + } + + T GetIndex() const + { + return m_index; + } + +protected: + T m_index; +}; + +template +class IndexExplicit : public Index +{ +public: + IndexExplicit() + : Index() + { + } + explicit IndexExplicit(T id) + : Index(id) + { + } +}; + +template +class IndexCalculation : public Index +{ +public: + IndexCalculation() + : Index() + { + } + IndexCalculation(T id) + : Index(id) + { + } + + IndexCalculation& operator++(); + ; + IndexCalculation& operator--(); +}; + +template +bool operator<(Index a, Index b) +{ + return a.GetIndex() < b.GetIndex(); +} + +template +bool operator==(Index a, Index b) +{ + return a.GetIndex() == b.GetIndex(); +} + +template +bool operator!=(Index a, Index b) +{ + return a.GetIndex() != b.GetIndex(); +} + +template +bool operator>(Index a, Index b) +{ + return a.GetIndex() > b.GetIndex(); +} + +template +bool operator<=(Index a, Index b) +{ + return a.GetIndex() <= b.GetIndex(); +} + +template +bool operator>=(Index a, Index b) +{ + return a.GetIndex() >= b.GetIndex(); +} + +template +IndexCalculation operator+(IndexCalculation a, IndexCalculation b) +{ + return a.GetIndex() + b.GetIndex(); +} + +template +IndexCalculation& IndexCalculation::operator++() +{ + ++Index::m_index; + return *this; +} + +template +IndexCalculation& IndexCalculation::operator--() +{ + --Index::m_index; + return *this; +} + +template +IndexCalculation operator++(IndexCalculation& a, int) +{ + auto orig = a; + ++a; + return orig; +} + +template +IndexCalculation operator--(IndexCalculation& a, int) +{ + auto orig = a; + --a; + return orig; +} + +template +IndexCalculation operator-(IndexCalculation a, IndexCalculation b) +{ + return a.GetIndex() - b.GetIndex(); +} + +template +IndexCalculation& operator+=(IndexCalculation& a, IndexCalculation b) +{ + a = a.GetIndex() + b.GetIndex(); + return a; +} + +template +IndexCalculation& operator-=(IndexCalculation& a, IndexCalculation b) +{ + a = a.GetIndex() - b.GetIndex(); + return a; +} + +template +std::ostream& operator<<(std::ostream& stream, Index value) +{ + stream << value.GetIndex(); + return stream; +} + +VCD_MP4_END; +#endif // _IDBASE_H_ diff --git a/src/player/CMakeLists.txt b/src/player/CMakeLists.txt deleted file mode 100644 index 79b6229f..00000000 --- a/src/player/CMakeLists.txt +++ /dev/null @@ -1,57 +0,0 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) - -option(USE_OMAF - "Use omaf" - OFF -) - -option(USE_WEBRTC - "Use webrtc" - OFF -) - -PROJECT(player) - -if(USE_OMAF) -ADD_DEFINITIONS("-g -c -fPIC -lglog -std=c++11 -fpermissive") -endif() - -if(USE_WEBRTC) -ADD_DEFINITIONS("-g -c -fPIC -lglog -std=c++11 -fpermissive -DLOW_LATENCY_USAGE") -endif() - -INCLUDE_DIRECTORIES(/usr/local/include ../utils) - -LINK_DIRECTORIES(/usr/local/lib) - -if(USE_WEBRTC) - if(NOT DEFINED WEBRTC_LINUX_SDK) - message(SEND_ERROR "WEBRTC_LINUX_SDK is required") - endif() - - ADD_DEFINITIONS("-D_ENABLE_WEBRTC_SOURCE_ -DWEBRTC_LINUX -DWEBRTC_POSIX") - INCLUDE_DIRECTORIES("${WEBRTC_LINUX_SDK}/include") - LINK_DIRECTORIES("${WEBRTC_LINUX_SDK}/lib") -endif() - -AUX_SOURCE_DIRECTORY(. 
DIR_SRC) -AUX_SOURCE_DIRECTORY(../utils/ UTIL_SRC) - -set(DIR_SRC - ${DIR_SRC} - ${UTIL_SRC} - ) - -ADD_EXECUTABLE(render ${DIR_SRC}) - -if(USE_OMAF) - set(LINK_LIB ${LINK_LIB} OmafDashAccess) -endif() - -if(USE_WEBRTC) - set(LINK_LIB ${LINK_LIB} owt sioclient_tls ssl crypto boost_system dl) -endif() - -set(LINK_LIB ${LINK_LIB} avfilter avformat avcodec avdevice avutil swscale swresample va va-drm va-x11 360SCVP glfw GL GLU X11 pthread lzma z glog EGL GLESv2 dl) - -TARGET_LINK_LIBRARIES( render ${LINK_LIB} ) diff --git a/src/player/DMABufferRenderSource.cpp b/src/player/DMABufferRenderSource.cpp deleted file mode 100644 index cc521c84..00000000 --- a/src/player/DMABufferRenderSource.cpp +++ /dev/null @@ -1,330 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file DMABufferRenderSource.cpp -//! \brief Implement class for DMABufferRenderSource. -//! 
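
The DMABufferRenderSource implementation being removed below wired VAAPI output into OpenGL without a copy: the decoded NV12 surface is exported as a dma-buf, each plane is wrapped in an EGLImage, and the EGLImage backs a GL texture (R8 for luma, GR88 for interleaved chroma). A condensed sketch of that import path follows, assuming the usual EGL_EXT_image_dma_buf_import and GL_OES_EGL_image extensions are available; the fd/offset/pitch values come from vaDeriveImage/vaAcquireBufferHandle as in the code below, and error handling is omitted:

```cpp
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <drm_fourcc.h>   // assumed libdrm include path; provides DRM_FORMAT_R8 / DRM_FORMAT_GR88

// Wrap one dma-buf plane into an EGLImage (no copy is made; the image aliases the fd).
static EGLImageKHR ImportPlane(EGLDisplay dpy, int dmabufFd, uint32_t drmFormat,
                               EGLint width, EGLint height, EGLint offset, EGLint pitch)
{
    PFNEGLCREATEIMAGEKHRPROC createImage =
        (PFNEGLCREATEIMAGEKHRPROC)eglGetProcAddress("eglCreateImageKHR");

    const EGLint attribs[] = {
        EGL_LINUX_DRM_FOURCC_EXT,      (EGLint)drmFormat,
        EGL_WIDTH,                     width,
        EGL_HEIGHT,                    height,
        EGL_DMA_BUF_PLANE0_FD_EXT,     dmabufFd,
        EGL_DMA_BUF_PLANE0_OFFSET_EXT, offset,
        EGL_DMA_BUF_PLANE0_PITCH_EXT,  pitch,
        EGL_NONE};
    return createImage(dpy, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, nullptr, attribs);
}

// Bind the imported plane to a texture so shaders can sample the VAAPI surface directly.
static void BindPlaneToTexture(GLuint tex, EGLImageKHR image)
{
    PFNGLEGLIMAGETARGETTEXTURE2DOESPROC imageTargetTexture =
        (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglGetProcAddress("glEGLImageTargetTexture2DOES");
    glBindTexture(GL_TEXTURE_2D, tex);
    imageTargetTexture(GL_TEXTURE_2D, (GLeglImageOES)image);
}
```

Compared with the av_hwframe_transfer_data readback path also present in the deleted sources, this keeps 8K frames on the GPU instead of pulling them back to system memory every frame.
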
-#ifdef USE_DMA_BUFFER -#include "DMABufferRenderSource.h" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "Render2TextureMesh.h" - -#include -#include -#include -#include -#include - -void (EGLAPIENTRY *EGLImageTargetTexture2DOES)(GLenum, GLeglImageOES); -EGLImageKHR (EGLAPIENTRY *CreateImageKHR)(EGLDisplay, EGLContext,EGLenum, EGLClientBuffer, const EGLint *); - -EGLImageKHR images[2] = {EGL_NO_IMAGE_KHR, EGL_NO_IMAGE_KHR}; -extern VABufferInfo buffer_info; -extern VAImage va_image; -extern EGLDisplay pEglDisplay; //to do: move it into the class - -#define MP_ARRAY_SIZE(s) (sizeof(s) / sizeof((s)[0])) -#define ADD_ATTRIB(name, value) \ - do { \ - assert(num_attribs + 3 < MP_ARRAY_SIZE(attribs)); \ - attribs[num_attribs++] = (name); \ - attribs[num_attribs++] = (value); \ - attribs[num_attribs] = EGL_NONE; \ - } while(0) - -VCD_NS_BEGIN - -DMABufferRenderSource::DMABufferRenderSource(RenderBackend *renderBackend) -{ - //1.render to texture : vertex and texCoords assign - m_videoShaderOfR2T.Bind(); - m_meshOfR2T = new Render2TextureMesh(); - m_meshOfR2T->Create(); - uint32_t vertexAttribOfR2T = m_videoShaderOfR2T.SetAttrib("vPosition"); - uint32_t texCoordsAttribOfR2T = m_videoShaderOfR2T.SetAttrib("aTexCoord"); - m_meshOfR2T->Bind(renderBackend, vertexAttribOfR2T, texCoordsAttribOfR2T); -} - -DMABufferRenderSource::~DMABufferRenderSource() -{ - if (m_meshOfR2T) - { - delete m_meshOfR2T; - m_meshOfR2T = NULL; - } -} - -RenderStatus DMABufferRenderSource::Initialize(struct MediaSourceInfo *mediaSourceInfo) -{ - if (NULL == mediaSourceInfo) - { - return RENDER_ERROR; - } - uint32_t number = 0; - struct SourceWH packedWH; - switch (mediaSourceInfo->pixFormat) - { - case PixelFormat::PIX_FMT_RGB24: - break; - case PixelFormat::PIX_FMT_YUV420P: - break; - case PixelFormat::AV_PIX_FMT_NV12_DMA_BUFFER: - number = 2; - packedWH.width = new uint32_t[number]; - packedWH.height = new uint32_t[number]; - packedWH.width[0] = mediaSourceInfo->width; - packedWH.width[1] = packedWH.width[0] / 2; - packedWH.height[0] = mediaSourceInfo->height; - packedWH.height[1] = packedWH.height[0] / 2; - break; - default: - break; - } - SetSourceWH(packedWH); - SetSourceTextureNumber(number); - EGLImageTargetTexture2DOES = (void (*)(GLenum, GLeglImageOES))eglGetProcAddress("glEGLImageTargetTexture2DOES"); - CreateImageKHR = (void* (*)(EGLDisplay, EGLContext, EGLenum, EGLClientBuffer, const EGLint*))eglGetProcAddress("eglCreateImageKHR"); - return RENDER_STATUS_OK; -} - -RenderStatus DMABufferRenderSource::CreateRenderSource(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - if (CreateSourceTex(renderBackend) != RENDER_STATUS_OK || CreateR2TFBO(renderBackend) != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - return RENDER_STATUS_OK; -} - -RenderStatus DMABufferRenderSource::CreateSourceTex(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - //1. initial r2t three textures. 
- uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = new uint32_t[sourceTextureNumber]; - renderBackend->GenTextures(sourceTextureNumber, sourceTextureHandle); - SetSourceTextureHandle(sourceTextureHandle); - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - struct SourceWH sourceWH = GetSourceWH(); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[i]); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - if (i == 1 && sourceTextureNumber == 2) //hardware decoding - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_RG, sourceWH.width[i], sourceWH.height[i], 0, GL_RG, GL_UNSIGNED_BYTE, NULL); - else - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_RED, sourceWH.width[i], sourceWH.height[i], 0, GL_RED, GL_UNSIGNED_BYTE, NULL); - } - - //renderBackend->BindTexture(GL_TEXTURE_2D, 0); - return RENDER_STATUS_OK; -} - -RenderStatus DMABufferRenderSource::CreateR2TFBO(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - //2.initial FBOs - uint32_t textureOfR2T; - renderBackend->GenTextures(1, &textureOfR2T); - renderBackend->BindTexture(GL_TEXTURE_2D, textureOfR2T); - SetTextureOfR2T(textureOfR2T); - struct SourceWH sourceWH = GetSourceWH(); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[0]); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_RGB, sourceWH.width[0], sourceWH.height[0], 0, GL_RGB, GL_UNSIGNED_BYTE, NULL); - - uint32_t fboR2THandle; - renderBackend->GenFramebuffers(1, &fboR2THandle); - renderBackend->BindFramebuffer(GL_FRAMEBUFFER, fboR2THandle); - SetFboR2THandle(fboR2THandle); - renderBackend->FramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureOfR2T, 0); - - if (renderBackend->CheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) - { - printf("glCheckFramebufferStatus not complete\n"); - return RENDER_ERROR; - } - else - { - printf("glCheckFramebufferStatus complete\n"); - } - return RENDER_STATUS_OK; -} - -RenderStatus DMABufferRenderSource::UpdateR2T(RenderBackend *renderBackend, void **buffer) -{ - static int frame_number =0; - frame_number++; - if (frame_number <2) return RENDER_ERROR; - if (NULL == renderBackend || NULL == buffer) - { - return RENDER_ERROR; - } - //1. 
update source texture - uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = GetSourceTextureHandle(); - struct SourceWH sourceWH = GetSourceWH(); - - if (buffer_info.handle == 0) - return RENDER_ERROR; - for (int n = 0; n < 2; n++) { - int attribs[20] = {EGL_NONE}; - uint32_t num_attribs = 0; - int drm_fmt; - - if (n == 0) drm_fmt = DRM_FORMAT_R8; - else if (n ==1) drm_fmt = DRM_FORMAT_GR88; - - ADD_ATTRIB(EGL_LINUX_DRM_FOURCC_EXT, drm_fmt); - if (n == 0) ADD_ATTRIB(EGL_WIDTH, 5760); //hard coded - else ADD_ATTRIB(EGL_WIDTH, (5760/2)); - if (n == 0)ADD_ATTRIB(EGL_HEIGHT, 3840); //hard coded - else ADD_ATTRIB(EGL_HEIGHT, (3840/2)); //hard coded - ADD_ATTRIB(EGL_DMA_BUF_PLANE0_FD_EXT, buffer_info.handle); - ADD_ATTRIB(EGL_DMA_BUF_PLANE0_OFFSET_EXT, va_image.offsets[n]); - ADD_ATTRIB(EGL_DMA_BUF_PLANE0_PITCH_EXT, va_image.pitches[n]); - //if(images[n] == NULL) - images[n] = CreateImageKHR(pEglDisplay,EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, attribs); - - if (!images[n]) - return RENDER_ERROR; - - } - - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - //if (buffer == NULL) break; - //if (buffer[i] == NULL) break; - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[i]); - if ( sourceTextureNumber == 1) - { - renderBackend->TexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, sourceWH.width[i], sourceWH.height[i], GL_RGB, GL_UNSIGNED_BYTE, buffer[i]); //use rgb data - } - else if (sourceTextureNumber == 2 && i == 1 ) - { - EGLImageTargetTexture2DOES(GL_TEXTURE_2D, images[i]); - } - else - { - EGLImageTargetTexture2DOES(GL_TEXTURE_2D, images[i]); - } - } - //2. 
bind source texture and r2tFBO - uint32_t fboR2THandle = GetFboR2THandle(); - renderBackend->BindFramebuffer(GL_FRAMEBUFFER, fboR2THandle); - - m_videoShaderOfR2T.Bind(); - renderBackend->BindVertexArray(renderBackend->GetR2TVAOHandle()); // check - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - } - renderBackend->Viewport(0, 0, sourceWH.width[0], sourceWH.height[0]); - renderBackend->DrawArrays(GL_TRIANGLES, 0, 6); - return RENDER_STATUS_OK; -} - -RenderStatus DMABufferRenderSource::DestroyRenderSource(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - uint32_t textureOfR2T = GetTextureOfR2T(); - if (textureOfR2T) - { - renderBackend->DeleteTextures(1, &textureOfR2T); - } - uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = GetSourceTextureHandle(); - if (sourceTextureHandle) - { - renderBackend->DeleteTextures(sourceTextureNumber, sourceTextureHandle); - } - uint32_t fboR2THandle = GetFboR2THandle(); - if (fboR2THandle) - { - renderBackend->DeleteFramebuffers(1, &fboR2THandle); - } - return RENDER_STATUS_OK; -} - -VCD_NS_END -#endif /* USE_DMA_BUFFER */ \ No newline at end of file diff --git a/src/player/DashMediaSource.cpp b/src/player/DashMediaSource.cpp deleted file mode 100644 index 6caa65d6..00000000 --- a/src/player/DashMediaSource.cpp +++ /dev/null @@ -1,705 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file DashMediaSource.cpp -//! \brief Implement class for DashMediaSource. -//! 
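
The CreateR2TFBO/UpdateR2T code above renders the decoded planes into an intermediate texture through a framebuffer object before the viewport pass. A minimal sketch of that render-to-texture setup, using raw GLES2 calls instead of the RenderBackend wrapper; a current EGL/GLES context is assumed, the 5760x3840 size is the packed-picture placeholder hard-coded elsewhere in the player, and error handling is trimmed:

```cpp
#include <GLES2/gl2.h>

// Create the intermediate color texture plus the FBO that targets it.
GLuint CreateR2TTarget(GLuint* outTexture, GLsizei width = 5760, GLsizei height = 3840)
{
    glGenTextures(1, outTexture);
    glBindTexture(GL_TEXTURE_2D, *outTexture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, nullptr);

    GLuint fbo = 0;
    glGenFramebuffers(1, &fbo);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, *outTexture, 0);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        // the attachment combination is not renderable on this driver
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
        return 0;
    }
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    return fbo;
}

// Per frame: after the NV12 planes have been uploaded/imported, draw the packed picture into the FBO.
void RenderIntoTarget(GLuint fbo, GLsizei width, GLsizei height)
{
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    glViewport(0, 0, width, height);
    glDrawArrays(GL_TRIANGLES, 0, 6);  // two triangles covering the full-screen quad mesh
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
```
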
-#ifndef _ENABLE_WEBRTC_SOURCE_ -#include -#include -#include -#include "RenderType.h" -#include "DashMediaSource.h" -#include "OmafDashAccessApi.h" - -#define MAX_LIST_NUMBER 30 -#define MIN_LIST_REMAIN 2 - -VCD_NS_BEGIN - -DashMediaSource::DashMediaSource() -{ - pthread_mutex_init(&m_frameMutex, NULL); - m_status = STATUS_UNKNOWN; - InitializeDashSourceData(); - m_handler = NULL; - -} - -DashMediaSource::~DashMediaSource() -{ - m_status = STATUS_STOPPED; - this->Join(); - int32_t res = pthread_mutex_destroy(&m_frameMutex); - if (res != 0) {return;} - if (!m_frameBuffer.empty()) - { - for (auto &fb : m_frameBuffer) - { - delete fb; - fb = NULL; - } - } - if (!m_rwpkList.empty()) - { - for (auto rwpk : m_rwpkList) - { - if (rwpk.rectRegionPacking != NULL) - { - delete rwpk.rectRegionPacking; - rwpk.rectRegionPacking = NULL; - } - } - } - ClearDashSourceData(); - OmafAccess_CloseMedia(m_handler); - OmafAccess_Close(m_handler); -} - -RenderStatus DashMediaSource::GetPacket(AVPacket *pkt, RegionWisePacking *rwpk) -{ - if (NULL == m_handler) - { - return RENDER_ERROR; - } - uint32_t streamID = 0; - //1. get one packet from DashStreaming lib. - DashPacket dashPkt[5]; - memset(dashPkt, 0, 5 * sizeof(DashPacket)); - int dashPktNum = 0; - static bool needHeaders = true; - if (ERROR_NONE != OmafAccess_GetPacket(m_handler, streamID, &(dashPkt[0]), &dashPktNum, (uint64_t *)&(pkt->pts), needHeaders, false))//lack of rwpk - { - return RENDER_ERROR; - } - if (NULL != dashPkt[0].buf && dashPkt[0].size && dashPktNum != 0) - { - int size = dashPkt[0].size; - if (av_new_packet(pkt, size) < 0) - { - return RENDER_ERROR; - } - memcpy(pkt->data, dashPkt[0].buf, size); - pkt->size = size; - *rwpk = *(dashPkt[0].rwpk); - free(dashPkt[0].buf); - dashPkt[0].buf = NULL; - delete dashPkt[0].rwpk; - dashPkt[0].rwpk = NULL; - if (needHeaders) - { - needHeaders = false; - } - m_mediaSourceInfo.currentFrameNum++; - LOG(INFO)<<"-=-=-Get packet number-=-=-"<= 0 && m_dashSourceData.av_frame->linesize[0]) - { - uint32_t bufferNumber = 0; - switch (m_mediaSourceInfo.pixFormat) - { - case PixelFormat::PIX_FMT_RGB24: - bufferNumber = 1; - break; - case PixelFormat::PIX_FMT_YUV420P: - bufferNumber = 3; - break; - } - for (uint32_t i = 0; i < bufferNumber; i++) - { - if (m_dashSourceData.av_frame->linesize[i] > 0) - { - int factor = i==0? 1:4; - buffer[i] = new uint8_t[(m_dashSourceData.codec_ctx->width * m_dashSourceData.codec_ctx->height)/factor]; - } - } - m_mediaSourceInfo.stride = m_dashSourceData.av_frame->linesize[0]; - isAllValid = true; - switch (m_mediaSourceInfo.pixFormat) - { - case PixelFormat::PIX_FMT_RGB24: - memcpy(buffer[0], m_dashSourceData.av_frame->data[0], m_dashSourceData.av_frame->linesize[0] * m_dashSourceData.codec_ctx->height * 3); - break; - case PixelFormat::PIX_FMT_YUV420P: - for (uint32_t j=0;jheight)/factor;i++) - { - memcpy(buffer[j] + i * (m_dashSourceData.codec_ctx->width)/factor, m_dashSourceData.av_frame->data[j] + i * m_dashSourceData.av_frame->linesize[j], (m_dashSourceData.codec_ctx->width)/factor); - } - } - break; - default: - break; - } - return DECODER_OK; - } - else - { - return FRAME_ERROR; - } -} - -DecoderStatus DashMediaSource::FlushFrames(uint8_t **buffer) -{ - int32_t ret = 0; - ret = avcodec_receive_frame(m_dashSourceData.codec_ctx, m_dashSourceData.av_frame); - if (ret < 0) - { - return FRAME_ERROR; - } - //2. transfer av_frame to buffer. 
- if (ret >= 0 && m_dashSourceData.av_frame->linesize[0]) - { - uint32_t bufferNumber = 0; - switch (m_mediaSourceInfo.pixFormat) - { - case PixelFormat::PIX_FMT_RGB24: - bufferNumber = 1; - break; - case PixelFormat::PIX_FMT_YUV420P: - bufferNumber = 3; - break; - } - for (uint32_t i = 0; i < bufferNumber; i++) - { - if (m_dashSourceData.av_frame->linesize[i] > 0) - { - uint32_t factor = i==0? 1:4; - buffer[i] = new uint8_t[(m_dashSourceData.codec_ctx->width * m_dashSourceData.codec_ctx->height)/factor]; - } - } - m_mediaSourceInfo.stride = m_dashSourceData.av_frame->linesize[0]; - isAllValid = true; - switch (m_mediaSourceInfo.pixFormat) - { - case PixelFormat::PIX_FMT_RGB24: - memcpy(buffer[0], m_dashSourceData.av_frame->data[0], m_dashSourceData.av_frame->linesize[0] * m_dashSourceData.codec_ctx->height * 3); - break; - case PixelFormat::PIX_FMT_YUV420P: - for (uint32_t j=0;jheight)/factor;i++) - { - memcpy(buffer[j] + i * (m_dashSourceData.codec_ctx->width)/factor, m_dashSourceData.av_frame->data[j] + i * m_dashSourceData.av_frame->linesize[j], (m_dashSourceData.codec_ctx->width)/factor); - } - } - break; - default: - break; - } - return DECODER_OK; - } - else - { - return FRAME_ERROR; - } - return DECODER_OK; -} - -RenderStatus DashMediaSource::SetRegionInfo(struct RegionInfo *regionInfo) -{ - if (NULL == regionInfo) - { - return RENDER_ERROR; - } - regionInfo->sourceNumber = m_mediaSourceInfo.sourceNumber; - regionInfo->sourceInfo = (struct SourceInfo *)malloc(sizeof(struct SourceInfo) * regionInfo->sourceNumber); - if (NULL == regionInfo->sourceInfo) - { - return RENDER_ERROR; - } - regionInfo->sourceInfo[0].sourceWidth = regionInfo->regionWisePacking->projPicWidth; - regionInfo->sourceInfo[0].sourceHeight = regionInfo->regionWisePacking->projPicHeight; - regionInfo->sourceInfo[0].tileColumnNumber = regionInfo->sourceInfo[0].sourceWidth / regionInfo->regionWisePacking->rectRegionPacking[0].projRegWidth; - regionInfo->sourceInfo[0].tileRowNumber = regionInfo->sourceInfo[0].sourceHeight / regionInfo->regionWisePacking->rectRegionPacking[0].projRegHeight; - - regionInfo->sourceInfo[1].sourceWidth = m_mediaSourceInfo.sourceWH->width[1]; - regionInfo->sourceInfo[1].sourceHeight = m_mediaSourceInfo.sourceWH->height[1]; - regionInfo->sourceInfo[1].tileColumnNumber = regionInfo->sourceInfo[1].sourceWidth / regionInfo->regionWisePacking->rectRegionPacking[regionInfo->regionWisePacking->numRegions - 1].projRegWidth; - regionInfo->sourceInfo[1].tileRowNumber = regionInfo->sourceInfo[1].sourceHeight / regionInfo->regionWisePacking->rectRegionPacking[regionInfo->regionWisePacking->numRegions - 1].projRegHeight; - - return RENDER_STATUS_OK; -} - -RenderStatus DashMediaSource::ClearDashSourceData() -{ - if (m_dashSourceData.codec_ctx) - avcodec_close(m_dashSourceData.codec_ctx); - if (m_dashSourceData.av_frame) - av_free(m_dashSourceData.av_frame); - if (m_dashSourceData.gl_frame) - av_free(m_dashSourceData.gl_frame); - InitializeDashSourceData(); - return RENDER_STATUS_OK; -} - -RenderStatus DashMediaSource::InitializeDashSourceData() -{ - m_dashSourceData.codec_ctx = NULL; - m_dashSourceData.decoder = NULL; - m_dashSourceData.av_frame = NULL; - m_dashSourceData.gl_frame = NULL; - m_dashSourceData.conv_ctx = NULL; - m_dashSourceData.fmt_ctx = NULL; - m_dashSourceData.packet = NULL; - m_dashSourceData.stream_idx = -1; - m_dashSourceData.video_stream = NULL; - return RENDER_STATUS_OK; -} - -RenderStatus DashMediaSource::Initialize(struct RenderConfig renderConfig) -{ - if (NULL == 
renderConfig.url) - { - return RENDER_ERROR; - } - //1.initial DashStreaming - DashStreamingClient *pCtxDashStreaming = (DashStreamingClient *)malloc(sizeof(DashStreamingClient)); - if (NULL == pCtxDashStreaming) - { - return RENDER_ERROR; - } - pCtxDashStreaming->media_url = renderConfig.url; - pCtxDashStreaming->cache_path = renderConfig.cachePath; - pCtxDashStreaming->source_type = MultiResSource; - m_handler = OmafAccess_Init(pCtxDashStreaming); - if (NULL == m_handler) - { - LOG(ERROR)<<"handler init failed!"<yaw = 0; - clientInfo.pose->pitch = 0; - clientInfo.viewPort_hFOV = renderConfig.viewportHFOV; - clientInfo.viewPort_vFOV = renderConfig.viewportVFOV; - clientInfo.viewPort_Width = renderConfig.viewportWidth; - clientInfo.viewPort_Height = renderConfig.viewportHeight; - OmafAccess_SetupHeadSetInfo(m_handler, &clientInfo); - //3.load media source - if (ERROR_NONE != OmafAccess_OpenMedia(m_handler, pCtxDashStreaming, false)) - { - LOG(ERROR)<<"Open media failed!"<width = mediaInfo->stream_info[0].width; - // m_dashSourceData.codec_ctx->height = mediaInfo->stream_info[0].height; - if (avcodec_open2(m_dashSourceData.codec_ctx, m_dashSourceData.decoder, NULL) < 0) - { - LOG(ERROR)<<"avcodec open failed!"<duration; - m_mediaSourceInfo.frameRate = dashMediaInfo->stream_info[0].framerate_num / dashMediaInfo->stream_info[0].framerate_den; - m_mediaSourceInfo.frameNum = round(float(m_mediaSourceInfo.duration) / 1000 * m_mediaSourceInfo.frameRate); - m_mediaSourceInfo.hasAudio = dashMediaInfo->streaming_type == MediaType_Audio; - m_mediaSourceInfo.width = dashMediaInfo->stream_info[0].width;//packed - m_mediaSourceInfo.height = dashMediaInfo->stream_info[0].height; - m_mediaSourceInfo.numberOfStreams = dashMediaInfo->stream_count; - // should get from DashAccess lib. 
- m_mediaSourceInfo.sourceWH = new SourceWH; - int32_t sourceNumber = dashMediaInfo->stream_info[0].source_number; - m_mediaSourceInfo.sourceWH->width = new uint32_t[sourceNumber]; - m_mediaSourceInfo.sourceWH->height = new uint32_t[sourceNumber]; - memset(m_mediaSourceInfo.sourceWH->width, 0, sizeof(uint32_t) * sourceNumber); - memset(m_mediaSourceInfo.sourceWH->height, 0, sizeof(uint32_t) * sourceNumber); - for (int i=0;istream_info[0].source_resolution[i].qualityRanking; - m_mediaSourceInfo.sourceWH->width[qualityRanking - 1] = dashMediaInfo->stream_info[0].source_resolution[i].width; - m_mediaSourceInfo.sourceWH->height[qualityRanking - 1] = dashMediaInfo->stream_info[0].source_resolution[i].height; - } - m_mediaSourceInfo.sourceNumber = sourceNumber; - m_mediaSourceInfo.stride = dashMediaInfo->stream_info[0].width; - m_mediaSourceInfo.projFormat = dashMediaInfo->stream_info[0].mProjFormat; - LOG(INFO)<<"------------------------------------------"<width[0]<<"x"<height[0]<mBuffer[i]; - } - *regionInfo = *p->mRegionInfo; - delete p; - p = NULL; - return RENDER_STATUS_OK; -} - -void DashMediaSource::Run() -{ - m_status = STATUS_RUNNING; - while (m_status != STATUS_STOPPED) - { - //vod && full - pthread_mutex_lock(&m_frameMutex); - if (m_sourceType == 1 && m_frameBuffer.size() >= MAX_LIST_NUMBER) - { - pthread_mutex_unlock(&m_frameMutex); - continue; - } - pthread_mutex_unlock(&m_frameMutex); - //1.get packet and decode frame from m_mediaSource - if ((m_sourceType == 1 && m_mediaSourceInfo.frameNum != m_mediaSourceInfo.currentFrameNum) || m_sourceType == 2)// vod and getpacket not over || live - { - RegionWisePacking rwpk; - if (RENDER_STATUS_OK != GetPacket(m_dashSourceData.packet, &rwpk)) - { - // av_free_packet(m_dashSourceData.packet); - continue; - } - if (rwpk.rectRegionPacking != NULL)//just for DASH Source. - { - m_rwpkList.push_back(rwpk);// keep the correct order of rwpk. 
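
Stepping back from the Run() loop for a moment: the OmafDashAccess calls used by this deleted source follow a simple open/pull/close lifecycle. The sketch below strings together only the calls visible above (OmafAccess_Init, SetupHeadSetInfo, OpenMedia, GetPacket, CloseMedia, Close); the header path, the FOV/viewport numbers, and anything not shown in the deleted code are assumptions:

```cpp
#include <cstdlib>
#include <cstring>
#include "OmafDashAccessApi.h"  // assumed to be on the include path, as in the deleted player build

void RunDashSession(const char* url, const char* cachePath)
{
    DashStreamingClient* ctx = (DashStreamingClient*)malloc(sizeof(DashStreamingClient));
    ctx->media_url   = url;
    ctx->cache_path  = cachePath;
    ctx->source_type = MultiResSource;

    auto handler = OmafAccess_Init(ctx);

    HeadSetInfo clientInfo;
    // The original also allocates clientInfo.pose and zeroes yaw/pitch before this call.
    clientInfo.viewPort_hFOV   = 80;   // placeholder values
    clientInfo.viewPort_vFOV   = 80;
    clientInfo.viewPort_Width  = 960;
    clientInfo.viewPort_Height = 960;
    OmafAccess_SetupHeadSetInfo(handler, &clientInfo);

    OmafAccess_OpenMedia(handler, ctx, false);

    DashPacket pkts[5];
    memset(pkts, 0, sizeof(pkts));
    int pktNum = 0;
    uint64_t pts = 0;
    bool needHeaders = true;
    while (ERROR_NONE == OmafAccess_GetPacket(handler, 0, &pkts[0], &pktNum, &pts, needHeaders, false))
    {
        needHeaders = false;
        // hand pkts[0].buf / pkts[0].size and pkts[0].rwpk to the decoder, then release them
    }

    OmafAccess_CloseMedia(handler);
    OmafAccess_Close(handler);
    free(ctx);
}
```
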
- } - } - - uint8_t **buffer = new uint8_t*[4]; - struct RegionInfo *regionInfo = new struct RegionInfo; - - DecoderStatus decoderStatus = GetOneFrame(&buffer[0]); - if (FRAME_ERROR == decoderStatus) - { - // av_free_packet(m_dashSourceData.packet); - av_packet_unref(m_dashSourceData.packet); - delete []buffer; - buffer = NULL; - delete regionInfo; - regionInfo = NULL; - continue; - } - else if (PACKET_ERROR == decoderStatus) - { - if (FRAME_ERROR == FlushFrames(buffer)) // flush end - { - if (buffer != NULL) - { - delete []buffer; - buffer = NULL; - } - if (regionInfo != NULL) - { - delete regionInfo; - regionInfo = NULL; - } - m_status = STATUS_STOPPED; - break; - } - } - // av_free_packet(m_dashSourceData.packet); - av_packet_unref(m_dashSourceData.packet); - LOG(INFO)<<"rwpkList.size: "<regionWisePacking = new RegionWisePacking; - *regionInfo->regionWisePacking = m_rwpkList.front(); - m_rwpkList.erase(m_rwpkList.begin()); - } - SetRegionInfo(regionInfo); - - struct FrameInfo *frameInfo = new struct FrameInfo; - if (NULL == frameInfo) - { - DeleteBuffer(buffer); - if (regionInfo != NULL) - { - ClearRWPK(regionInfo->regionWisePacking); - delete regionInfo; - regionInfo = NULL; - } - return; - } - frameInfo->mBuffer = buffer; - frameInfo->mRegionInfo = regionInfo; - //vod && not full - int32_t res = pthread_mutex_lock(&m_frameMutex); - if (res != 0) - { - DeleteBuffer(buffer); - if (regionInfo != NULL) - { - ClearRWPK(regionInfo->regionWisePacking); - delete regionInfo; - regionInfo = NULL; - } - delete frameInfo; - frameInfo = NULL; - return; - } - if (m_sourceType == 1) - { - m_frameBuffer.push_back(frameInfo); - } - //live - else if (m_sourceType == 2) - { - //live && not full - if (m_frameBuffer.size() < MAX_LIST_NUMBER) - { - m_frameBuffer.push_back(frameInfo); - LOG(INFO)<<"=========not full========="<rectRegionPacking != NULL) - { - delete rwpk->rectRegionPacking; - rwpk->rectRegionPacking = NULL; - } - delete rwpk; - rwpk = NULL; - } -} - -RenderStatus DashMediaSource::ClearPackedData(struct FrameInfo *frameInfo) -{ - DeleteBuffer(frameInfo->mBuffer); - if (frameInfo->mRegionInfo != NULL) - { - if (frameInfo->mRegionInfo->regionWisePacking != NULL) - { - if (frameInfo->mRegionInfo->regionWisePacking->rectRegionPacking != NULL) - { - delete [] frameInfo->mRegionInfo->regionWisePacking->rectRegionPacking; - frameInfo->mRegionInfo->regionWisePacking->rectRegionPacking = NULL; - } - delete frameInfo->mRegionInfo->regionWisePacking; - frameInfo->mRegionInfo->regionWisePacking = NULL; - } - delete frameInfo->mRegionInfo; - frameInfo->mRegionInfo = NULL; - } - return RENDER_STATUS_OK; -} - -VCD_NS_END -#endif//LOW_LATENCY_USAGE diff --git a/src/player/FFmpegMediaSource.cpp b/src/player/FFmpegMediaSource.cpp deleted file mode 100644 index 1e4754c8..00000000 --- a/src/player/FFmpegMediaSource.cpp +++ /dev/null @@ -1,701 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file FFmpegMediaSource.cpp -//! \brief Implement class for FFmpegMediaSource. -//! -#ifdef USE_DMA_BUFFER -#include "FFmpegMediaSource.h" -// hard code 3x3 + 6x3 -// low reso config -#define SOURCENUMBER 2 -#define REGIONNUMBER 27 -#define FULLWIDTH 7680 -#define FULLHEIGHT 3840 -#define PACKEDWIDTH 5760 -#define PACKEDHEIGHT 3840 -#define LOWWIDTH 3840 -#define LOWHEIGHT 1920 - -#include -#include - -#include -clock_t start, stop; -double duration; - -VABufferInfo buffer_info; -VAImage va_image; - -VCD_NS_BEGIN - -FFmpegMediaSource::FFmpegMediaSource() -{ - InitializeFFmpegSourceData(); -} - -FFmpegMediaSource::~FFmpegMediaSource() -{ -} -static enum AVPixelFormat hw_pix_fmt; -static AVBufferRef *hw_device_ctx = NULL; - -static enum AVPixelFormat get_hw_format(AVCodecContext *ctx, - const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != -1; p++) { - if (*p == hw_pix_fmt) - return *p; - } - - LOG(ERROR)<<"Failed to get HW surface format."<format == hw_pix_fmt) { - /* retrieve data from GPU to CPU */ - start = clock(); - if ((ret = av_hwframe_transfer_data(sw_frame, frame, 0)) < 0) { - LOG(ERROR)<<"av_hwframe_transfer_data error."<format, tmp_frame->width, - tmp_frame->height, 1); - buffer = (uint8_t * )av_malloc(size); - if (!buffer) { - LOG(ERROR)<<"Can not alloc buffer."<data, - (const int *)tmp_frame->linesize, (AVPixelFormat)tmp_frame->format, - tmp_frame->width, tmp_frame->height, 1); - stop = clock(); - duration = ((double)(stop - start))/CLOCKS_PER_SEC; - LOG(INFO)<<"av_image_copy_to_buffer " << duration << " seconds"<format == hw_pix_fmt) { - vaSyncSurface(display, (VASurfaceID)(long)frame->data[3]); - vaDeriveImage(display, (VASurfaceID)(long)frame->data[3], &va_image); - buffer_info.mem_type = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME; - vaAcquireBufferHandle(display, va_image.buf, &buffer_info); - } - -#if DUMP_FRAME //dump a frame - static int frame_count2=0; - LOG(INFO)<<"frame_count2 = "<< frame_count2 <stream_index == m_ffmpegSourceData.stream_idx) - { - ret = decode_write(m_ffmpegSourceData.codec_ctx,m_ffmpegSourceData.packet, (void **)m_ffmpegSourceData.av_frame->data); - - } - // av_free_packet(m_ffmpegSourceData.packet); - av_packet_unref(m_ffmpegSourceData.packet); - } - else if (m_mediaSourceInfo.pixFormat == PixelFormat::AV_PIX_FMT_NV12_DMA_BUFFER) //hardware decoding +dma buffer sharing - { - - LOG(INFO)<<"hw decoding + dma buffer sharing."<stream_index == m_ffmpegSourceData.stream_idx) - { - ret = decode_write2(m_ffmpegSourceData.codec_ctx,m_ffmpegSourceData.packet, (void **)m_ffmpegSourceData.av_frame->data); - - } - // av_free_packet(m_ffmpegSourceData.packet); - av_packet_unref(m_ffmpegSourceData.packet); - } - 
else // software decoding - { - LOG(INFO)<<"sw decoding."<stream_index == m_ffmpegSourceData.stream_idx) - { - int32_t frame_finished = 0; //should set 0 for software decoding - if (avcodec_decode_video2(m_ffmpegSourceData.codec_ctx, m_ffmpegSourceData.av_frame, &frame_finished, m_ffmpegSourceData.packet) < 0) - { - // av_free_packet(m_ffmpegSourceData.packet); - av_packet_unref(m_ffmpegSourceData.packet); - return RENDER_ERROR; - } - - /* - if (frame_finished) //render.cpp - { - // ABANDON colorSpace Conversion using SW. - // if (!m_ffmpegSourceData.conv_ctx) - // { - // m_ffmpegSourceData.conv_ctx = sws_getContext(m_ffmpegSourceData.codec_ctx->width, - // m_ffmpegSourceData.codec_ctx->height, m_ffmpegSourceData.codec_ctx->pix_fmt, - // m_ffmpegSourceData.codec_ctx->width, m_ffmpegSourceData.codec_ctx->height, AV_PIX_FMT_RGB24, - // SWS_BICUBIC, NULL, NULL, NULL); - // } - // sws_scale(m_ffmpegSourceData.conv_ctx, m_ffmpegSourceData.av_frame->data, m_ffmpegSourceData.av_frame->linesize, 0, - // m_ffmpegSourceData.codec_ctx->height, m_ffmpegSourceData.gl_frame->data, m_ffmpegSourceData.gl_frame->linesize); - } - else - { - return RENDER_ERROR; - }*/ - } - // av_free_packet(m_ffmpegSourceData.packet); - av_packet_unref(m_ffmpegSourceData.packet); - } while ((m_ffmpegSourceData.packet->stream_index != m_ffmpegSourceData.stream_idx)); - - } - - //buffer allocated. - LOG(INFO)<<"m_mediaSourceInfo.pixFormat= "<< m_mediaSourceInfo.pixFormat << "ret= "<< ret <width * m_ffmpegSourceData.codec_ctx->height * 3); - buffer[0] = m_ffmpegSourceData.av_frame->data[0]; - break; - case PixelFormat::PIX_FMT_YUV420P: - //buffer[0] = (uint8_t *)malloc(sizeof(uint8_t) * m_ffmpegSourceData.codec_ctx->width * m_ffmpegSourceData.codec_ctx->height); - buffer[0] = m_ffmpegSourceData.av_frame->data[0]; - //buffer[1] = (uint8_t *)malloc(sizeof(uint8_t) * m_ffmpegSourceData.codec_ctx->width * m_ffmpegSourceData.codec_ctx->height * 0.25); - buffer[1] = m_ffmpegSourceData.av_frame->data[1]; - //buffer[2] = (uint8_t *)malloc(sizeof(uint8_t) * m_ffmpegSourceData.codec_ctx->width * m_ffmpegSourceData.codec_ctx->height * 0.25); - buffer[2] = m_ffmpegSourceData.av_frame->data[2]; - break; - case PixelFormat::AV_PIX_FMT_NV12: - case PixelFormat::AV_PIX_FMT_NV12_DMA_BUFFER: - buffer[0] = m_ffmpegSourceData.av_frame->data[0]; - buffer[1] = m_ffmpegSourceData.av_frame->data[0]+ m_ffmpegSourceData.codec_ctx->width *m_ffmpegSourceData.codec_ctx->height; - break; - - default: - LOG(ERROR)<<"Wrong PixelFormat."<sourceNumber = SOURCENUMBER; - regionInfo->regionWisePacking = (RegionWisePacking *)malloc(sizeof(RegionWisePacking)); - if (NULL == regionInfo->regionWisePacking) - { - return RENDER_ERROR; - } - regionInfo->regionWisePacking->numRegions = REGIONNUMBER; - regionInfo->regionWisePacking->projPicWidth = FULLWIDTH; - regionInfo->regionWisePacking->projPicHeight = FULLHEIGHT; - regionInfo->regionWisePacking->packedPicWidth = PACKEDWIDTH; - regionInfo->regionWisePacking->packedPicHeight = PACKEDHEIGHT; - regionInfo->regionWisePacking->rectRegionPacking = (RectangularRegionWisePacking *)malloc(sizeof(RectangularRegionWisePacking) * regionInfo->regionWisePacking->numRegions); - if (NULL == regionInfo->regionWisePacking->rectRegionPacking) - { - return RENDER_ERROR; - } - uint32_t highStep = 1280; - uint32_t highReso = 1280; - // hard code to set rwpk - for (uint32_t i = 0; i < 9; i++) - { - regionInfo->regionWisePacking->rectRegionPacking[i].packedRegWidth = highReso; - 
regionInfo->regionWisePacking->rectRegionPacking[i].packedRegHeight = highReso; - regionInfo->regionWisePacking->rectRegionPacking[i].packedRegLeft = (i % 3) * highStep; - regionInfo->regionWisePacking->rectRegionPacking[i].packedRegTop = i / 3 * highStep; - - regionInfo->regionWisePacking->rectRegionPacking[i].projRegWidth = highReso; - regionInfo->regionWisePacking->rectRegionPacking[i].projRegHeight = highReso; - regionInfo->regionWisePacking->rectRegionPacking[i].projRegLeft = (i % 3) * highStep; - regionInfo->regionWisePacking->rectRegionPacking[i].projRegTop = i / 3 * highStep; - } - uint32_t lowStep = 640; - uint32_t lowReso = 640; - for (uint32_t i = 0; i < 18; i++) - { - regionInfo->regionWisePacking->rectRegionPacking[i + 9].packedRegWidth = lowReso; - regionInfo->regionWisePacking->rectRegionPacking[i + 9].packedRegHeight = lowReso; - regionInfo->regionWisePacking->rectRegionPacking[i + 9].packedRegLeft = highReso * 3 + i / 6 * lowStep; - regionInfo->regionWisePacking->rectRegionPacking[i + 9].packedRegTop = (i % 6) * lowStep; - - regionInfo->regionWisePacking->rectRegionPacking[i + 9].projRegWidth = lowReso; - regionInfo->regionWisePacking->rectRegionPacking[i + 9].projRegHeight = lowReso; - regionInfo->regionWisePacking->rectRegionPacking[i + 9].projRegLeft = (i % 6) * lowStep; - regionInfo->regionWisePacking->rectRegionPacking[i + 9].projRegTop = i / 6 * lowStep; - } - regionInfo->sourceInfo = (struct SourceInfo *)malloc(sizeof(struct SourceInfo) * regionInfo->sourceNumber); - if (NULL == regionInfo->sourceInfo) - { - return RENDER_ERROR; - } - regionInfo->sourceInfo[0].sourceWidth = regionInfo->regionWisePacking->projPicWidth; - regionInfo->sourceInfo[0].sourceHeight = regionInfo->regionWisePacking->projPicHeight; - regionInfo->sourceInfo[0].tileColumnNumber = regionInfo->sourceInfo[0].sourceWidth / regionInfo->regionWisePacking->rectRegionPacking[0].projRegWidth; - regionInfo->sourceInfo[0].tileRowNumber = regionInfo->sourceInfo[0].sourceHeight / regionInfo->regionWisePacking->rectRegionPacking[0].projRegHeight; - //low reso tile hard code - regionInfo->sourceInfo[1].sourceWidth = LOWWIDTH; - regionInfo->sourceInfo[1].sourceHeight = LOWHEIGHT; - regionInfo->sourceInfo[1].tileColumnNumber = regionInfo->sourceInfo[1].sourceWidth / regionInfo->regionWisePacking->rectRegionPacking[9].projRegWidth; - regionInfo->sourceInfo[1].tileRowNumber = regionInfo->sourceInfo[1].sourceHeight / regionInfo->regionWisePacking->rectRegionPacking[9].projRegHeight; - return RENDER_STATUS_OK; -} - -RenderStatus FFmpegMediaSource::ClearFFmpegSourceData() -{ - if (m_ffmpegSourceData.fmt_ctx) - avformat_free_context(m_ffmpegSourceData.fmt_ctx); - if (m_ffmpegSourceData.codec_ctx) - avcodec_close(m_ffmpegSourceData.codec_ctx); - if (m_ffmpegSourceData.packet) - av_free(m_ffmpegSourceData.packet); - if (m_ffmpegSourceData.av_frame) - av_free(m_ffmpegSourceData.av_frame); - if (m_ffmpegSourceData.gl_frame) - av_free(m_ffmpegSourceData.gl_frame); - InitializeFFmpegSourceData(); - return RENDER_STATUS_OK; -} - -RenderStatus FFmpegMediaSource::InitializeFFmpegSourceData() -{ - m_ffmpegSourceData.fmt_ctx = NULL; - m_ffmpegSourceData.stream_idx = -1; - m_ffmpegSourceData.video_stream = NULL; - m_ffmpegSourceData.codec_ctx = NULL; - m_ffmpegSourceData.decoder = NULL; - m_ffmpegSourceData.packet = NULL; - m_ffmpegSourceData.av_frame = NULL; - m_ffmpegSourceData.gl_frame = NULL; - m_ffmpegSourceData.conv_ctx = NULL; - return RENDER_STATUS_OK; -} - -RenderStatus FFmpegMediaSource::Initialize(struct RenderConfig 
renderConfig) -{ - int ret, video_stream; - // initialize libav - // av_register_all(); - avformat_network_init(); - // open video - if (avformat_open_input(&m_ffmpegSourceData.fmt_ctx, renderConfig.url, NULL, NULL) < 0) - { - std::cout << "failed to open input" << std::endl; - ClearFFmpegSourceData(); - return RENDER_ERROR; - } - // find stream info - if (avformat_find_stream_info(m_ffmpegSourceData.fmt_ctx, NULL) < 0) - { - std::cout << "failed to get stream info" << std::endl; - ClearFFmpegSourceData(); - return RENDER_ERROR; - } - // dump debug info - av_dump_format(m_ffmpegSourceData.fmt_ctx, 0, renderConfig.url, 0); - // find the video stream - for (unsigned int i = 0; i < m_ffmpegSourceData.fmt_ctx->nb_streams; i++) - { - if (m_ffmpegSourceData.fmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) - { - m_ffmpegSourceData.stream_idx = i; - break; - } - } - if (m_ffmpegSourceData.stream_idx == -1) - { - std::cout << "failed to find video stream" << std::endl; - ClearFFmpegSourceData(); - return RENDER_ERROR; - } - m_ffmpegSourceData.video_stream = m_ffmpegSourceData.fmt_ctx->streams[m_ffmpegSourceData.stream_idx]; - m_ffmpegSourceData.codec_ctx = m_ffmpegSourceData.video_stream->codec; - // find the decoder - //m_ffmpegSourceData.decoder = avcodec_find_decoder(m_ffmpegSourceData.codec_ctx->codec_id); - ret = av_find_best_stream(m_ffmpegSourceData.fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &m_ffmpegSourceData.decoder, 0); - if (ret<0) - { - fprintf(stderr, "Cannot find a video stream in the input file\n"); - return RENDER_ERROR; - } - video_stream = ret; - - if (m_ffmpegSourceData.decoder == NULL) - { - std::cout << "failed to find decoder" << std::endl; - ClearFFmpegSourceData(); - return RENDER_ERROR; - } - - if(renderConfig.decoderType == VAAPI_DECODER) // HW decoder - { - for (int i = 0;; i++) { - const AVCodecHWConfig *config = avcodec_get_hw_config(m_ffmpegSourceData.decoder,i); - if (!config) { - fprintf(stderr, "Decoder %s does not support device type %s.\n", - m_ffmpegSourceData.decoder->name, av_hwdevice_get_type_name(AV_HWDEVICE_TYPE_VAAPI)); - return RENDER_ERROR; - } - if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && - config->device_type == AV_HWDEVICE_TYPE_VAAPI) { - hw_pix_fmt = config->pix_fmt; - break; - } - } - } - - if (!(m_ffmpegSourceData.codec_ctx = avcodec_alloc_context3(m_ffmpegSourceData.decoder))) - return RENDER_ERROR; - - m_ffmpegSourceData.video_stream = m_ffmpegSourceData.fmt_ctx->streams[video_stream]; - if ((ret = avcodec_parameters_to_context(m_ffmpegSourceData.codec_ctx, m_ffmpegSourceData.video_stream->codecpar)) < 0) { - LOG(ERROR)<<"avcodec_parameters_to_context error. Error code: %s\n"<get_format = get_hw_format; - - if(renderConfig.decoderType == VAAPI_DECODER) // HW decoder - { - int ret_value = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, NULL, NULL, 0); - if (ret_value < 0) { - LOG(ERROR)<<"Failed to create a VAAPI device. 
Error code: %s\n"<get_format = get_hw_format; - } - if (hw_device_ctx) - m_ffmpegSourceData.codec_ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx); - else - m_ffmpegSourceData.codec_ctx->hw_device_ctx = NULL; - if (!m_ffmpegSourceData.codec_ctx->hw_device_ctx) { - fprintf(stderr, "A hardware device reference create failed.\n"); - } - else - fprintf(stderr, "A hardware device reference create success.\n"); - } - else // SW decoder - m_mediaSourceInfo.pixFormat = PixelFormat::PIX_FMT_YUV420P; - - // open the decoder - if (avcodec_open2(m_ffmpegSourceData.codec_ctx, m_ffmpegSourceData.decoder, NULL) < 0) - { - std::cout << "failed to open codec" << std::endl; - ClearFFmpegSourceData(); - return RENDER_ERROR; - } - // allocate the video frames - m_ffmpegSourceData.av_frame = av_frame_alloc(); - m_ffmpegSourceData.gl_frame = av_frame_alloc(); - int32_t size = avpicture_get_size(AV_PIX_FMT_RGB24, m_ffmpegSourceData.codec_ctx->width, m_ffmpegSourceData.codec_ctx->height); - uint8_t *internal_buffer = (uint8_t *)av_malloc(size * sizeof(uint8_t)); - avpicture_fill((AVPicture *)m_ffmpegSourceData.gl_frame, internal_buffer, AV_PIX_FMT_RGB24, m_ffmpegSourceData.codec_ctx->width, m_ffmpegSourceData.codec_ctx->height); - m_ffmpegSourceData.packet = (AVPacket *)av_malloc(sizeof(AVPacket)); - m_sourceType = MediaSourceType::SOURCE_VOD; - if (RENDER_STATUS_OK != SetMediaSourceInfo(NULL)) - { - return RENDER_ERROR; - } - return RENDER_STATUS_OK; -} - -struct MediaSourceInfo FFmpegMediaSource::GetMediaSourceInfo() -{ - return m_mediaSourceInfo; -} - -void *FFmpegMediaSource::GetSourceMetaData() -{ - return &m_ffmpegSourceData; -} - -bool FFmpegMediaSource::IsEOS() -{ - if (m_sourceType == MediaSourceType::SOURCE_VOD) - { - std::cout << "frame:" << m_mediaSourceInfo.currentFrameNum << std::endl; - return m_mediaSourceInfo.currentFrameNum == m_mediaSourceInfo.frameNum; - } - else if (m_sourceType == MediaSourceType::SOURCE_LIVE) - { - return false; //live video should be controlled by player(pause and play, etc) will be added soon. 
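
The decoder setup above (find the stream, query avcodec_get_hw_config for a VAAPI entry, create a hardware device context, and override get_format) is the standard FFmpeg hardware-decode pattern. A condensed sketch of just that setup, with error handling trimmed; the caller is expected to supply the AVCodec/AVCodecParameters found via av_find_best_stream as in the code above:

```cpp
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>
}

static enum AVPixelFormat g_hwPixFmt = AV_PIX_FMT_NONE;

// get_format callback: pick the hardware format reported by the decoder's VAAPI config.
static enum AVPixelFormat GetHwFormat(AVCodecContext*, const enum AVPixelFormat* fmts)
{
    for (const enum AVPixelFormat* p = fmts; *p != AV_PIX_FMT_NONE; p++)
        if (*p == g_hwPixFmt)
            return *p;
    return AV_PIX_FMT_NONE;
}

static AVCodecContext* OpenVaapiDecoder(const AVCodec* decoder, const AVCodecParameters* par)
{
    for (int i = 0;; i++)
    {
        const AVCodecHWConfig* cfg = avcodec_get_hw_config(decoder, i);
        if (!cfg)
            return nullptr;  // this decoder has no VAAPI support
        if ((cfg->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) &&
            cfg->device_type == AV_HWDEVICE_TYPE_VAAPI)
        {
            g_hwPixFmt = cfg->pix_fmt;  // normally AV_PIX_FMT_VAAPI
            break;
        }
    }

    AVCodecContext* ctx = avcodec_alloc_context3(decoder);
    avcodec_parameters_to_context(ctx, par);
    ctx->get_format = GetHwFormat;

    AVBufferRef* hwDevice = nullptr;
    if (av_hwdevice_ctx_create(&hwDevice, AV_HWDEVICE_TYPE_VAAPI, nullptr, nullptr, 0) < 0)
        return nullptr;
    ctx->hw_device_ctx = av_buffer_ref(hwDevice);  // codec context keeps its own reference
    av_buffer_unref(&hwDevice);

    return avcodec_open2(ctx, decoder, nullptr) == 0 ? ctx : nullptr;
}
```

Decoded AVFrames then carry hardware surfaces, which the deleted code either downloads with av_hwframe_transfer_data or exports through the dma-buf path shown earlier.
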
- } - return false; -} - -RenderStatus FFmpegMediaSource::SetMediaSourceInfo(void *mediaInfo) -{ - m_mediaSourceInfo.width = m_ffmpegSourceData.codec_ctx->width; - m_mediaSourceInfo.height = m_ffmpegSourceData.codec_ctx->height; - m_mediaSourceInfo.projFormat = VCD::OMAF::PF_ERP; //hard code - //m_mediaSourceInfo.pixFormat = PixelFormat::AV_PIX_FMT_NV12; //PixelFormat::PIX_FMT_YUV420P; //hard code for hw - m_mediaSourceInfo.hasAudio = false; - m_mediaSourceInfo.audioChannel = 0; - m_mediaSourceInfo.numberOfStreams = 1; - m_mediaSourceInfo.stride = m_ffmpegSourceData.codec_ctx->width; - m_mediaSourceInfo.frameNum = 100; //hard code - m_mediaSourceInfo.currentFrameNum = 0; - m_mediaSourceInfo.sourceWH = new SourceWH; - m_mediaSourceInfo.sourceWH->width = new uint32_t[SOURCENUMBER]; - m_mediaSourceInfo.sourceWH->width[0] = FULLWIDTH; //hard - m_mediaSourceInfo.sourceWH->width[1] = LOWWIDTH; - m_mediaSourceInfo.sourceWH->height = new uint32_t[SOURCENUMBER]; - m_mediaSourceInfo.sourceWH->height[0] = FULLHEIGHT; - m_mediaSourceInfo.sourceWH->height[1] = LOWHEIGHT; - if (m_ffmpegSourceData.fmt_ctx->duration != AV_NOPTS_VALUE) - { - m_mediaSourceInfo.duration = m_ffmpegSourceData.video_stream->duration * (1.0 * m_ffmpegSourceData.video_stream->time_base.num / m_ffmpegSourceData.video_stream->time_base.den); - } - if (m_ffmpegSourceData.video_stream->r_frame_rate.den > 0) - { - m_mediaSourceInfo.frameRate = m_ffmpegSourceData.video_stream->r_frame_rate.num / m_ffmpegSourceData.video_stream->r_frame_rate.den; - } - isAllValid = true; - return RENDER_STATUS_OK; -} - -RenderStatus FFmpegMediaSource::ChangeViewport(float yaw, float pitch) -{ - //DO NOTHING - return RENDER_STATUS_OK; -} - -//use in dash media source fifo. -//ffmpegmediasource has no fifo. -//do nothing. -void FFmpegMediaSource::DeleteBuffer(uint8_t **buffer) -{ - return; -} - -void FFmpegMediaSource::ClearRWPK(RegionWisePacking *rwpk) -{ - return; -} - -VCD_NS_END -#endif /* USE_DMA_BUFFER */ \ No newline at end of file diff --git a/src/player/FFmpegMediaSource.h b/src/player/FFmpegMediaSource.h deleted file mode 100644 index 2a318dec..00000000 --- a/src/player/FFmpegMediaSource.h +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file FFmpegMediaSource.h -//! \brief Defines class for FFmpegMediaSource. -//! -#ifndef _FFMPEGMEDIASOURCE_H_ -#define _FFMPEGMEDIASOURCE_H_ - -#include "MediaSource.h" - -VCD_NS_BEGIN - -class FFmpegMediaSource -: public MediaSource -{ -public: - FFmpegMediaSource(); - virtual ~FFmpegMediaSource(); - //! \brief Initial in DashMediaInfo - //! - //! \param [in] struct RenderConfig - //! render configuration - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus Initialize(struct RenderConfig renderConfig); - //! \brief Get a frame from the Media Source - //! - //! [out] uint8_t ** - //! the frame buffer - //! [out] struct RegionInfo * - //! regionInfo - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus GetFrame(uint8_t **buffer, struct RegionInfo *regionInfo); - //! \brief Set Media Source Info - //! - //! \param [in] void * - //! mediaInfo - //! - //! \return void * - //! - virtual RenderStatus SetMediaSourceInfo(void *mediaInfo); - //! \brief Get Media Source Info - //! - //! \return struct MediaSourceInfo - //! - virtual struct MediaSourceInfo GetMediaSourceInfo(); - //! \brief Get SourceMetaData - //! - //! \return void* - //! - virtual void* GetSourceMetaData(); - //! \brief Check is file ends - //! - //! \return bool - //! - virtual bool IsEOS(); - //! \brief set yaw and pitch to change Viewport - //! - //! \param [in] float - //! yaw angle - //! [in] pitch - //! pitch angle - //! - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus ChangeViewport(float yaw, float pitch); - //! \brief set region information - //! - //! \param [in] struct RegionInfo* - //! regionInfo - //! - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus SetRegionInfo(struct RegionInfo* regionInfo); - //! \brief delete buffer data - //! - //! \param [in] uint8_t ** - //! buffer - //! - virtual void DeleteBuffer(uint8_t **buffer); - //! \brief delete Region Wise Packing data - //! - //! \param [in] RegionWisePacking * - //! rwpk - //! - virtual void ClearRWPK(RegionWisePacking *rwpk); - -private: - - struct SourceData m_ffmpegSourceData; - - RenderStatus ClearFFmpegSourceData(); - - RenderStatus InitializeFFmpegSourceData(); - -}; - -VCD_NS_END -#endif /* _FFMPEGMEDIASOURCE_H_ */ - diff --git a/src/player/GLFWRenderContext.cpp b/src/player/GLFWRenderContext.cpp deleted file mode 100644 index 1df7958a..00000000 --- a/src/player/GLFWRenderContext.cpp +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. 
- * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file GLFWRenderContext.cpp -//! \brief Implement class for GLFWRenderContext. -//! - -#include "GLFWRenderContext.h" - -VCD_NS_BEGIN - -GLFWRenderContext::GLFWRenderContext() -{ - m_renderContextType = GLFW_CONTEXT; -} - -GLFWRenderContext::GLFWRenderContext(uint32_t width, uint32_t height) -{ - m_renderContextType = GLFW_CONTEXT; - m_windowWidth = width; - m_windowHeight = height; -} - -GLFWRenderContext::~GLFWRenderContext() -{ -} - -RenderStatus GLFWRenderContext::SwapBuffers(void * window, int param) -{ - glfwSwapBuffers((GLFWwindow *)m_window); - glfwPollEvents(); - - return RENDER_STATUS_OK; -} - -bool GLFWRenderContext::isRunning() -{ - return !glfwGetKey((GLFWwindow *)m_window, GLFW_KEY_ESCAPE) && !glfwWindowShouldClose((GLFWwindow *)m_window); -} - -RenderStatus GLFWRenderContext::GetStatusAndPose(float *yaw, float *pitch, uint32_t* status) -{ - static glm::vec2 transfer(0, RENDER_PI); - glm::vec3 direction(0.0f, 0.0f, 1.0f); - double xpos, ypos; - - glfwGetCursorPos((GLFWwindow *)m_window, &xpos, &ypos); - glfwSetCursorPos((GLFWwindow *)m_window, m_windowWidth / 2, m_windowHeight / 2); - if (glfwGetMouseButton((GLFWwindow *)m_window, GLFW_MOUSE_BUTTON_LEFT)) - { - m_horizontalAngle += m_mouseSpeed * float(m_windowWidth / 2 - xpos); - m_verticalAngle += m_mouseSpeed * float(m_windowHeight / 2 - ypos); - if (m_verticalAngle > RENDER_PI / 2) - m_verticalAngle = RENDER_PI / 2; - if (m_verticalAngle < -RENDER_PI / 2) - m_verticalAngle = -RENDER_PI / 2; - } - if (glfwGetKey((GLFWwindow *)m_window, GLFW_KEY_UP) == GLFW_PRESS) - { - m_verticalAngle = transfer.x; - m_verticalAngle -= 3 * m_speed; - - if (m_verticalAngle > RENDER_PI / 2) - m_verticalAngle = RENDER_PI / 2; - if (m_verticalAngle < -RENDER_PI / 2) - m_verticalAngle = -RENDER_PI / 2; - } - if (glfwGetKey((GLFWwindow *)m_window, GLFW_KEY_DOWN) == GLFW_PRESS) - { - m_verticalAngle = transfer.x; - m_verticalAngle += 3 * m_speed; - if (m_verticalAngle > RENDER_PI / 2) - m_verticalAngle = RENDER_PI / 2; - if (m_verticalAngle < -RENDER_PI / 2) - m_verticalAngle = -RENDER_PI / 2; - } - if (glfwGetKey((GLFWwindow *)m_window, GLFW_KEY_RIGHT) == GLFW_PRESS) - { - m_horizontalAngle = transfer.y; - m_horizontalAngle -= 3 * m_speed; - } - if (glfwGetKey((GLFWwindow *)m_window, GLFW_KEY_LEFT) == GLFW_PRESS) - { - m_horizontalAngle = transfer.y; - m_horizontalAngle += 3 * m_speed; - } - if (glfwGetKey((GLFWwindow *)m_window, GLFW_KEY_S) == GLFW_PRESS) - { - *status = PLAY; - } - if (glfwGetKey((GLFWwindow *)m_window, 
GLFW_KEY_P) == GLFW_PRESS) - { - *status = PAUSE; - } - direction = glm::vec3( - cos(m_verticalAngle) * sin(m_horizontalAngle), //1*0 - sin(m_verticalAngle), //0 - cos(m_verticalAngle) * cos(m_horizontalAngle) //1*1 - ); - - glm::vec3 right = glm::vec3( - sin(m_horizontalAngle - RENDER_PI / 2.0f), - 0, - cos(m_horizontalAngle - RENDER_PI / 2.0f)); - // glm::vec3 right = glm::cross( direction, glm::vec3(0.0,1.0,0.0)); - glm::vec3 up = glm::cross(right, direction); - float FoV = m_initialFoV; // - 5 * glfwGetMouseWheel(); - float aspect = float(m_windowWidth) / m_windowHeight; - m_projectionMatrix = glm::perspective(glm::radians(-2 * FoV), aspect, 0.01f, 1000.0f); - m_viewModelMatrix = glm::lookAt( - // position, // Camera is here - glm::vec3(0, 0, 0), - // position+direction, // and looks here : at the same position, plus "direction" - direction, - up // Head is up (set to 0,-1,0 to look upside-down) - ); - transfer.x = m_verticalAngle; - transfer.y = m_horizontalAngle; - // eular angle and (longitude, latitude) transformation. - float r = sqrt(direction[0] * direction[0] + direction[1] * direction[1] + direction[2] * direction[2]); - float longitude = atan2(direction[0], -direction[2]); - float latitude = asin(-direction[1] / r); - float u = 0.5f - longitude / 2 / RENDER_PI; - float v = 0.5f - latitude / RENDER_PI; - *yaw = (u - 0.5f) * 360; - *pitch = (0.5f - v) * 180; - - return RENDER_STATUS_OK; -} - -RenderStatus GLFWRenderContext::InitContext() -{ - // initialize glfw - if (!glfwInit()) - { - LOG(ERROR)<< "glfw failed to init" << std::endl; - glfwTerminate(); - return RENDER_ERROR; - } - //tranverse(); - // open a window - //glfwWindowHint(GLFW_SAMPLES, 4); - glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); - glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2); - glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); - glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); - m_window = glfwCreateWindow(m_windowWidth, m_windowHeight, "VR Player", NULL, NULL); //done - if (!m_window) - { - LOG(ERROR)<< "failed to open window" << std::endl; - glfwTerminate(); - return RENDER_ERROR; - } - glfwMakeContextCurrent((GLFWwindow *)m_window); - glfwSetInputMode((GLFWwindow *)m_window, GLFW_STICKY_KEYS, GL_TRUE); - glfwSetInputMode((GLFWwindow *)m_window, GLFW_CURSOR, GLFW_CURSOR_DISABLED); - - glfwPollEvents(); - glfwSetCursorPos((GLFWwindow *)m_window, m_windowWidth / 2, m_windowHeight / 2); //done - - // initialize opengl - // glClearColor(0.0f, 0.0f, 0.0f, 0.0f); - // glClear(GL_DEPTH_BUFFER_BIT|GL_COLOR_BUFFER_BIT); - glEnable(GL_TEXTURE_2D); - glEnable(GL_DEPTH_TEST); - glDepthFunc(GL_LESS); - glEnable(GL_CULL_FACE); - glFrontFace(GL_CCW); - glCullFace(GL_BACK); - - return RENDER_STATUS_OK; -} - -VCD_NS_END diff --git a/src/player/HWRenderSource.cpp b/src/player/HWRenderSource.cpp deleted file mode 100644 index 0d52de63..00000000 --- a/src/player/HWRenderSource.cpp +++ /dev/null @@ -1,299 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file HWRenderSource.cpp -//! \brief Implement class for HWRenderSource. -//! -#ifdef USE_DMA_BUFFER -#include "HWRenderSource.h" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "Render2TextureMesh.h" - -#include -extern clock_t start, stop; -extern double duration; - -VCD_NS_BEGIN - -HWRenderSource::HWRenderSource(RenderBackend *renderBackend) -{ - //1.render to texture : vertex and texCoords assign - m_videoShaderOfR2T.Bind(); - m_meshOfR2T = new Render2TextureMesh(); - m_meshOfR2T->Create(); - uint32_t vertexAttribOfR2T = m_videoShaderOfR2T.SetAttrib("vPosition"); - uint32_t texCoordsAttribOfR2T = m_videoShaderOfR2T.SetAttrib("aTexCoord"); - m_meshOfR2T->Bind(renderBackend, vertexAttribOfR2T, texCoordsAttribOfR2T); -} - -HWRenderSource::~HWRenderSource() -{ - if (m_meshOfR2T) - { - delete m_meshOfR2T; - m_meshOfR2T = NULL; - } -} - -RenderStatus HWRenderSource::Initialize(struct MediaSourceInfo *mediaSourceInfo) -{ - if (NULL == mediaSourceInfo) - { - return RENDER_ERROR; - } - uint32_t number = 0; - struct SourceWH packedWH; - switch (mediaSourceInfo->pixFormat) - { - case PixelFormat::PIX_FMT_RGB24: - number = 1; - packedWH.width = new uint32_t[number]; - packedWH.height = new uint32_t[number]; - packedWH.width[0] = mediaSourceInfo->width; - packedWH.height[0] = mediaSourceInfo->height; - break; - case PixelFormat::PIX_FMT_YUV420P: - number = 3; - packedWH.width = new uint32_t[number]; - packedWH.height = new uint32_t[number]; - packedWH.width[0] = mediaSourceInfo->width; - packedWH.width[1] = packedWH.width[0] / 2; - packedWH.width[2] = packedWH.width[1]; - packedWH.height[0] = mediaSourceInfo->height; - packedWH.height[1] = packedWH.height[0] / 2; - packedWH.height[2] = packedWH.height[1]; - break; - case PixelFormat::AV_PIX_FMT_NV12: - number = 2; - packedWH.width = new uint32_t[number]; - packedWH.height = new uint32_t[number]; - packedWH.width[0] = mediaSourceInfo->width; - packedWH.width[1] = packedWH.width[0] / 2; - packedWH.height[0] = mediaSourceInfo->height; - packedWH.height[1] = packedWH.height[0] / 2; - break; - default: - break; - } - SetSourceWH(packedWH); - SetSourceTextureNumber(number); - return RENDER_STATUS_OK; -} - -RenderStatus HWRenderSource::CreateRenderSource(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - if (CreateSourceTex(renderBackend) != RENDER_STATUS_OK || CreateR2TFBO(renderBackend) != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - return RENDER_STATUS_OK; -} - -RenderStatus HWRenderSource::CreateSourceTex(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - //1. initial r2t three textures. 
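// (Editorial note, not part of the original file.) The per-plane sizes set up
// in Initialize() above drive this texture loop: for a hypothetical 3840x1920
// NV12 source, plane 0 becomes a 3840x1920 GL_RED texture (Y) and plane 1 a
// 1920x960 GL_RG texture (interleaved UV), which is why the i == 1 /
// two-texture case below uploads with GL_RG instead of GL_RED.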
- uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = new uint32_t[sourceTextureNumber]; - renderBackend->GenTextures(sourceTextureNumber, sourceTextureHandle); - SetSourceTextureHandle(sourceTextureHandle); - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - struct SourceWH sourceWH = GetSourceWH(); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[i]); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - if (i == 1 && sourceTextureNumber == 2) //hardware decoding - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_RG, sourceWH.width[i], sourceWH.height[i], 0, GL_RG, GL_UNSIGNED_BYTE, NULL); - else - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_RED, sourceWH.width[i], sourceWH.height[i], 0, GL_RED, GL_UNSIGNED_BYTE, NULL); - } - return RENDER_STATUS_OK; -} - -RenderStatus HWRenderSource::CreateR2TFBO(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - //2.initial FBOs - uint32_t textureOfR2T; - renderBackend->GenTextures(1, &textureOfR2T); - renderBackend->BindTexture(GL_TEXTURE_2D, textureOfR2T); - SetTextureOfR2T(textureOfR2T); - struct SourceWH sourceWH = GetSourceWH(); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[0]); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_RGB, sourceWH.width[0], sourceWH.height[0], 0, GL_RGB, GL_UNSIGNED_BYTE, NULL); - - uint32_t fboR2THandle; - renderBackend->GenFramebuffers(1, &fboR2THandle); - renderBackend->BindFramebuffer(GL_FRAMEBUFFER, fboR2THandle); - SetFboR2THandle(fboR2THandle); - renderBackend->FramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureOfR2T, 0); - - if (renderBackend->CheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) - { - printf("glCheckFramebufferStatus not complete\n"); - return RENDER_ERROR; - } - else - { - printf("glCheckFramebufferStatus complete\n"); - } - return RENDER_STATUS_OK; -} - -RenderStatus HWRenderSource::UpdateR2T(RenderBackend *renderBackend, void **buffer) -{ - if (NULL == renderBackend || NULL == buffer) - { - return RENDER_ERROR; - } - //1. 
update source texture - uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = GetSourceTextureHandle(); - struct SourceWH sourceWH = GetSourceWH(); - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - if (buffer == NULL) break; - if (buffer[i] == NULL) break; - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[i]); - if ( sourceTextureNumber == 1) - { - renderBackend->TexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, sourceWH.width[i], sourceWH.height[i], GL_RGB, GL_UNSIGNED_BYTE, buffer[i]); //use rgb data - } - else if (sourceTextureNumber == 2 && i == 1 ) - { - start = clock(); - renderBackend->TexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, sourceWH.width[i], sourceWH.height[i], GL_RG, GL_UNSIGNED_BYTE, buffer[i]); //use uv data - stop = clock(); - duration = ((double)(stop - start))/CLOCKS_PER_SEC; - fprintf(stderr, "TexSubImage2D i==1 %f seconds\n", duration ); - } - else - { - start = clock(); - renderBackend->TexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, sourceWH.width[i], sourceWH.height[i], GL_RED, GL_UNSIGNED_BYTE, buffer[i]); //use y data - stop = clock(); - duration = ((double)(stop - start))/CLOCKS_PER_SEC; - fprintf(stderr, "TexSubImage2D i==%d %f seconds\n", i,duration ); - } - } - //2. bind source texture and r2tFBO - uint32_t fboR2THandle = GetFboR2THandle(); - renderBackend->BindFramebuffer(GL_FRAMEBUFFER, fboR2THandle); - - m_videoShaderOfR2T.Bind(); - renderBackend->BindVertexArray(renderBackend->GetR2TVAOHandle()); // check - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - } - renderBackend->Viewport(0, 0, sourceWH.width[0], sourceWH.height[0]); - renderBackend->DrawArrays(GL_TRIANGLES, 0, 6); - return RENDER_STATUS_OK; -} - -RenderStatus HWRenderSource::DestroyRenderSource(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - uint32_t textureOfR2T = GetTextureOfR2T(); - if (textureOfR2T) - { - renderBackend->DeleteTextures(1, &textureOfR2T); - } - uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = GetSourceTextureHandle(); - if (sourceTextureHandle) - { - renderBackend->DeleteTextures(sourceTextureNumber, sourceTextureHandle); - } - uint32_t fboR2THandle = GetFboR2THandle(); - if (fboR2THandle) - { - renderBackend->DeleteFramebuffers(1, &fboR2THandle); - } - return RENDER_STATUS_OK; -} - -VCD_NS_END -#endif /* USE_DMA_BUFFER */ \ No newline at end of file diff --git a/src/player/MediaSource.h b/src/player/MediaSource.h deleted file mode 100644 index ae2c1d87..00000000 --- a/src/player/MediaSource.h +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. 
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file MediaSource.h -//! \brief Defines class for MediaSource. -//! -#ifndef _MEDIASOURCE_H_ -#define _MEDIASOURCE_H_ - -#include "Common.h" -#include -#include -#include -#include -#include - -VCD_NS_BEGIN - -class MediaSource -{ -public: - MediaSource(); - virtual ~MediaSource(); - //! \brief Initial in DashMediaInfo - //! - //! \param [in] struct RenderConfig - //! render configuration - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus Initialize(struct RenderConfig renderConfig) = 0; - //! \brief Get a frame from the Media Source - //! - //! [out] uint8_t ** - //! the frame buffer - //! [out] struct RegionInfo * - //! regionInfo - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus GetFrame(uint8_t **buffer, struct RegionInfo *regionInfo) = 0; - //! \brief Set Media Source Info - //! - //! \param [in] void * - //! mediaInfo - //! - //! \return void * - //! - virtual RenderStatus SetMediaSourceInfo(void *mediaInfo) = 0; - //! \brief Get Media Source Info - //! - //! \return struct MediaSourceInfo - //! - virtual struct MediaSourceInfo GetMediaSourceInfo() = 0; - //! \brief Get SourceMetaData - //! - //! \return void* - //! - virtual void* GetSourceMetaData() = 0; - //! \brief Check is file ends - //! - //! \return bool - //! - virtual bool IsEOS() = 0; - //! \brief set yaw and pitch to change Viewport - //! - //! \param [in] float - //! yaw angle - //! [in] pitch - //! pitch angle - //! - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus ChangeViewport(float yaw, float pitch) = 0; - //! \brief set region information - //! - //! \param [in] struct RegionInfo* - //! regionInfo - //! - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus SetRegionInfo(struct RegionInfo* regionInfo) = 0; - //! \brief delete buffer data - //! - //! \param [in] uint8_t ** - //! buffer - //! - virtual void DeleteBuffer(uint8_t **buffer) = 0; - //! \brief delete Region Wise Packing data - //! - //! \param [in] RegionWisePacking * - //! rwpk - //! 
- virtual void ClearRWPK(RegionWisePacking *rwpk) = 0; - //! \brief get isAllValid - //! - //! \return bool - //! isAllValid - //! - bool getIsAllValid() {return isAllValid;}; - -protected: - - struct MediaSourceInfo m_mediaSourceInfo; - MediaSourceType::Enum m_sourceType;//vod or live - bool isAllValid; - -}; - -VCD_NS_END -#endif /* _MEDIASOURCE_H_ */ - diff --git a/src/player/Player.cpp b/src/player/Player.cpp deleted file mode 100644 index a0b29d0f..00000000 --- a/src/player/Player.cpp +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file Player.cpp -//! \brief Implement class for Player. -//! 
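The MediaSource interface removed above defines the frame hand-off contract the render path depends on: GetFrame() lends out decoded planes plus a RegionInfo, and the caller returns them through DeleteBuffer() and ClearRWPK(). The sketch below is editorial, not part of the deleted sources; `ConsumeOneFrame` is a hypothetical name, `source` stands for any concrete subclass, and namespace qualifiers plus the headers above are assumed.

```cpp
// Sketch only: the ownership pattern implied by MediaSource.
void ConsumeOneFrame(MediaSource *source)
{
    uint8_t *buffer[4] = {nullptr, nullptr, nullptr, nullptr};
    struct RegionInfo regionInfo = {};

    if (source->GetFrame(&buffer[0], &regionInfo) != RENDER_STATUS_OK)
        return;                              // no frame ready or source error

    // ... hand buffer[] (decoded planes) and regionInfo to the render path ...

    source->DeleteBuffer(buffer);            // source releases the plane buffers
    if (regionInfo.sourceInfo != NULL)
    {
        delete regionInfo.sourceInfo;        // per-frame source info, freed by caller
        regionInfo.sourceInfo = NULL;
    }
    source->ClearRWPK(regionInfo.regionWisePacking);
}
```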
- -#include "Player.h" -#include "Common.h" -#include -#include -#include -#include -#include -#include -#include -#include - -#include "GLFWRenderContext.h" -#include "EGLRenderContext.h" -// #include - -glm::mat4 ProjectionMatrix; -glm::mat4 ViewModelMatrix; - -VCD_NS_BEGIN - -Player::Player(struct RenderConfig config) -{ - m_renderManager = new RenderManager(config); - m_status = READY; -} - -Player::~Player() -{ - if (m_renderManager != NULL) - { - delete m_renderManager; - m_renderManager = NULL; - } -} - -RenderStatus Player::Open() -{ - if (NULL == m_renderManager->GetRenderConfig().url) - { - LOG(ERROR)<<"Wrong url"<Initialize()) - { - return RENDER_ERROR; - } - m_status = READY; - return RENDER_STATUS_OK; -} - -RenderStatus Player::Play() -{ - float poseYaw, posePitch; - std::chrono::high_resolution_clock clock; - uint64_t lastTime = 0; - uint64_t renderCount = 0; // record render times - uint64_t start = std::chrono::duration_cast(clock.now().time_since_epoch()).count(); - bool quitFlag = false; - do - { - m_renderManager->GetStatusAndPose(&poseYaw, &posePitch, &m_status); - m_renderManager->SetViewport(poseYaw, posePitch); - if (0 == lastTime) - { - lastTime = std::chrono::duration_cast(clock.now().time_since_epoch()).count(); - } - if (PLAY == GetStatus()) - { - m_renderManager->PrepareRender(); - - uint64_t renderTime = std::chrono::duration_cast(clock.now().time_since_epoch()).count(); - uint64_t interval = renderTime - lastTime; - uint32_t renderInterval = m_renderManager->GetRenderConfig().renderInterval; - if(interval < renderInterval) - { - usleep((renderInterval - interval) * 1000); - LOG(INFO)<<"==========wait_time============== :"<<(renderInterval - interval)<INTERVAL========"<(clock.now().time_since_epoch()).count(); - LOG(INFO)<<"===========renderTime==============:"<Render(); - renderCount++; - } - else if (READY == GetStatus() || PAUSE == GetStatus()) - { - m_renderManager->Render(); - } - LOG(INFO)<<"status:"<IsEOS()) - { - cout<<"Soon to quit player!"<(clock.now().time_since_epoch()).count(); - LOG(INFO)<<"-----------------------------"<InitContext(); - //6.initial MediaSource - switch (m_renderConfig.sourceType) - { -#ifndef _ENABLE_WEBRTC_SOURCE_ - case DASH_SOURCE: - m_mediaSource = new DashMediaSource(); - break; -#endif -#ifdef USE_DMA_BUFFER - case FFMPEG_SOURCE: - m_mediaSource = new FFmpegMediaSource(); - break; -#endif -#ifdef _ENABLE_WEBRTC_SOURCE_ - case WEBRTC_SOURCE: - m_mediaSource = new WebRTCMediaSource(); - break; -#endif - default: - m_mediaSource = NULL; - LOG(ERROR)<<"initial media source error!"<Join(); - if (m_mediaSource != NULL) - { - delete m_mediaSource; - m_mediaSource = NULL; - } - if (m_renderBackend != NULL) - { - delete m_renderBackend; - m_renderBackend = NULL; - } - if (m_renderSource != NULL) - { - delete m_renderSource; - m_renderSource = NULL; - } - if (m_renderTarget != NULL) - { - delete m_renderTarget; - m_renderTarget = NULL; - } - if (m_surfaceRender != NULL) - { - delete m_surfaceRender; - m_surfaceRender = NULL; - } - if (m_viewPortManager != NULL) - { - delete m_viewPortManager; - m_viewPortManager = NULL; - } - if (m_renderContext != NULL) - { - delete m_renderContext; - m_renderContext = NULL; - } -} - -RenderStatus RenderManager::PrepareRender() -{ - //1.get frame from m_mediaSource - uint8_t *buffer[4]; - struct RegionInfo regionInfo; - if (RENDER_STATUS_OK != m_mediaSource->GetFrame(&buffer[0], ®ionInfo)) - { - return RENDER_ERROR; - } - //2.update texture and render to texture in m_renderSource - if 
(RENDER_STATUS_OK != m_renderSource->UpdateR2T(m_renderBackend, (void **)buffer)) - { - return RENDER_ERROR; - } - //3.tile copy and render to FBO from m_renderTarget - RenderStatus renderTargetStatus = m_renderTarget->Update(m_renderBackend, ®ionInfo); - if (RENDER_ERROR == renderTargetStatus) - { - return RENDER_ERROR; - } - //4. delete memory. - m_mediaSource->DeleteBuffer(buffer); - if (regionInfo.sourceInfo != NULL) - { - delete regionInfo.sourceInfo; - regionInfo.sourceInfo = NULL; - } - m_mediaSource->ClearRWPK(regionInfo.regionWisePacking); - return RENDER_STATUS_OK; -} - -RenderStatus RenderManager::Render() -{ - uint32_t width = m_renderConfig.windowWidth; - uint32_t height = m_renderConfig.windowHeight; - if (RENDER_STATUS_OK != m_surfaceRender->Render(m_renderBackend, m_renderTarget, width, height, m_renderContext->GetProjectionMatrix(), m_renderContext->GetViewModelMatrix())) - { - return RENDER_ERROR; - } - m_renderContext->SwapBuffers(NULL, 0); - return RENDER_STATUS_OK; -} - -RenderStatus RenderManager::Initialize() //should input the decoderManager -{ - if (NULL == m_renderConfig.url) - { - return RENDER_ERROR; - } - //1.load media source and get type - RenderStatus loadMediaStatus = m_mediaSource->Initialize(m_renderConfig); - if (loadMediaStatus != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - //2. change viewport thread start - StartThread(); - m_status= STATUS_CREATED; - while (!m_mediaSource->getIsAllValid()) - { - usleep(50*1000); - } - struct MediaSourceInfo mediaSourceInfo; - mediaSourceInfo = m_mediaSource->GetMediaSourceInfo(); - m_renderConfig.projFormat = mediaSourceInfo.projFormat; - m_renderConfig.renderInterval = 1000 / mediaSourceInfo.frameRate; - //2.initial SurfaceRender and shaders - if (CreateRender(m_renderConfig.projFormat) != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - //3.set uniform frameTex - m_renderSource->m_videoShaderOfR2T.Bind(); - m_renderSource->m_videoShaderOfR2T.SetUniform1i("frameTex", 0); - m_renderSource->m_videoShaderOfR2T.SetUniform1i("frameU", 1); - m_renderSource->m_videoShaderOfR2T.SetUniform1i("frameV", 2); - if (m_renderConfig.decoderType == 1) //hardware decoding with libva - { - m_renderSource->m_videoShaderOfR2T.SetUniform1i("isNV12", 1); - } - else //sw decoding - { - m_renderSource->m_videoShaderOfR2T.SetUniform1i("isNV12", 0); - } - m_surfaceRender->m_videoShaderOfOnScreen.Bind(); - m_surfaceRender->m_videoShaderOfOnScreen.SetUniform1i("frameTex_screen", 0); - //4.initial renderSource - if (m_renderSource->Initialize(&mediaSourceInfo) != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - if (m_renderSource->CreateRenderSource(m_renderBackend) != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - //5.initial renderTarget - if (m_renderTarget->Initialize(&mediaSourceInfo, m_renderSource->GetFboR2THandle()) != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - if (m_renderTarget->CreateRenderTarget(m_renderBackend) != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - return RENDER_STATUS_OK; -} - -RenderStatus RenderManager::CreateRender(int32_t projFormat) -{ - switch (projFormat) - { -#ifndef LOW_LATENCY_USAGE - case VCD::OMAF::PF_ERP: -#else - case PT_ERP: -#endif - { - m_surfaceRender = new ERPRender(m_renderBackend); - if (NULL == m_surfaceRender) - { - LOG(ERROR)<< "ERPRender creation failed" << std::endl; - return RENDER_ERROR; - } - break; - } -#ifndef LOW_LATENCY_USAGE - case VCD::OMAF::PF_CUBEMAP: -#else - case PT_CUBEMAP: -#endif - { - m_surfaceRender = new CubeMapRender(m_renderBackend); - if (NULL == 
m_surfaceRender) - { - LOG(ERROR)<< "CubeMapRender creation failed" << std::endl; - return RENDER_ERROR; - } - break; - } - - default: - return RENDER_ERROR; - } - - return RENDER_STATUS_OK; -} - -bool RenderManager::IsEOS() -{ - return m_mediaSource->IsEOS() || !(m_renderContext->isRunning()); -} - -RenderStatus RenderManager::ChangeViewport(float yaw, float pitch) -{ - m_mediaSource->ChangeViewport(yaw, pitch); - return RENDER_STATUS_OK; -} - -void RenderManager::Run() -{ - m_status = STATUS_RUNNING; - while (m_status != STATUS_STOPPED) - { - int32_t res = pthread_mutex_lock(&m_poseMutex); - if (res != 0) {return;} - struct Pose pose = m_viewPortManager->GetViewPort(); - res = pthread_mutex_unlock(&m_poseMutex); - if (res != 0) {return;} - ChangeViewport(pose.yaw, pose.pitch); - usleep(5*1000); - } -} - -RenderStatus RenderManager::SetViewport(float yaw, float pitch) -{ - struct Pose pose; - pose.yaw = yaw; - pose.pitch = pitch; - int32_t res = pthread_mutex_lock(&m_poseMutex); - if (res != 0) {return RENDER_ERROR;} - m_viewPortManager->SetViewPort(pose); - res = pthread_mutex_unlock(&m_poseMutex); - if (res != 0) {return RENDER_ERROR;} - return RENDER_STATUS_OK; -} - -struct RenderConfig RenderManager::GetRenderConfig() -{ - return m_renderConfig; -} - -void RenderManager::GetStatusAndPose(float *yaw, float *pitch, uint32_t *status) -{ - m_renderContext->GetStatusAndPose(yaw, pitch, status); -} - -VCD_NS_END diff --git a/src/player/RenderTarget.cpp b/src/player/RenderTarget.cpp deleted file mode 100644 index 74877002..00000000 --- a/src/player/RenderTarget.cpp +++ /dev/null @@ -1,368 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file RenderTarget.cpp -//! \brief Implement class for RenderTarget. -//! 
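Player::Play above paces rendering with renderInterval = 1000 / frameRate milliseconds and sleeps off whatever part of the interval the frame did not use. The stand-alone sketch below restates just that pacing step; it is editorial, and the function name and arguments are hypothetical.

```cpp
#include <chrono>
#include <cstdint>
#include <thread>

// Sketch of the pacing used in Player::Play: at 25 fps the interval is 40 ms,
// so a frame that took 12 ms to render is followed by a ~28 ms sleep.
void PaceFrame(uint64_t lastRenderTimeMs, uint32_t frameRate)
{
    using namespace std::chrono;
    const uint64_t intervalMs = 1000 / frameRate;
    const uint64_t nowMs =
        duration_cast<milliseconds>(steady_clock::now().time_since_epoch()).count();
    const uint64_t elapsedMs = nowMs - lastRenderTimeMs;
    if (elapsedMs < intervalMs)
        std::this_thread::sleep_for(milliseconds(intervalMs - elapsedMs));
}
```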
- -#include "RenderTarget.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -VCD_NS_BEGIN - -RenderTarget::RenderTarget() -{ - m_fboR2THandle = 0; - m_fboOnScreenHandle = 0; - m_textureOfR2S = 0; - m_targetWH.width = NULL; - m_targetWH.height = NULL; -} - -RenderTarget::~RenderTarget() -{ - RENDERBACKEND::GetInstance()->DeleteFramebuffers(1, &m_fboR2THandle); - RENDERBACKEND::GetInstance()->DeleteFramebuffers(1, &m_fboOnScreenHandle); - RENDERBACKEND::GetInstance()->DeleteTextures(1, &m_textureOfR2S); - if (m_targetWH.width != NULL) - { - delete [] m_targetWH.width; - m_targetWH.width = NULL; - } - if (m_targetWH.height != NULL) - { - delete [] m_targetWH.height; - m_targetWH.height = NULL; - } -} - -RenderStatus RenderTarget::Initialize(struct MediaSourceInfo *mediaSourceInfo, uint32_t fboR2T) -{ - if (NULL == mediaSourceInfo) - { - return RENDER_ERROR; - } - uint32_t width = mediaSourceInfo->sourceWH->width[0]; - uint32_t height = mediaSourceInfo->sourceWH->height[0]; - struct SourceWH targetWH; - targetWH.width = new uint32_t[1]; - targetWH.width[0] = width; - targetWH.height = new uint32_t[1]; - targetWH.height[0] = height; - SetTargetWH(targetWH); - SetFboR2THandle(fboR2T); - return RENDER_STATUS_OK; -} - -RenderStatus RenderTarget::CreateRenderTarget(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - - renderBackend->GenTextures(1, &m_textureOfR2S); - renderBackend->BindTexture(GL_TEXTURE_2D, m_textureOfR2S); - - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, m_targetWH.width[0]); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_targetWH.width[0], m_targetWH.height[0], 0, GL_RGB, GL_UNSIGNED_BYTE, NULL); - - renderBackend->GenFramebuffers(1, &m_fboOnScreenHandle); - renderBackend->BindFramebuffer(GL_FRAMEBUFFER, m_fboOnScreenHandle); - renderBackend->FramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_textureOfR2S, 0); - - if (renderBackend->CheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) - { - printf("glCheckFramebufferStatus not complete\n"); - return RENDER_ERROR; - } - else - { - printf("glCheckFramebufferStatus complete\n"); - } - - return RENDER_STATUS_OK; -} - -RenderStatus RenderTarget::Update(RenderBackend *renderBackend, struct RegionInfo *regionInfo) -{ - if (NULL == renderBackend || NULL == regionInfo) - { - return RENDER_ERROR; - } - //1. get rwpk info and do blit. - std::vector hasHighTileIds; - std::vector hasLowTileIds; - std::vector>>> regionInfoTransfer; - //1.1 get high and low tile id and regionInfoTransfer. - GetRenderMultiSource(regionInfo, hasHighTileIds, hasLowTileIds, regionInfoTransfer); - - renderBackend->BindFramebuffer(GL_READ_FRAMEBUFFER, m_fboR2THandle); - renderBackend->BindFramebuffer(GL_DRAW_FRAMEBUFFER, m_fboOnScreenHandle); - renderBackend->Clear(GL_COLOR_BUFFER_BIT);//clear buffer. - //1.2 blit the region tile to FBO2. 
- // glBlitFramebuffer(0, 0, FRAME_WIDTH, FRAME_HEIGHT, 0, 0, FRAME_WIDTH, FRAME_HEIGHT, GL_COLOR_BUFFER_BIT, GL_NEAREST); - //the pair.second in regionInfoTransfer is : {projRegLeft, projRegTop, projRegWidth, projRegHeight, packedRegLeft, packedRegTop, packedRegWidth, packedRegHeight, packedPicWidth, packedPicHeight, tileColumn, tileRow}; - uint32_t highWidth = regionInfoTransfer[0][0].second[2];//high resolution picture width - uint32_t highHeight = regionInfoTransfer[0][0].second[3];//high resolution picture height - uint32_t lowWidth = regionInfoTransfer[1][0].second[2];//low resolution picture width - uint32_t lowHeight = regionInfoTransfer[1][0].second[3];//low resolution picture height - uint16_t highCol = regionInfoTransfer[0][0].second[10];//high resolution tile column - uint16_t highRow = regionInfoTransfer[0][0].second[11];//high resolution tile row - uint16_t lowCol = regionInfoTransfer[1][0].second[10];//low resolution tile column - uint16_t lowRow = regionInfoTransfer[1][0].second[11];//low resolution tile row - float ratioW = (float)highWidth * highCol / lowWidth / lowCol;// the scale ratio for width - float ratioH = (float)highHeight * highRow / lowHeight / lowRow;// the sacle ratio for height - for (uint32_t j = 0; j < regionInfoTransfer[1].size(); j++) // blit order : low first. - { - if (find(hasLowTileIds.begin(), hasLowTileIds.end(), regionInfoTransfer[1][j].first) != hasLowTileIds.end()) - { - uint16_t projRegLeft = regionInfoTransfer[1][j].second[0]; - uint16_t projRegTop = regionInfoTransfer[1][j].second[1]; - uint16_t projRegRight = regionInfoTransfer[1][j].second[2] + projRegLeft; - uint16_t projRegBottom = regionInfoTransfer[1][j].second[3] + projRegTop; - uint16_t packedRegLeft = regionInfoTransfer[1][j].second[4]; - uint16_t packedRegTop = regionInfoTransfer[1][j].second[5]; - uint16_t packedRegRight = regionInfoTransfer[1][j].second[6] + packedRegLeft; - uint16_t packedRegBottom = regionInfoTransfer[1][j].second[7] + packedRegTop; - glBlitFramebuffer(packedRegLeft, packedRegTop, packedRegRight, packedRegBottom, int(projRegLeft * ratioW), int(projRegTop * ratioH), int(projRegRight * ratioW), int(projRegBottom * ratioH), GL_COLOR_BUFFER_BIT, GL_NEAREST); - } - } - for (uint32_t j = 0; j < regionInfoTransfer[0].size(); j++) - { - uint16_t projRegLeft = regionInfoTransfer[0][j].second[0]; - uint16_t projRegTop = regionInfoTransfer[0][j].second[1]; - uint16_t projRegRight = regionInfoTransfer[0][j].second[2] + projRegLeft; - uint16_t projRegBottom = regionInfoTransfer[0][j].second[3] + projRegTop; - uint16_t packedRegLeft = regionInfoTransfer[0][j].second[4]; - uint16_t packedRegTop = regionInfoTransfer[0][j].second[5]; - uint16_t packedRegRight = regionInfoTransfer[0][j].second[6] + packedRegLeft; - uint16_t packedRegBottom = regionInfoTransfer[0][j].second[7] + packedRegTop; - glBlitFramebuffer(packedRegLeft, packedRegTop, packedRegRight, packedRegBottom, projRegLeft, projRegTop, projRegRight, projRegBottom, GL_COLOR_BUFFER_BIT, GL_NEAREST); - } - glBindFramebuffer(GL_FRAMEBUFFER, 0); - return RENDER_STATUS_OK; -} - -std::vector>>> RenderTarget::TransferRegionInfo(struct RegionInfo *regionInfo) -{ - std::vector>>> regionInfoTransfer(regionInfo->sourceNumber, std::vector>>()); - uint16_t highTileRowNumber = regionInfo->sourceInfo[HIGHRESOTYPE].tileRowNumber; - uint16_t highTileColumnNumber = regionInfo->sourceInfo[HIGHRESOTYPE].tileColumnNumber; - uint16_t lowTileRowNumber = regionInfo->sourceInfo[LOWRESOTYPE].tileRowNumber; - uint16_t lowTileColumnNumber = 
regionInfo->sourceInfo[LOWRESOTYPE].tileColumnNumber; - uint16_t numRegion = regionInfo->regionWisePacking->numRegions; - - //for each tileRegion, set up a relationship between tileId and info - TileType tileType = LOWRESOTYPE; - for (int32_t i = numRegion-1; i >= 0; i--) - { - uint32_t projRegLeft = regionInfo->regionWisePacking->rectRegionPacking[i].projRegLeft; - uint32_t projRegTop = regionInfo->regionWisePacking->rectRegionPacking[i].projRegTop; - uint32_t projRegWidth = regionInfo->regionWisePacking->rectRegionPacking[i].projRegWidth; - uint32_t projRegHeight = regionInfo->regionWisePacking->rectRegionPacking[i].projRegHeight; - uint16_t packedRegLeft = regionInfo->regionWisePacking->rectRegionPacking[i].packedRegLeft; - uint16_t packedRegTop = regionInfo->regionWisePacking->rectRegionPacking[i].packedRegTop; - uint16_t packedRegWidth = regionInfo->regionWisePacking->rectRegionPacking[i].packedRegWidth; - uint16_t packedRegHeight = regionInfo->regionWisePacking->rectRegionPacking[i].packedRegHeight; - uint16_t packedPicWidth = regionInfo->regionWisePacking->packedPicWidth; - uint16_t packedPicHeight = regionInfo->regionWisePacking->packedPicHeight; - uint16_t tileColumn = tileType == HIGHRESOTYPE ? highTileColumnNumber : lowTileColumnNumber; - uint16_t tileRow = tileType == HIGHRESOTYPE ? highTileRowNumber : lowTileRowNumber; - - std::pair coord(projRegLeft / projRegWidth, projRegTop / projRegHeight); - uint32_t tileId = (coord.first + 1) + tileColumn * coord.second; - uint32_t tileInfo[12] = {projRegLeft, projRegTop, projRegWidth, projRegHeight, packedRegLeft, packedRegTop, packedRegWidth, packedRegHeight, packedPicWidth, packedPicHeight, tileColumn, tileRow}; - std::vector tileInformation(tileInfo, tileInfo + 12); - std::pair> tileInfoPair(tileId, tileInformation); - regionInfoTransfer[tileType].push_back(tileInfoPair); - if (i > 0 && projRegLeft == 0 && projRegTop == 0) //when (0,0) occurs and i>0 - { - tileType = HIGHRESOTYPE; - } - } - return regionInfoTransfer; -} - -RenderStatus RenderTarget::GetRenderMultiSource(struct RegionInfo *regionInfo, std::vector &hasHighTileIds, std::vector &hasLowTileIds, std::vector>>> ®ionInfoTransfer) -{ - if (NULL == regionInfo) - { - return RENDER_ERROR; - } - //1.transfer the regionInfo - regionInfoTransfer = TransferRegionInfo(regionInfo); - //2 get the high and low tile ids - for (uint32_t i = 0; i < regionInfoTransfer.size(); i++) - { - for (uint32_t j = 0; j < regionInfoTransfer[i].size(); j++) - { - if (i == HIGHRESOTYPE) - { - hasHighTileIds.push_back(regionInfoTransfer[i][j].first); - } - else - { - hasLowTileIds.push_back(regionInfoTransfer[i][j].first); - } - } - } - //erase the similar element - sort(hasLowTileIds.begin(), hasLowTileIds.end()); - hasLowTileIds.erase(unique(hasLowTileIds.begin(), hasLowTileIds.end()), hasLowTileIds.end()); - - return RENDER_STATUS_OK; -} - -std::vector RenderTarget::GetRegionTileId(struct SphereRegion *sphereRegion, struct SourceInfo *sourceInfo) -{ - std::vector RegionTileId; - if (NULL == sphereRegion || NULL == sourceInfo) - { - return RegionTileId; - } - uint32_t width = sourceInfo->sourceWidth; - uint32_t height = sourceInfo->sourceHeight; - uint32_t tileRowNumber = sourceInfo->tileRowNumber; - uint32_t tileColumnNumber = sourceInfo->tileColumnNumber; - //1. 
process sphereregion - uint32_t centerX = float(((sphereRegion->centreAzimuth) >> 16) + 180) / 360 * width; - uint32_t centerY = float(((sphereRegion->centreElevation) >> 16) + 90) / 180 * height; - uint32_t marginX = float((sphereRegion->azimuthRange) >> 16) / 360 * width; - uint32_t marginY = float((sphereRegion->elevationRange) >> 16) / 180 * height; - //1.1 transfer to lefttop and rightbottom - uint32_t leftTopX = (centerX - marginX / 2 + width) % width; - uint32_t leftTopY = (centerY - marginY / 2 + height) % height; - uint32_t rightBottomX = (centerX + marginX / 2 + width) % width; - uint32_t rightBottomY = (centerY + marginY / 2 + height) % height; - u_int32_t delta = 1; - //fix - rightBottomX = rightBottomX % (width / tileColumnNumber) == 0 ? (rightBottomX - delta + width) % width : rightBottomX; - rightBottomY = rightBottomY % (height / tileRowNumber) == 0 ? (rightBottomY - delta + height) % height : rightBottomY; - //1.2 transfer to (0,0), (0,1), (1,0) … - uint32_t s1 = leftTopX / (width / tileColumnNumber); - uint32_t s2 = leftTopY / (height / tileRowNumber); - uint32_t e1 = rightBottomX / (width / tileColumnNumber); - uint32_t e2 = rightBottomY / (height / tileRowNumber); - - // need to consider the bundary change fix the problem when region only in one tile using >= - for (uint32_t p = s1; p <= (e1 >= s1 ? e1 : e1 + tileColumnNumber); p++) - { - for (uint32_t q = s2; q <= (e2 >= s2 ? e2 : e2 + tileRowNumber); q++) - { - RegionTileId.push_back(p % tileColumnNumber + (q % tileRowNumber) * tileColumnNumber + 1); //index from 1 - } - } - sort(RegionTileId.begin(), RegionTileId.end()); - return RegionTileId; -} - -RenderStatus RenderTarget::TransferTileIdToRegion(uint32_t tileId, SourceInfo *sourceInfo, SphereRegion *sphereRegion) -{ - if (NULL == sphereRegion || NULL == sourceInfo) - { - return RENDER_ERROR; - } - //1.transfer tileId to coord() - uint32_t tileColumnNumber = sourceInfo->tileColumnNumber; - uint32_t coordX = (tileId - 1) % tileColumnNumber; - uint32_t coordY = (tileId - 1) / tileColumnNumber; - //2.transfer to 0-width and 0-height - sphereRegion->azimuthRange = sourceInfo->sourceWidth / sourceInfo->tileColumnNumber; - sphereRegion->elevationRange = sourceInfo->sourceHeight / sourceInfo->tileRowNumber; - sphereRegion->centreAzimuth = coordX * (sourceInfo->sourceWidth / sourceInfo->tileColumnNumber) + sphereRegion->azimuthRange / 2; - sphereRegion->centreElevation = coordY * (sourceInfo->sourceHeight / sourceInfo->tileRowNumber) + sphereRegion->elevationRange / 2; - //3.transfer to standard - sphereRegion->azimuthRange = uint32_t(float(sphereRegion->azimuthRange) / sourceInfo->sourceWidth * 360) << 16; - sphereRegion->elevationRange = uint32_t(float(sphereRegion->elevationRange) / sourceInfo->sourceHeight * 180) << 16; - sphereRegion->centreAzimuth = int32_t(float(sphereRegion->centreAzimuth) / sourceInfo->sourceWidth * 360 - 180) << 16; - sphereRegion->centreElevation = int32_t(float(sphereRegion->centreElevation) / sourceInfo->sourceHeight * 180 - 90) << 16; - return RENDER_STATUS_OK; -} - -uint32_t RenderTarget::GetFboR2THandle() -{ - return m_fboR2THandle; -} - -uint32_t RenderTarget::GetFboOnScreenHandle() -{ - return m_fboOnScreenHandle; -} - -uint32_t RenderTarget::GetTextureOfR2S() -{ - return m_textureOfR2S; -} - -struct SourceWH RenderTarget::GetTargetWH() -{ - return m_targetWH; -} - -RenderStatus RenderTarget::SetFboOnScreenHandle(uint32_t handle) -{ - m_fboOnScreenHandle = handle; - return RENDER_STATUS_OK; -} - -RenderStatus 
RenderTarget::SetFboR2THandle(uint32_t handle) -{ - m_fboR2THandle = handle; - return RENDER_STATUS_OK; -} - -RenderStatus RenderTarget::SetTextureOfR2S(uint32_t texture) -{ - m_textureOfR2S = texture; - return RENDER_STATUS_OK; -} - -RenderStatus RenderTarget::SetTargetWH(struct SourceWH targetWH) -{ - m_targetWH = targetWH; - return RENDER_STATUS_OK; -} - -VCD_NS_END diff --git a/src/player/RenderType.h b/src/player/RenderType.h deleted file mode 100644 index 102a7e09..00000000 --- a/src/player/RenderType.h +++ /dev/null @@ -1,264 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file RenderType.h -//! \brief define basic structure and data. -//! 
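GetRegionTileId and TransferTileIdToRegion above exchange sphere-region angles in 16.16 fixed point, i.e. degrees shifted left by 16 bits. The worked example below is editorial, not part of the deleted sources, and assumes a 3840x1920 ERP source tiled into 4 columns by 2 rows.

```cpp
// TransferTileIdToRegion(1, &src, &region) with
//   src = { 3840, 1920, /*tileRowNumber*/ 2, /*tileColumnNumber*/ 4 }
// yields the top-left tile as a sphere region:
//   region.azimuthRange    == 90  << 16   // 360 deg / 4 columns
//   region.elevationRange  == 90  << 16   // 180 deg / 2 rows
//   region.centreAzimuth   == -135 << 16  // centre of the leftmost column
//   region.centreElevation == -45  << 16  // centre of the top row
// so (value >> 16) recovers plain degrees, matching the ">> 16" reads at the
// start of GetRegionTileId.
```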
- -#ifndef _RENDERTYPE_H_ -#define _RENDERTYPE_H_ - -#include -#include -#include -#include "360SCVPAPI.h" - -typedef bool bool_t; -typedef char char_t; -typedef char utf8_t; - -typedef float float32_t; -typedef double float64_t; - -#ifndef NULL -#define NULL 0 -#endif - -#ifndef RENDER_PI -#define RENDER_PI 3.1415926f -#endif - -extern "C" -{ -#include -#include -#include -#include -#include -} - -namespace PixelFormat -{ -enum Enum -{ - INVALID, - PIX_FMT_RGB24, - PIX_FMT_YUV420P, - AV_PIX_FMT_NV12, - AV_PIX_FMT_NV12_DMA_BUFFER -}; -}; - -namespace MediaSourceType -{ -enum Enum -{ - SOURCE_NONE, - SOURCE_VOD = 1, - SOURCE_LIVE -}; -}; -enum -{ - DASH_SOURCE = 0, - FFMPEG_SOURCE, - WEBRTC_SOURCE -}; -enum -{ - SW_DECODER = 0, - VAAPI_DECODER -}; -enum RenderContextType -{ - DEFAULT_CONTEXT, - GLFW_CONTEXT = 0, - EGL_CONTEXT -}; -enum -{ - READY = 0, - PLAY, - PAUSE -}; -enum TileType -{ - HIGHRESOTYPE = 0, - LOWRESOTYPE, -}; -struct RenderConfig -{ - //from config - uint32_t windowWidth; - uint32_t windowHeight; - const char *url; - uint32_t sourceType; - uint32_t decoderType; - uint32_t contextType; - uint32_t useDMABuffer; - uint32_t viewportHFOV; - uint32_t viewportVFOV; - uint32_t viewportWidth; - uint32_t viewportHeight; - const char *cachePath; - //from media source - int32_t projFormat; - uint32_t renderInterval; -}; - -enum ThreadStatus{ - STATUS_CREATED=0, - STATUS_RUNNING, - STATUS_STOPPED, - STATUS_UNKNOWN, -}; -#ifdef LOW_LATENCY_USAGE -enum ProjectType{ - PT_UNKNOWN = -1, - PT_ERP = 0, - PT_CUBEMAP = 1, - PT_RESERVED, -}; -#endif -struct SphereRegion -{ - uint8_t viewIdc; //corresponding to view_idc[i] when view_idc_presence_flag is equal to 1 - int32_t centreAzimuth; - int32_t centreElevation; - int32_t centreTilt; - uint32_t azimuthRange; - uint32_t elevationRange; - bool interpolate; // can only be 0 here -}; - -struct SourceWH -{ - uint32_t *width; - uint32_t *height; -}; - -struct FrameInfo -{ - uint8_t **mBuffer; - struct RegionInfo *mRegionInfo; -}; - -struct MediaSourceInfo -{ - uint32_t width; - uint32_t height; - int32_t projFormat; // ERP or CUBE - int32_t pixFormat; // RGB or YUV - bool hasAudio; - uint32_t audioChannel; - uint32_t stride; - uint32_t numberOfStreams; // number of video streams. 
- uint32_t frameRate; - int64_t duration; - uint32_t frameNum; - uint32_t currentFrameNum; - uint32_t sourceNumber; - struct SourceWH *sourceWH; - //more information to do -}; - -struct SourceData -{ - AVFormatContext *fmt_ctx; - int stream_idx; - AVStream *video_stream; - AVCodecContext *codec_ctx; - AVCodec *decoder; - AVPacket *packet; - AVFrame *av_frame; - AVFrame *gl_frame; - struct SwsContext *conv_ctx; -}; - -struct Pose -{ - float yaw; - float pitch; -}; - -struct SourceInfo -{ - uint32_t sourceWidth; - uint32_t sourceHeight; - uint32_t tileRowNumber; - uint32_t tileColumnNumber; -}; - -struct RegionInfo -{ - uint32_t sourceNumber; - RegionWisePacking *regionWisePacking; - struct SourceInfo *sourceInfo; -}; - -struct BufferInfo -{ - uint8_t *buffer[4]; - uint32_t width; - uint32_t height; - uint32_t stride[4]; //rgb width*3 - PixelFormat::Enum pixelFormat; -}; - -struct MultiBufferInfo -{ - uint32_t bufferNumber; - struct BufferInfo *bufferInfo; -}; - -struct RenderInfo -{ - uint32_t width; - uint32_t height; - uint32_t windowWidth; - uint32_t windowHeight; -}; - -enum RenderStatus -{ - RENDER_ERROR = -1, - - RENDER_CREATE_ERROR, - RENDER_STATUS_OK -}; - -enum DecoderStatus -{ - PACKET_ERROR = -1, - - FRAME_ERROR, - DECODER_OK -}; - -#endif diff --git a/src/player/SWRenderSource.cpp b/src/player/SWRenderSource.cpp deleted file mode 100644 index b11d20cc..00000000 --- a/src/player/SWRenderSource.cpp +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file SWRenderSource.cpp -//! \brief Implement class for SWRenderSource. -//! 
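The RenderConfig structure removed above is what the player wires from its configuration into RenderManager. The sketch below shows one plausible way to populate it for a DASH, software-decode, GLFW session; it is editorial, the URL, sizes and cache path are made-up values, and projFormat / renderInterval are later overwritten from MediaSourceInfo inside RenderManager::Initialize (see above).

```cpp
// Sketch only: illustrative RenderConfig values, not defaults from the sources.
struct RenderConfig cfg = {};
cfg.windowWidth    = 1920;
cfg.windowHeight   = 1080;
cfg.url            = "http://example.com/Test.mpd";  // hypothetical MPD URL
cfg.sourceType     = DASH_SOURCE;
cfg.decoderType    = SW_DECODER;
cfg.contextType    = GLFW_CONTEXT;
cfg.useDMABuffer   = 0;
cfg.viewportHFOV   = 80;
cfg.viewportVFOV   = 80;
cfg.viewportWidth  = 960;
cfg.viewportHeight = 960;
cfg.cachePath      = "/tmp/cache";
// cfg.projFormat and cfg.renderInterval are filled in from the media source.
```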
- -#include "SWRenderSource.h" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "Render2TextureMesh.h" - -VCD_NS_BEGIN - -SWRenderSource::SWRenderSource(RenderBackend *renderBackend) -{ - //1.render to texture : vertex and texCoords assign - m_videoShaderOfR2T.Bind(); - m_meshOfR2T = new Render2TextureMesh(); - m_meshOfR2T->Create(); - uint32_t vertexAttribOfR2T = m_videoShaderOfR2T.SetAttrib("vPosition"); - uint32_t texCoordsAttribOfR2T = m_videoShaderOfR2T.SetAttrib("aTexCoord"); - m_meshOfR2T->Bind(renderBackend, vertexAttribOfR2T, texCoordsAttribOfR2T); -} - -SWRenderSource::~SWRenderSource() -{ - if (m_meshOfR2T) - { - delete m_meshOfR2T; - m_meshOfR2T = NULL; - } -} - -RenderStatus SWRenderSource::Initialize(struct MediaSourceInfo *mediaSourceInfo) -{ - if (NULL == mediaSourceInfo) - { - return RENDER_ERROR; - } - uint32_t number = 0; - struct SourceWH packedWH; - switch (mediaSourceInfo->pixFormat) - { - case PixelFormat::PIX_FMT_RGB24: - number = 1; - packedWH.width = new uint32_t[number]; - packedWH.height = new uint32_t[number]; - packedWH.width[0] = mediaSourceInfo->width; - packedWH.height[0] = mediaSourceInfo->height; - break; - case PixelFormat::PIX_FMT_YUV420P: - number = 3; - packedWH.width = new uint32_t[number]; - packedWH.height = new uint32_t[number]; - packedWH.width[0] = mediaSourceInfo->width; - packedWH.width[1] = packedWH.width[0] / 2; - packedWH.width[2] = packedWH.width[1]; - packedWH.height[0] = mediaSourceInfo->height; - packedWH.height[1] = packedWH.height[0] / 2; - packedWH.height[2] = packedWH.height[1]; - break; - default: - break; - } - SetSourceWH(packedWH); - SetSourceTextureNumber(number); - return RENDER_STATUS_OK; -} - -RenderStatus SWRenderSource::CreateRenderSource(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - if (CreateSourceTex(renderBackend) != RENDER_STATUS_OK || CreateR2TFBO(renderBackend) != RENDER_STATUS_OK) - { - return RENDER_ERROR; - } - return RENDER_STATUS_OK; -} - -RenderStatus SWRenderSource::CreateSourceTex(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - //1. initial r2t three textures. 
- uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = new uint32_t[sourceTextureNumber]; - renderBackend->GenTextures(sourceTextureNumber, sourceTextureHandle); - SetSourceTextureHandle(sourceTextureHandle); - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - struct SourceWH sourceWH = GetSourceWH(); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[i]); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_R8, sourceWH.width[i], sourceWH.height[i], 0, GL_RED, GL_UNSIGNED_BYTE, NULL); - } - return RENDER_STATUS_OK; -} - -RenderStatus SWRenderSource::CreateR2TFBO(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - //2.initial FBOs - uint32_t textureOfR2T; - renderBackend->GenTextures(1, &textureOfR2T); - renderBackend->BindTexture(GL_TEXTURE_2D, textureOfR2T); - SetTextureOfR2T(textureOfR2T); - struct SourceWH sourceWH = GetSourceWH(); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[0]); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - renderBackend->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - - renderBackend->TexImage2D(GL_TEXTURE_2D, 0, GL_RGB, sourceWH.width[0], sourceWH.height[0], 0, GL_RGB, GL_UNSIGNED_BYTE, NULL); - - uint32_t fboR2THandle; - renderBackend->GenFramebuffers(1, &fboR2THandle); - renderBackend->BindFramebuffer(GL_FRAMEBUFFER, fboR2THandle); - SetFboR2THandle(fboR2THandle); - renderBackend->FramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureOfR2T, 0); - - if (renderBackend->CheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) - { - printf("glCheckFramebufferStatus not complete\n"); - return RENDER_ERROR; - } - else - { - printf("glCheckFramebufferStatus complete\n"); - } - return RENDER_STATUS_OK; -} - -RenderStatus SWRenderSource::UpdateR2T(RenderBackend *renderBackend, void **buffer) -{ - if (NULL == renderBackend || NULL == buffer) - { - return RENDER_ERROR; - } - //1. 
update source texture - uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = GetSourceTextureHandle(); - struct SourceWH sourceWH = GetSourceWH(); - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - renderBackend->PixelStorei(GL_UNPACK_ROW_LENGTH, sourceWH.width[i]); - if (GetSourceTextureNumber() == 1) - renderBackend->TexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, sourceWH.width[i], sourceWH.height[i], GL_RGB, GL_UNSIGNED_BYTE, buffer[i]); //use rgb data - else - renderBackend->TexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, sourceWH.width[i], sourceWH.height[i], GL_RED, GL_UNSIGNED_BYTE, buffer[i]); //use yuv data - } - //2. bind source texture and r2tFBO - uint32_t fboR2THandle = GetFboR2THandle(); - renderBackend->BindFramebuffer(GL_FRAMEBUFFER, fboR2THandle); - - m_videoShaderOfR2T.Bind(); - renderBackend->BindVertexArray(renderBackend->GetR2TVAOHandle()); // check - for (uint32_t i = 0; i < sourceTextureNumber; i++) - { - if (i == 0) - renderBackend->ActiveTexture(GL_TEXTURE0); - else if (i == 1) - renderBackend->ActiveTexture(GL_TEXTURE1); - else if (i == 2) - renderBackend->ActiveTexture(GL_TEXTURE2); - else if (i == 3) - renderBackend->ActiveTexture(GL_TEXTURE3); - renderBackend->BindTexture(GL_TEXTURE_2D, sourceTextureHandle[i]); - } - renderBackend->Viewport(0, 0, sourceWH.width[0], sourceWH.height[0]); - renderBackend->DrawArrays(GL_TRIANGLES, 0, 6); - return RENDER_STATUS_OK; -} - -RenderStatus SWRenderSource::DestroyRenderSource(RenderBackend *renderBackend) -{ - if (NULL == renderBackend) - { - return RENDER_ERROR; - } - uint32_t textureOfR2T = GetTextureOfR2T(); - if (textureOfR2T) - { - renderBackend->DeleteTextures(1, &textureOfR2T); - } - uint32_t sourceTextureNumber = GetSourceTextureNumber(); - uint32_t *sourceTextureHandle = GetSourceTextureHandle(); - if (sourceTextureHandle) - { - renderBackend->DeleteTextures(sourceTextureNumber, sourceTextureHandle); - } - uint32_t fboR2THandle = GetFboR2THandle(); - if (fboR2THandle) - { - renderBackend->DeleteFramebuffers(1, &fboR2THandle); - } - return RENDER_STATUS_OK; -} - -VCD_NS_END diff --git a/src/player/WebRTCMediaSource.cpp b/src/player/WebRTCMediaSource.cpp deleted file mode 100644 index 4adbb710..00000000 --- a/src/player/WebRTCMediaSource.cpp +++ /dev/null @@ -1,761 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file WebRTCMediaSource.cpp -//! \brief Implement class for WebRTCMediaSource. -//! -#ifdef _ENABLE_WEBRTC_SOURCE_ - -#include "Common.h" -#include "WebRTCMediaSource.h" - -#include -#include -#include -#include -#include - -#include "owt/base/commontypes.h" -#include "owt/base/globalconfiguration.h" -#include "owt/base/network.h" -#include "owt/base/options.h" -#include "owt/conference/conferenceclient.h" -#include "owt/conference/remotemixedstream.h" -#include "http.h" -#include "../utils/tinyxml2.h" - -#define SOURCENUMBER 2 - -VCD_NS_BEGIN -using namespace tinyxml2; - -static int isValidStartCode(uint8_t *data, int length) -{ - if (length < 3) - return -1; - - if (data[0] == 0 && data[1] == 0 && data[2] == 1) - return 3; - - if (length >= 4 && data[0] == 0 && data[1] == 0 && data[2] == 0 && data[3] == 1) - return 4; - - return 0; -} - -static int findFirstNALU(uint8_t *data, int length, int *offset, int *size) -{ - int i = 0; - int ret = -1; - int start_code_len = 0; - int nalu_offset = 0; - int nalu_type = -1; - int nalu_size = 0; - - while (true) { - ret = isValidStartCode(data + i, length - i); - if (ret < 0) - return -1; - else if (ret > 0) { - start_code_len = ret; - nalu_offset = i; - break; - } - - ++i; - } - - i += start_code_len; - while (true) { - ret = isValidStartCode(data + i, length - i); - if (ret < 0) { - nalu_size = length - nalu_offset; - break; - } else if (ret > 0) { - nalu_size = i - nalu_offset ; - break; - } - - ++i; - } - - *offset = nalu_offset; - *size = nalu_size; - return (data[nalu_offset + start_code_len] & 0x7e) >> 1; -} - -static int filterNALs(std::shared_ptr bitstream_buf, const std::vector &remove_types, std::shared_ptr sei_buf) -{ - int remove_nals_size = 0; - - uint8_t *buffer_start = bitstream_buf->data(); - int buffer_length = bitstream_buf->size(); - int nalu_offset = 0; - int nalu_size = 0; - int nalu_type; - - while (buffer_length > 0) { - nalu_type = findFirstNALU(buffer_start, buffer_length, &nalu_offset, &nalu_size); - if (nalu_type < 0) - break; - - if (std::find(remove_types.begin(), remove_types.end(), nalu_type) != remove_types.end()) { - //copy - sei_buf->insert(buffer_start + nalu_offset, nalu_size); - - //next - memmove(buffer_start, buffer_start + nalu_offset + nalu_size, buffer_length - nalu_offset - nalu_size); - buffer_length -= nalu_offset + nalu_size; - - remove_nals_size += nalu_offset + nalu_size; - continue; - } - - buffer_start += (nalu_offset + nalu_size); - buffer_length -= (nalu_offset + nalu_size); - } - - bitstream_buf->resize(bitstream_buf->size() - remove_nals_size); - return bitstream_buf->size(); -} - -static void filter_RWPK_SEI(std::shared_ptr bitstream_buf, std::shared_ptr sei_buf) -{ - std::vector sei_types; - sei_types.push_back(38); - sei_types.push_back(39); - sei_types.push_back(30); - - filterNALs(bitstream_buf, sei_types, sei_buf); -} - -WebRTCVideoFrame::WebRTCVideoFrame(AVFrame *frame, uint8_t *extra_data, int32_t extra_data_length) - : 
m_rwpk_sei(NULL) - , m_rwpk_sei_length(0) - , m_frame(NULL) { - - if (frame == NULL || extra_data == NULL || extra_data <= 0) - return; - - m_frame = av_frame_clone(frame); - -#if 0 - LOG("linesize: %d-%d-%d-%d" - , m_decFrame->linesize[0] - , m_decFrame->linesize[1] - , m_decFrame->linesize[2] - , m_decFrame->linesize[3]); -#endif - - // if pitch, we need recopy this data - m_buffer[0] = m_frame->data[0]; - m_buffer[1] = m_frame->data[1]; - m_buffer[2] = m_frame->data[2]; - - m_rwpk_sei = (uint8_t *)malloc(extra_data_length); - memcpy(m_rwpk_sei, extra_data, extra_data_length); - m_rwpk_sei_length = extra_data_length; - - return; -} - -WebRTCVideoFrame::~WebRTCVideoFrame() { - if (m_frame) - av_frame_free(&m_frame); - - if (m_rwpk_sei) - free(m_rwpk_sei); -} - -WebRTCMediaSource *WebRTCMediaSource::s_CurObj; -uint32_t WebRTCMediaSource::fullwidth; -uint32_t WebRTCMediaSource::fullheight; - -int WebRTCMediaSource::getParam(int *flag, int* projType) -{ - XMLDocument config; - config.LoadFile("config.xml"); - XMLElement *info = config.RootElement(); - if(!strcmp(info->FirstChildElement("resolution")->GetText(),"8k")) - *flag = 1; - else if(!strcmp(info->FirstChildElement("resolution")->GetText(),"4k")) - *flag = 2; - else - { - LOG(ERROR) << "wrong resolution,must be 4k or 8k !" << std::endl; - exit(-1); - } - const char* server_url = info->FirstChildElement("server_url")->GetText(); - if(!server_url) - { - LOG(ERROR) << "server_url can not null !" << std::endl; - exit(-1); - } - this->m_serverAddress = std::string(server_url); - - int rate = atoi(info->FirstChildElement("frameRate")->GetText()); - int num = atoi(info->FirstChildElement("frameNum")->GetText()); - if(!rate || !num) - { - LOG(ERROR) << "wrong frameRate or frameNum !" << std::endl; - exit(-1); - } - this->frameRate = rate; - this->frameNum = num; - - if(!strcmp(info->FirstChildElement("ProjectionType")->GetText(),"ERP")) - *projType = 1; - else if(!strcmp(info->FirstChildElement("ProjectionType")->GetText(),"CUBMAP")) - *projType = 2; - else - { - LOG(ERROR) << "wrong ProjectionType,must be ERP or CUBMAP !" << std::endl; - exit(-1); - } - - LOG(INFO) << "server_url " << info->FirstChildElement("server_url")->GetText() - << ", resolution " << info->FirstChildElement("resolution")->GetText() - << ", frameRate " << info->FirstChildElement("frameRate")->GetText() - << ", frameNum " << info->FirstChildElement("frameNum")->GetText() - << ", ProjectionType " << info->FirstChildElement("ProjectionType")->GetText() - << std::endl; - - return 0; -} - -void WebRTCMediaSource::subscribe_on_success_callback(std::shared_ptr sc) { - LOG(INFO) << "subscribe_on_success_callback" << std::endl; - - s_CurObj->m_subId = sc->Id(); - - std::unique_lock ulock(s_CurObj->m_mutex); - s_CurObj->m_ready = true; - s_CurObj->m_cond.notify_all(); -} - -void WebRTCMediaSource::subscribe_on_failure_callback(std::unique_ptr err) { - LOG(ERROR) << "subscribe_on_failure_callback: " << err->Message() << std::endl; - exit(1); -} - -void WebRTCMediaSource::join_on_success_callback(std::shared_ptr info) -{ - LOG(INFO) << "join_on_success_callback" << std::endl; - - std::vector> remoteStreams = info->RemoteStreams(); - for (auto &remoteStream : remoteStreams) { - if (remoteStream->Source().video == owt::base::VideoSourceInfo::kMixed) { - s_CurObj->m_mixed_stream = static_pointer_cast(remoteStream); - break; - } - } - - if (!s_CurObj->m_mixed_stream) { - LOG(ERROR) << "No mixed stream!" 
<< std::endl; - exit(1); - } - - owt::base::VideoCodecParameters codecParams; - codecParams.name = owt::base::VideoCodec::kH265; - - owt::conference::SubscribeOptions options; - options.video.codecs.push_back(codecParams); - - options.video.resolution.width = fullwidth; - options.video.resolution.height = fullheight; - - LOG(INFO) << "Subscribe: " << options.video.resolution.width << "x" << options.video.resolution.height << std::endl; - s_CurObj->m_roomId = info->Id(); - s_CurObj->m_room->Subscribe(s_CurObj->m_mixed_stream, - options, - subscribe_on_success_callback, - subscribe_on_failure_callback); -} - -void WebRTCMediaSource::join_on_failure_callback(std::unique_ptr err) -{ - LOG(ERROR) << "join_on_failure_callback: " << err->Message() << std::endl; - exit(1); -} - -int32_t WebRTCMediaSource::RenderFrame(AVFrame *avFrame, uint8_t *extra_data, int32_t extra_data_length) { - std::shared_ptr frame = make_shared(avFrame, extra_data, extra_data_length); - if (!frame->isValid()) - return -1; - - { - std::unique_lock ulock(m_mutex); - - while (m_webrtc_render_frame_queue.size() >= 2) { - LOG(INFO) << "drop frame, queue size: " << m_webrtc_render_frame_queue.size() << std::endl; - //m_webrtc_render_frame_queue.pop_back(); - m_webrtc_render_frame_queue.clear(); - } - - m_webrtc_render_frame_queue.push_front(frame); - - if (m_webrtc_render_frame_queue.size() == 1) - m_cond.notify_all(); - } - - return 0; -} - -WebRTCMediaSource::WebRTCMediaSource() - : m_yaw(0) - , m_pitch(0) - , m_ready(false) -{ - LOG(INFO) << __FUNCTION__ << std::endl; - - m_parserRWPKParam.usedType = E_PARSER_FOR_CLIENT; - m_parserRWPKHandle = I360SCVP_Init(&m_parserRWPKParam); - - m_RWPK.rectRegionPacking = NULL; - s_CurObj = this; -} - -WebRTCMediaSource::~WebRTCMediaSource() -{ - LOG(INFO) << __FUNCTION__ << std::endl; - - if(m_RWPK.rectRegionPacking) - { - delete[] m_RWPK.rectRegionPacking; - m_RWPK.rectRegionPacking = NULL; - } - if(m_parserRWPKHandle) - I360SCVP_unInit(m_parserRWPKHandle); - m_parserRWPKHandle = NULL; -} - -RenderStatus WebRTCMediaSource::GetFrame(uint8_t **buffer, struct RegionInfo *regionInfo) -{ - std::shared_ptr frame; - - { - std::unique_lock ulock(m_mutex); - while(m_webrtc_render_frame_queue.size() == 0) - m_cond.wait(ulock); - - frame = m_webrtc_render_frame_queue.back(); - m_webrtc_render_frame_queue.pop_back(); - - m_free_queue.push_front(frame); - } - - //dosomething - buffer[0] = frame->m_buffer[0]; - buffer[1] = frame->m_buffer[1]; - buffer[2] = frame->m_buffer[2]; - - //1. get rwpk info - I360SCVP_ParseRWPK(m_parserRWPKHandle, &m_RWPK, frame->m_rwpk_sei, frame->m_rwpk_sei_length); - regionInfo->regionWisePacking = &m_RWPK; - - //2. 
set regionInfo - SetRegionInfo(regionInfo); - - return RENDER_STATUS_OK; -} - -RenderStatus WebRTCMediaSource::SetRegionInfo(struct RegionInfo *regionInfo) -{ - if (NULL == regionInfo) - { - return RENDER_ERROR; - } - regionInfo->sourceNumber = SOURCENUMBER; - regionInfo->sourceInfo = (struct SourceInfo *)malloc(sizeof(struct SourceInfo) * regionInfo->sourceNumber); - if (NULL == regionInfo->sourceInfo) - { - return RENDER_ERROR; - } - - regionInfo->sourceInfo[0].sourceWidth = regionInfo->regionWisePacking->projPicWidth; - regionInfo->sourceInfo[0].sourceHeight = regionInfo->regionWisePacking->projPicHeight; - regionInfo->sourceInfo[0].tileColumnNumber = regionInfo->sourceInfo[0].sourceWidth / regionInfo->regionWisePacking->rectRegionPacking[0].projRegWidth; - regionInfo->sourceInfo[0].tileRowNumber = regionInfo->sourceInfo[0].sourceHeight / regionInfo->regionWisePacking->rectRegionPacking[0].projRegHeight; - //low reso tile hard code - regionInfo->sourceInfo[1].sourceWidth = this->lowwidth;//LOWWIDTH; - regionInfo->sourceInfo[1].sourceHeight = this->lowheight;//LOWHEIGHT; - regionInfo->sourceInfo[1].tileColumnNumber = regionInfo->sourceInfo[1].sourceWidth / regionInfo->regionWisePacking->rectRegionPacking[regionInfo->regionWisePacking->numRegions - 1].projRegWidth; - regionInfo->sourceInfo[1].tileRowNumber = regionInfo->sourceInfo[1].sourceHeight / regionInfo->regionWisePacking->rectRegionPacking[regionInfo->regionWisePacking->numRegions - 1].projRegHeight; - - if(regionInfo->sourceInfo[0].tileColumnNumber ==0) - LOG(INFO) << "tileColumnNumber" << regionInfo->sourceInfo[0].tileColumnNumber << std::endl; - - return RENDER_STATUS_OK; -} - -RenderStatus WebRTCMediaSource::Initialize(struct RenderConfig renderConfig) -{ - int flag, projType; - getParam(&flag, &projType); - if(flag == 1){ - fullwidth = 7680; - fullheight = 3840; - this->lowwidth = 512; - this->lowheight = 1280; - this->packedwidth = 2816; - this->packedheight = 2560; - }else if(flag == 2){ - fullwidth = 3840; - fullheight = 2048; - this->lowwidth = 1280; - this->lowheight = 768; - this->packedwidth = 2304; - this->packedheight = 1280; - } - if(projType == 1){ - this->projType = PT_ERP; - }else if(projType == 2){ - this->projType = PT_CUBEMAP; - } - - owt::base::GlobalConfiguration::SetEncodedVideoFrameEnabled(true); - unique_ptr decoder(new WebRTCFFmpegVideoDecoder(this)); - owt::base::GlobalConfiguration::SetCustomizedVideoDecoderEnabled(std::move(decoder)); - - owt::conference::ConferenceClientConfiguration configuration; - - owt::base::IceServer ice; - ice.urls.push_back("stun:61.152.239.56"); - ice.username = ""; - ice.password = ""; - std::vector ice_servers; - ice_servers.push_back(ice); - - configuration.ice_servers = ice_servers; - - string roomId = ""; - string token = CHttp::getToken(m_serverAddress, roomId); - - if (token == "") { - LOG(ERROR) << "invalid token!" 
<< std::endl; - return RENDER_ERROR; - } - m_room = owt::conference::ConferenceClient::Create(configuration); - - m_room->Join(token, - join_on_success_callback, - join_on_failure_callback); - - - { - std::unique_lock ulock(m_mutex); - while(m_webrtc_render_frame_queue.size() == 0) - m_cond.wait(ulock); - } - - memset(&m_mediaSourceInfo, 0, sizeof(m_mediaSourceInfo)); - - m_mediaSourceInfo.width = this->packedwidth; - m_mediaSourceInfo.height = this->packedheight; - m_mediaSourceInfo.stride = m_mediaSourceInfo.width; - m_mediaSourceInfo.projFormat = this->projType; - m_mediaSourceInfo.pixFormat = PixelFormat::PIX_FMT_YUV420P; - m_mediaSourceInfo.hasAudio = false; - m_mediaSourceInfo.audioChannel = 0; - m_mediaSourceInfo.numberOfStreams = 1; - m_mediaSourceInfo.frameRate = this->frameRate; - m_mediaSourceInfo.frameNum = this->frameNum; - m_mediaSourceInfo.currentFrameNum = 0; - m_mediaSourceInfo.sourceWH = new SourceWH; - m_mediaSourceInfo.sourceWH->width = new uint32_t[SOURCENUMBER]; - m_mediaSourceInfo.sourceWH->width[0] = fullwidth; - m_mediaSourceInfo.sourceWH->width[1] = this->lowwidth; - m_mediaSourceInfo.sourceWH->height = new uint32_t[SOURCENUMBER]; - m_mediaSourceInfo.sourceWH->height[0] = fullheight; - m_mediaSourceInfo.sourceWH->height[1] = this->lowheight; - isAllValid = true; - m_RWPK.rectRegionPacking = new RectangularRegionWisePacking[DEFAULT_REGION_NUM]; - if(!m_RWPK.rectRegionPacking){ - LOG(ERROR) << "Can not alloc mem!" << std::endl; - return RENDER_ERROR; - } - - LOG(INFO) << "Initialized!" << std::endl; - return RENDER_STATUS_OK; -} - -RenderStatus WebRTCMediaSource::SetMediaSourceInfo(void *mediaInfo) -{ - LOG(INFO) << __FUNCTION__ << std::endl; - return RENDER_STATUS_OK; -} - -struct MediaSourceInfo WebRTCMediaSource::GetMediaSourceInfo() -{ - LOG(INFO) << __FUNCTION__ << std::endl; - return m_mediaSourceInfo; -} - -void *WebRTCMediaSource::GetSourceMetaData() -{ - LOG(INFO) << __FUNCTION__ << std::endl; - return NULL; -} - -bool WebRTCMediaSource::IsEOS() -{ - return false; -} - -RenderStatus WebRTCMediaSource::ChangeViewport(float yaw, float pitch) -{ - if ((int)yaw == m_yaw && (int)pitch == m_pitch) - return RENDER_STATUS_OK; - - LOG(INFO) << "yaw: " << yaw << ", pitch: " << pitch << std::endl; - - m_yaw = yaw; - m_pitch = pitch; - - int yawValue = m_yaw + 180; - int pitchValue = m_pitch; - int value = (pitchValue << 16) | (yawValue & 0xffff); - - std::string path = "/media/video/fov"; - std::string url = m_serverAddress + "/rooms/" + m_roomId + "/recordings/" + m_subId; - std::string content = "[{\"op\":\"replace\",\"path\":\"" + path + "\",\"value\":" + to_string(value) + "}]"; - - std::string response = CHttp::http_patch(url.c_str(), content); - - return RENDER_STATUS_OK; -} - -void WebRTCMediaSource::DeleteBuffer(uint8_t **buffer) -{ - std::unique_lock ulock(m_mutex); - m_free_queue.pop_back(); - - return; -} - -void WebRTCMediaSource::ClearRWPK(RegionWisePacking *rwpk) -{ - //LOG(INFO) << __FUNCTION__ << std::endl; - - return; -} - -SimpleBuffer::SimpleBuffer() - : m_data(NULL) - , m_size(0) - , m_max_size(0) -{ -} - -SimpleBuffer::~SimpleBuffer() -{ - if (m_data) - free(m_data); -} - -void SimpleBuffer::insert(const uint8_t *data, int size) -{ - if (!m_data) { - m_max_size = 1024 * 4; - m_data = (uint8_t *)malloc(m_max_size); - } - - if (size > m_max_size - m_size) { - int new_max_size = m_max_size; - while (size > new_max_size - m_size) { - new_max_size += 1024; - } - m_data = (uint8_t *)realloc(m_data, new_max_size); - m_max_size = new_max_size; - } - - 
memcpy(m_data + m_size, data, size); - m_size += size; -} - -WebRTCFFmpegVideoDecoder::WebRTCFFmpegVideoDecoder(WebRTCVideoRenderer *renderer) - : m_decCtx(NULL) - , m_decFrame(NULL) - , m_needKeyFrame(true) - , m_renderer(renderer) -{ - LOG(INFO) << "avcodec version: " - << ((avcodec_version() >> 16) & 0xff) << "." - << ((avcodec_version() >> 8) & 0xff) << "." - << ((avcodec_version()) & 0xff) - << std::endl; -} - -owt::base::VideoDecoderInterface* WebRTCFFmpegVideoDecoder::Copy() -{ - WebRTCFFmpegVideoDecoder* decoder = new WebRTCFFmpegVideoDecoder(m_renderer); - return decoder; -} - -WebRTCFFmpegVideoDecoder::~WebRTCFFmpegVideoDecoder() -{ - LOG(INFO) << __FUNCTION__ << std::endl; -} - -bool WebRTCFFmpegVideoDecoder::InitDecodeContext(owt::base::VideoCodec video_codec) -{ - LOG(INFO) << __FUNCTION__ << std::endl; - - if (!createDecoder(video_codec)) - return false; - - m_bitstream_buf = std::make_shared(); - return true; -} - -bool WebRTCFFmpegVideoDecoder::Release() -{ - LOG(INFO) << __FUNCTION__ << std::endl; - return true; -} - -bool WebRTCFFmpegVideoDecoder::OnEncodedFrame(unique_ptr frame) -{ - int ret; - - if (m_needKeyFrame) { - if (!frame->is_key_frame) - return false; - - m_needKeyFrame = false; - } - - m_bitstream_buf->resize(0); - m_bitstream_buf->insert(frame->buffer, frame->length); - - std::shared_ptr sei_buf = std::make_shared(); - filter_RWPK_SEI(m_bitstream_buf, sei_buf); - if (sei_buf->size() <= 0) { - LOG(ERROR) << "No valid rwpk sei in bitstream!" << std::endl; - return true; - } - - av_init_packet(&m_packet); - m_packet.data = const_cast(m_bitstream_buf->data()); - m_packet.size = m_bitstream_buf->size(); - m_packet.dts = frame->time_stamp; - m_packet.pts = frame->time_stamp; - - ret = avcodec_send_packet(m_decCtx, &m_packet); - if (ret < 0) { - LOG(ERROR) << "Error while send packet" << std::endl; - return false; - } - - m_sei_queue.push_back(sei_buf); - - while(true) { - ret = avcodec_receive_frame(m_decCtx, m_decFrame); - if (ret == AVERROR(EAGAIN)) { - return true; - }else if (ret < 0) { - LOG(ERROR) << "Error while receive frame" << std::endl; - return false; - } - - std::shared_ptr sei_buf_out; - if (m_sei_queue.empty()) { - LOG(ERROR) << "Empty rwpk sei queue!" 
<< std::endl; - continue; - } - - sei_buf = m_sei_queue.front(); - m_sei_queue.pop_front(); - - if (m_renderer) - m_renderer->RenderFrame(m_decFrame, sei_buf->data(), sei_buf->size()); - } - - return true; -} - -bool WebRTCFFmpegVideoDecoder::createDecoder(const owt::base::VideoCodec video_codec) -{ - int ret = 0; - AVCodecID codec_id = AV_CODEC_ID_NONE; - AVCodec* dec = NULL; - - switch (video_codec) { - case owt::base::VideoCodec::kH265: - codec_id = AV_CODEC_ID_H265; - LOG(INFO) << "video decoder HEVC" << std::endl; - break; - - default: - LOG(ERROR) << "Only support HEVC:" << (int32_t)video_codec << std::endl; - return false; - } - - dec = avcodec_find_decoder(codec_id); - if (!dec) { - LOG(ERROR) << "Could not find ffmpeg decoder: " << avcodec_get_name(codec_id) << std::endl; - return false; - } - - m_decCtx = avcodec_alloc_context3(dec); - if (!m_decCtx ) { - LOG(ERROR) << "Could not alloc ffmpeg decoder context" << std::endl; - return false; - } - - m_decCtx->active_thread_type = FF_THREAD_FRAME; - m_decCtx->thread_type = FF_THREAD_FRAME; - m_decCtx->thread_count = 16; - ret = avcodec_open2(m_decCtx, dec , NULL); - if (ret < 0) { - LOG(ERROR) << "Could not open ffmpeg decoder context" << std::endl; - return false; - } - - m_decFrame = av_frame_alloc(); - if (!m_decFrame) { - LOG(ERROR) << "Could not allocate dec frame" << std::endl; - return false; - } - - memset(&m_packet, 0, sizeof(m_packet)); - - LOG(INFO) << "Create decoder successfully" << std::endl; - return true; -} - -char *WebRTCFFmpegVideoDecoder::ff_err2str(int errRet) -{ - av_strerror(errRet, (char*)(&m_errbuff), 500); - return m_errbuff; -} - -VCD_NS_END - -#endif /* _ENABLE_WEBRTC_SOURCE_ */ diff --git a/src/player/WebRTCMediaSource.h b/src/player/WebRTCMediaSource.h deleted file mode 100644 index d8694911..00000000 --- a/src/player/WebRTCMediaSource.h +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Copyright (c) 2019, Intel Corporation - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - - * - */ - -//! -//! \file WebRTCMediaSource.h -//! \brief Defines class for WebRTCMediaSource. -//! 
-#ifdef _ENABLE_WEBRTC_SOURCE_ - -#ifndef _WebRTCMediaSource_H_ -#define _WebRTCMediaSource_H_ - -#include "MediaSource.h" - -#include -#include -#include - -#include "owt/base/exception.h" -#include "owt/base/videorendererinterface.h" -#include "owt/base/videodecoderinterface.h" -#include "owt/conference/conferenceclient.h" - -#include "360SCVPAPI.h" - -extern "C" { -#include -} - -VCD_NS_BEGIN - - -class WebRTCVideoFrame -{ -public: - WebRTCVideoFrame(AVFrame *frame, uint8_t *extra_data, int32_t extra_data_length); - - virtual ~WebRTCVideoFrame(); - - bool isValid() {return m_frame != NULL;}; - - uint8_t *m_buffer[3]; - uint8_t *m_rwpk_sei; - size_t m_rwpk_sei_length; - -private: - AVFrame *m_frame; -}; - -class WebRTCVideoRenderer -{ -public: - virtual ~WebRTCVideoRenderer() {} - - virtual int32_t RenderFrame(AVFrame *avFrame, uint8_t *extra_data, int32_t extra_data_length) = 0; -}; - -class WebRTCMediaSource : public MediaSource, public WebRTCVideoRenderer -{ - static WebRTCMediaSource *s_CurObj; - - static void join_on_success_callback(std::shared_ptr info); - static void join_on_failure_callback(std::unique_ptr err); - - static void subscribe_on_success_callback(std::shared_ptr subscription); - static void subscribe_on_failure_callback(std::unique_ptr err); - -public: - WebRTCMediaSource(); - virtual ~WebRTCMediaSource(); - //! \brief Get a frame from the Media Source - //! - //! [out] uint8_t ** - //! the frame buffer - //! [out] struct RegionInfo * - //! regionInfo - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus GetFrame(uint8_t **buffer, struct RegionInfo *regionInfo); - //! \brief Initial in DashMediaInfo - //! - //! \param [in] const char * - //! media url - //! - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus Initialize(struct RenderConfig renderConfig); - //! \brief Set Media Source Info - //! - //! \param [in] void * - //! mediaInfo - //! - //! \return void * - //! - virtual RenderStatus SetMediaSourceInfo(void *mediaInfo); - //! \brief Get Media Source Info - //! - //! \return struct MediaSourceInfo - //! - virtual struct MediaSourceInfo GetMediaSourceInfo(); - //! \brief Get SourceMetaData - //! - //! \return void* - //! - virtual void* GetSourceMetaData(); - //! \brief Check is player ends - //! - //! \return bool - //! - virtual bool IsEOS(); - //! \brief set yaw and pitch to change Viewport - //! - //! \param [in] float - //! yaw angle - //! [in] pitch - //! pitch angle - //! - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus ChangeViewport(float yaw, float pitch); - //! \brief set region information - //! - //! \param [in] struct RegionInfo* - //! regionInfo - //! - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! - virtual RenderStatus SetRegionInfo(struct RegionInfo* regionInfo); - //! \brief delete buffer data - //! - //! \param [in] uint8_t ** - //! buffer - //! - //! \brief delete Region Wise Packing data - //! - //! \param [in] RegionWisePacking * - //! rwpk - //! - virtual void ClearRWPK(RegionWisePacking *rwpk); - //! \return RenderStatus - //! RENDER_STATUS_OK if success, else fail reason - //! 
- virtual void DeleteBuffer(uint8_t **buffer); - - int getParam(int *flag, int* projType); - - int32_t RenderFrame(AVFrame *frame, uint8_t *extra_data, int32_t extra_data_length) override; - -private: - std::string m_serverAddress; - std::shared_ptr m_room; - std::string m_roomId; - std::shared_ptr m_mixed_stream; - std::string m_subId; - int m_yaw; - int m_pitch; - std::mutex m_mutex; - std::condition_variable m_cond; - - std::deque> m_webrtc_render_frame_queue; - std::deque> m_free_queue; - - param_360SCVP m_parserRWPKParam; - void* m_parserRWPKHandle; - RegionWisePacking m_RWPK; - - bool m_ready; - static uint32_t fullwidth,fullheight; - uint32_t lowwidth,lowheight,packedwidth,packedheight,frameRate,frameNum; - ProjectType projType; -}; - -class SimpleBuffer { -public: - SimpleBuffer(); - virtual ~SimpleBuffer(); - - void insert(const uint8_t *data, int size); - void resize(int new_size) {m_size = new_size <= m_max_size ? new_size : 0;} - - uint8_t *data() {return m_data;} - int size() {return m_size;} - -private: - uint8_t *m_data; - int m_size; - int m_max_size; -}; - -class WebRTCFFmpegVideoDecoder : public owt::base::VideoDecoderInterface { -public: - WebRTCFFmpegVideoDecoder(WebRTCVideoRenderer *renderer); - ~WebRTCFFmpegVideoDecoder(); - - bool InitDecodeContext(owt::base::VideoCodec video_codec) override; - owt::base::VideoDecoderInterface* Copy() override; - bool Release() override; - - bool OnEncodedFrame(std::unique_ptr frame) override; - -protected: - bool createDecoder(const owt::base::VideoCodec video_codec); - -private: - AVCodecContext *m_decCtx; - AVFrame *m_decFrame; - AVPacket m_packet; - - bool m_needKeyFrame; - - std::shared_ptr m_bitstream_buf; - std::deque> m_sei_queue; - - WebRTCVideoRenderer *m_renderer; - - char m_errbuff[500]; - char *ff_err2str(int errRet); -}; - -VCD_NS_END; -#endif /* _WebRTCMediaSource_H_ */ - -#endif /* _ENABLE_WEBRTC_SOURCE_ */ diff --git a/src/player/app/CMakeLists.txt b/src/player/app/CMakeLists.txt new file mode 100644 index 00000000..bcdabb02 --- /dev/null +++ b/src/player/app/CMakeLists.txt @@ -0,0 +1,59 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) + +OPTION(LINUX_OS + "Use linux os" + ON +) + +OPTION(ANDROID_OS + "Use android os" + OFF +) + +PROJECT(app) + +IF(LINUX_OS) + ADD_DEFINITIONS("-g -c -fPIC -lglog -std=c++11 -fpermissive") +ENDIF() + +IF(LINUX_OS) + ADD_DEFINITIONS("-D_LINUX_OS_") +ENDIF() + +IF(ANDROID_OS) + ADD_DEFINITIONS("-D_ANDROID_OS_") +ENDIF() + +IF(LINUX_OS) + INCLUDE_DIRECTORIES(/usr/local/include ../../utils) + LINK_DIRECTORIES(/usr/local/lib) +ENDIF() + +AUX_SOURCE_DIRECTORY(. 
DIR_SRC) +AUX_SOURCE_DIRECTORY(android ANDROID_SRC) +AUX_SOURCE_DIRECTORY(linux LINUX_SRC) +IF(LINUX_OS) + SET(DIR_SRC + ${DIR_SRC} + ${LINUX_SRC} + ) + IF(DEFINED WEBRTC_LINUX_SDK) + INCLUDE_DIRECTORIES("${WEBRTC_LINUX_SDK}/include") + LINK_DIRECTORIES("${WEBRTC_LINUX_SDK}/lib") + ENDIF() +ENDIF() + +IF(ANDROID_OS) + SET(DIR_SRC + ${DIR_SRC} + ${ANDROID_SRC} + ) +ENDIF() + +ADD_EXECUTABLE(render ${DIR_SRC}) + +IF(LINUX_OS) + SET(LINK_LIB ${LINK_LIB} MediaPlayer glog glfw GL GLU safestring_shared) +ENDIF() + +TARGET_LINK_LIBRARIES(render ${LINK_LIB}) diff --git a/src/player/app/android/.gradle/4.6/fileChanges/last-build.bin b/src/player/app/android/.gradle/4.6/fileChanges/last-build.bin new file mode 100644 index 00000000..f76dd238 Binary files /dev/null and b/src/player/app/android/.gradle/4.6/fileChanges/last-build.bin differ diff --git a/src/player/app/android/.gradle/4.6/fileContent/fileContent.lock b/src/player/app/android/.gradle/4.6/fileContent/fileContent.lock new file mode 100644 index 00000000..ac89e1be Binary files /dev/null and b/src/player/app/android/.gradle/4.6/fileContent/fileContent.lock differ diff --git a/src/player/app/android/.gradle/4.6/fileHashes/resourceHashesCache.bin b/src/player/app/android/.gradle/4.6/fileHashes/resourceHashesCache.bin new file mode 100644 index 00000000..81287bee Binary files /dev/null and b/src/player/app/android/.gradle/4.6/fileHashes/resourceHashesCache.bin differ diff --git a/src/player/app/android/.gradle/4.6/javaCompile/jarAnalysis.bin b/src/player/app/android/.gradle/4.6/javaCompile/jarAnalysis.bin new file mode 100644 index 00000000..fb7bb25f Binary files /dev/null and b/src/player/app/android/.gradle/4.6/javaCompile/jarAnalysis.bin differ diff --git a/OMAF-Sample/client/.gitkeep b/src/player/app/android/.gradle/vcsWorkingDirs/gc.properties similarity index 100% rename from OMAF-Sample/client/.gitkeep rename to src/player/app/android/.gradle/vcsWorkingDirs/gc.properties diff --git a/src/player/app/android/.idea/caches/gradle_models.ser b/src/player/app/android/.idea/caches/gradle_models.ser new file mode 100644 index 00000000..b0699dcd Binary files /dev/null and b/src/player/app/android/.idea/caches/gradle_models.ser differ diff --git a/src/player/app/android/.idea/codeStyles/Project.xml b/src/player/app/android/.idea/codeStyles/Project.xml new file mode 100644 index 00000000..681f41ae --- /dev/null +++ b/src/player/app/android/.idea/codeStyles/Project.xml @@ -0,0 +1,116 @@ + + + + + + + +
+ [Android Studio default code-style arrangement rules for XML attributes: xmlns:android first, then xmlns:.*, .*:id, .*:name, name, style, remaining android-namespace attributes in ANDROID_ATTRIBUTE_ORDER, all other attributes BY_NAME]
\ No newline at end of file diff --git a/src/player/app/android/.idea/gradle.xml b/src/player/app/android/.idea/gradle.xml new file mode 100644 index 00000000..169fd0dd --- /dev/null +++ b/src/player/app/android/.idea/gradle.xml @@ -0,0 +1,19 @@ + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__android_arch_core_common_1_1_1_jar.xml b/src/player/app/android/.idea/libraries/Gradle__android_arch_core_common_1_1_1_jar.xml new file mode 100644 index 00000000..bf16fff6 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__android_arch_core_common_1_1_1_jar.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__android_arch_core_runtime_1_1_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__android_arch_core_runtime_1_1_1_aar.xml new file mode 100644 index 00000000..044cbe22 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__android_arch_core_runtime_1_1_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_common_1_1_1_jar.xml b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_common_1_1_1_jar.xml new file mode 100644 index 00000000..27b18675 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_common_1_1_1_jar.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_livedata_1_1_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_livedata_1_1_1_aar.xml new file mode 100644 index 00000000..ccec325b --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_livedata_1_1_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_livedata_core_1_1_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_livedata_core_1_1_1_aar.xml new file mode 100644 index 00000000..9ff9df29 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_livedata_core_1_1_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_runtime_1_1_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_runtime_1_1_1_aar.xml new file mode 100644 index 00000000..5e034cb6 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_runtime_1_1_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_viewmodel_1_1_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_viewmodel_1_1_1_aar.xml new file mode 100644 index 00000000..ca085b6d --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__android_arch_lifecycle_viewmodel_1_1_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_animated_vector_drawable_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_animated_vector_drawable_28_0_0_aar.xml new file mode 100644 index 00000000..ccd7950a --- /dev/null +++ 
b/src/player/app/android/.idea/libraries/Gradle__com_android_support_animated_vector_drawable_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_appcompat_v7_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_appcompat_v7_28_0_0_aar.xml new file mode 100644 index 00000000..fec9f18a --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_appcompat_v7_28_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_asynclayoutinflater_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_asynclayoutinflater_28_0_0_aar.xml new file mode 100644 index 00000000..3576f4d8 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_asynclayoutinflater_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_collections_28_0_0_jar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_collections_28_0_0_jar.xml new file mode 100644 index 00000000..80e4d731 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_collections_28_0_0_jar.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_constraint_constraint_layout_1_1_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_constraint_constraint_layout_1_1_0_aar.xml new file mode 100644 index 00000000..193419af --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_constraint_constraint_layout_1_1_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_constraint_constraint_layout_solver_1_1_0_jar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_constraint_constraint_layout_solver_1_1_0_jar.xml new file mode 100644 index 00000000..3297c5bd --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_constraint_constraint_layout_solver_1_1_0_jar.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_coordinatorlayout_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_coordinatorlayout_28_0_0_aar.xml new file mode 100644 index 00000000..ea4d04b8 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_coordinatorlayout_28_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_cursoradapter_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_cursoradapter_28_0_0_aar.xml new file mode 100644 index 00000000..5a3ff476 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_cursoradapter_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_customview_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_customview_28_0_0_aar.xml new file 
mode 100644 index 00000000..67dbd6ae --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_customview_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_documentfile_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_documentfile_28_0_0_aar.xml new file mode 100644 index 00000000..90bf2713 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_documentfile_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_drawerlayout_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_drawerlayout_28_0_0_aar.xml new file mode 100644 index 00000000..ea52da12 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_drawerlayout_28_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_interpolator_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_interpolator_28_0_0_aar.xml new file mode 100644 index 00000000..c4d0afa6 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_interpolator_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_loader_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_loader_28_0_0_aar.xml new file mode 100644 index 00000000..6f00913b --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_loader_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_localbroadcastmanager_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_localbroadcastmanager_28_0_0_aar.xml new file mode 100644 index 00000000..292c7a96 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_localbroadcastmanager_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_print_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_print_28_0_0_aar.xml new file mode 100644 index 00000000..fd909617 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_print_28_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_slidingpanelayout_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_slidingpanelayout_28_0_0_aar.xml new file mode 100644 index 00000000..8c86bd15 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_slidingpanelayout_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_annotations_28_0_0_jar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_annotations_28_0_0_jar.xml new file mode 100644 index 00000000..0fdecce0 --- /dev/null +++ 
b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_annotations_28_0_0_jar.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_compat_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_compat_28_0_0_aar.xml new file mode 100644 index 00000000..3a6d0d3f --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_compat_28_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_core_ui_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_core_ui_28_0_0_aar.xml new file mode 100644 index 00000000..852f1e44 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_core_ui_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_core_utils_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_core_utils_28_0_0_aar.xml new file mode 100644 index 00000000..9d8c489e --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_core_utils_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_fragment_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_fragment_28_0_0_aar.xml new file mode 100644 index 00000000..a85c856c --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_fragment_28_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_media_compat_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_media_compat_28_0_0_aar.xml new file mode 100644 index 00000000..0d50228c --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_media_compat_28_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_v4_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_v4_28_0_0_aar.xml new file mode 100644 index 00000000..a162b542 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_v4_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_vector_drawable_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_vector_drawable_28_0_0_aar.xml new file mode 100644 index 00000000..3d034c65 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_support_vector_drawable_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_swiperefreshlayout_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_swiperefreshlayout_28_0_0_aar.xml new file mode 100644 index 
00000000..4d3dfcfb --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_swiperefreshlayout_28_0_0_aar.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_core_3_0_2_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_core_3_0_2_aar.xml new file mode 100644 index 00000000..d302e172 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_core_3_0_2_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_idling_resource_3_0_2_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_idling_resource_3_0_2_aar.xml new file mode 100644 index 00000000..44491ef8 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_idling_resource_3_0_2_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_monitor_1_0_2_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_monitor_1_0_2_aar.xml new file mode 100644 index 00000000..79e2714c --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_monitor_1_0_2_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_runner_1_0_2_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_runner_1_0_2_aar.xml new file mode 100644 index 00000000..1e218661 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_test_runner_1_0_2_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_versionedparcelable_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_versionedparcelable_28_0_0_aar.xml new file mode 100644 index 00000000..324149de --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_versionedparcelable_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_android_support_viewpager_28_0_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_android_support_viewpager_28_0_0_aar.xml new file mode 100644 index 00000000..c9ca3b8c --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_android_support_viewpager_28_0_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_2_6_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_2_6_1_aar.xml new file mode 100644 index 00000000..93621efe --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_2_6_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_core_2_6_1_aar.xml 
b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_core_2_6_1_aar.xml new file mode 100644 index 00000000..bf32da93 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_core_2_6_1_aar.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_dash_2_6_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_dash_2_6_1_aar.xml new file mode 100644 index 00000000..e57b8818 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_dash_2_6_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_hls_2_6_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_hls_2_6_1_aar.xml new file mode 100644 index 00000000..39bc11a7 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_hls_2_6_1_aar.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_smoothstreaming_2_6_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_smoothstreaming_2_6_1_aar.xml new file mode 100644 index 00000000..3218bbf0 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_smoothstreaming_2_6_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_ui_2_6_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_ui_2_6_1_aar.xml new file mode 100644 index 00000000..79d17e3a --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_exoplayer_ui_2_6_1_aar.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_extension_gvr_2_6_1_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_extension_gvr_2_6_1_aar.xml new file mode 100644 index 00000000..889da6de --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_android_exoplayer_extension_gvr_2_6_1_aar.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_2_0_1_jar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_2_0_1_jar.xml new file mode 100644 index 00000000..947e2512 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_2_0_1_jar.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_protobuf_nano_protobuf_javanano_3_1_0_jar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_protobuf_nano_protobuf_javanano_3_1_0_jar.xml new file mode 100644 index 00000000..cfd1c027 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_protobuf_nano_protobuf_javanano_3_1_0_jar.xml @@ -0,0 +1,11 @@ + + + + + + 
+ + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_audio_1_80_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_audio_1_80_0_aar.xml new file mode 100644 index 00000000..c2ca6470 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_audio_1_80_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_base_1_160_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_base_1_160_0_aar.xml new file mode 100644 index 00000000..25deb3e5 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_base_1_160_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_common_1_160_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_common_1_160_0_aar.xml new file mode 100644 index 00000000..80b45af4 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_common_1_160_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_controller_1_160_0_aar.xml b/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_controller_1_160_0_aar.xml new file mode 100644 index 00000000..5e646954 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_google_vr_sdk_controller_1_160_0_aar.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__com_squareup_javawriter_2_1_1_jar.xml b/src/player/app/android/.idea/libraries/Gradle__com_squareup_javawriter_2_1_1_jar.xml new file mode 100644 index 00000000..5c4dd4e8 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__com_squareup_javawriter_2_1_1_jar.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__javax_inject_javax_inject_1_jar.xml b/src/player/app/android/.idea/libraries/Gradle__javax_inject_javax_inject_1_jar.xml new file mode 100644 index 00000000..f9c9a306 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__javax_inject_javax_inject_1_jar.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__junit_junit_4_12_jar.xml b/src/player/app/android/.idea/libraries/Gradle__junit_junit_4_12_jar.xml new file mode 100644 index 00000000..f4f25a81 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__junit_junit_4_12_jar.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__net_sf_kxml_kxml2_2_3_0_jar.xml b/src/player/app/android/.idea/libraries/Gradle__net_sf_kxml_kxml2_2_3_0_jar.xml new file mode 100644 index 00000000..5855a297 --- /dev/null +++ b/src/player/app/android/.idea/libraries/Gradle__net_sf_kxml_kxml2_2_3_0_jar.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3_jar.xml b/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3_jar.xml new file mode 100644 index 00000000..50cf2b92 --- /dev/null +++ 
b/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3_jar.xml
@@ -0,0 +1,11 @@
+ + + + + + + + + + +
\ No newline at end of file
diff --git a/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_integration_1_3_jar.xml b/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_integration_1_3_jar.xml
new file mode 100644
index 00000000..a5eaca5b
--- /dev/null
+++ b/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_integration_1_3_jar.xml
@@ -0,0 +1,11 @@
+ + + + + + + + + + +
\ No newline at end of file
diff --git a/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_library_1_3_jar.xml b/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_library_1_3_jar.xml
new file mode 100644
index 00000000..b4dabdc1
--- /dev/null
+++ b/src/player/app/android/.idea/libraries/Gradle__org_hamcrest_hamcrest_library_1_3_jar.xml
@@ -0,0 +1,11 @@
+ + + + + + + + + + +
\ No newline at end of file
diff --git a/src/player/app/android/.idea/misc.xml b/src/player/app/android/.idea/misc.xml
new file mode 100644
index 00000000..dfd2c799
--- /dev/null
+++ b/src/player/app/android/.idea/misc.xml
@@ -0,0 +1,9 @@
+ + + + + + + +
\ No newline at end of file
diff --git a/src/player/app/android/.idea/runConfigurations.xml b/src/player/app/android/.idea/runConfigurations.xml
new file mode 100644
index 00000000..7f68460d
--- /dev/null
+++ b/src/player/app/android/.idea/runConfigurations.xml
@@ -0,0 +1,12 @@
+ + + + +
\ No newline at end of file
diff --git a/src/player/app/android/.idea/vcs.xml b/src/player/app/android/.idea/vcs.xml
new file mode 100644
index 00000000..4fce1d86
--- /dev/null
+++ b/src/player/app/android/.idea/vcs.xml
@@ -0,0 +1,6 @@
+ + + + + +
\ No newline at end of file
diff --git a/src/player/app/android/README.md b/src/player/app/android/README.md
new file mode 100644
index 00000000..b0ca3da5
--- /dev/null
+++ b/src/player/app/android/README.md
@@ -0,0 +1,46 @@
+# Android player getting-started guide
+
+------
+
+### 1. How to get libMediaPlayer.so and its dependent libraries using the NDK
+```bash
+# Clone this repository first, then:
+cd src/external
+./prebuild_android.sh
+./make_android.sh
+```
+The libraries can then be found at the following paths:
+| Library | Path |
+| -------- | -----: |
+| libglog.so | src/build/external/android/glog/build/ |
+| libssl.so | src/build/external/android/openssl-output/lib/ |
+| libcrypto.so | src/build/external/android/openssl-output/lib/ |
+| libcurl.so | src/build/external/android/curl-output/arm64-v8a/lib/ |
+| lib360SCVP.so | src/build/android/360SCVP/ |
+| libdashparser.a | src/build/android/isolib/dash_parser/ |
+| libOmafDashAccess.so | src/build/android/OmafDashAccess/ |
+| libMediaPlayer.so | src/build/android/player/player_lib/ |
+
+Copy the libraries to "./player/app/android/app/src/main/jniLibs/arm64-v8a/", as sketched below.
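+The copy step can be scripted. The following is a minimal sketch based on the output paths in the table above; it assumes the commands are run from the repository root (the app lives under src/player/app/android), so adjust the paths if your layout or build output differs. Only the shared (.so) libraries need to go into jniLibs, since Gradle packages only .so files from that directory; the static libdashparser.a is not packaged into the APK.
+```bash
+# Sketch: copy the NDK build outputs into the app's jniLibs directory.
+# The destination path is an assumption based on this project layout.
+JNILIBS=src/player/app/android/app/src/main/jniLibs/arm64-v8a
+mkdir -p "${JNILIBS}"
+cp src/build/external/android/glog/build/libglog.so "${JNILIBS}/"
+cp src/build/external/android/openssl-output/lib/libssl.so "${JNILIBS}/"
+cp src/build/external/android/openssl-output/lib/libcrypto.so "${JNILIBS}/"
+cp src/build/external/android/curl-output/arm64-v8a/lib/libcurl.so "${JNILIBS}/"
+cp src/build/android/360SCVP/lib360SCVP.so "${JNILIBS}/"
+cp src/build/android/OmafDashAccess/libOmafDashAccess.so "${JNILIBS}/"
+cp src/build/android/player/player_lib/libMediaPlayer.so "${JNILIBS}/"
+```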
+### 2. Prerequisites
+To build the whole Android project, the following prerequisites must be in place.
+```bash
+Install Android Studio 3.5.1
+JRE: 1.8.0_202-release-1483-b49-5587405 amd64
+JVM: OpenJDK 64-Bit Server VM by JetBrains s.r.o
+
+Android Gradle Plugin version = 3.5.1
+Gradle version = 5.4.1
+Compile SDK version = API 29: Android 10.0 (Q)
+Build Tools version = 29.0.3
+Install and configure the NDK in Android Studio
+
+ARM processor: arm64-v8a
+```
+
+### 3. How to run the Android player project
+
+> 1. Once the required libraries and the tools mentioned above are installed, open the "player/app/android" project with Android Studio.
+> 2. Check that the Android phone is recognized as a running device.
+> 3. Check the input parameters in assets/cfg.json.
+> 4. Click the green 'Run' button.
diff --git a/src/player/app/android/android.iml b/src/player/app/android/android.iml
new file mode 100644
index 00000000..686bd2aa
--- /dev/null
+++ b/src/player/app/android/android.iml
@@ -0,0 +1,19 @@
+ + + + + + + + + + + + + + + + +
\ No newline at end of file
diff --git a/src/player/app/android/app/.gitignore b/src/player/app/android/app/.gitignore
new file mode 100644
index 00000000..796b96d1
--- /dev/null
+++ b/src/player/app/android/app/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/src/player/app/android/app/CMakeLists.txt b/src/player/app/android/app/CMakeLists.txt
new file mode 100644
index 00000000..0f1196db
--- /dev/null
+++ b/src/player/app/android/app/CMakeLists.txt
@@ -0,0 +1,44 @@
+# For more information about using CMake with Android Studio, read the
+# documentation: https://d.android.com/studio/projects/add-native-code.html
+
+# Sets the minimum version of CMake required to build the native library.
+
+cmake_minimum_required(VERSION 3.4.1)
+
+# Creates and names a library, sets it as either STATIC
+# or SHARED, and provides the relative paths to its source code.
+# You can define multiple libraries, and CMake builds them for you.
+# Gradle automatically packages shared libraries with your APK.
+
+add_library( # Sets the name of the library.
+             native-lib
+
+             # Sets the library as a shared library.
+             SHARED
+
+             # Provides a relative path to your source file(s).
+             src/main/cpp/native-lib.cpp)
+
+# Searches for a specified prebuilt library and stores the path as a
+# variable. Because CMake includes system libraries in the search path by
+# default, you only need to specify the name of the public NDK library
+# you want to add. CMake verifies that the library exists before
+# completing its build.
+
+find_library( # Sets the name of the path variable.
+              log-lib
+
+              # Specifies the name of the NDK library that
+              # you want CMake to locate.
+              log)
+
+# Specifies libraries CMake should link to your target library. You
+# can link multiple libraries, such as libraries you define in this
+# build script, prebuilt third-party libraries, or system libraries.
+
+target_link_libraries( # Specifies the target library.
+                       native-lib
+
+                       # Links the target library to the log library
+                       # included in the NDK.
+ ${log-lib}) \ No newline at end of file diff --git a/src/player/app/android/app/build.gradle b/src/player/app/android/app/build.gradle new file mode 100644 index 00000000..e8ddddd7 --- /dev/null +++ b/src/player/app/android/app/build.gradle @@ -0,0 +1,51 @@ +apply plugin: 'com.android.application' + +android { + sourceSets { + main{ + jniLibs.srcDirs = ['src/main/jniLibs','libs','libs/arm64-v8a'] + } + } + compileSdkVersion 28 + defaultConfig { + applicationId "com.vcd.immersive.omafplayer" + minSdkVersion 25 + targetSdkVersion 26 + versionCode 1 + versionName "1.0" + testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" + externalNativeBuild { + cmake { + cppFlags "-std=c++11" + } + } + } + buildTypes { + release { + minifyEnabled true + proguardFiles.add(file('../../gvr-android-sdk-1.200/proguard-gvr.txt')) + //minifyEnabled false + //proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } + externalNativeBuild { + cmake { + path "CMakeLists.txt" + } + } +} + +dependencies { + implementation fileTree(include: ['*.jar'], dir: 'libs') + implementation 'com.android.support:appcompat-v7:28.0.0' + implementation 'com.android.support.constraint:constraint-layout:1.1.0' + implementation 'com.android.support:support-v4:28.0.0' + implementation 'com.android.support:support-annotations:28.0.0' + testImplementation 'junit:junit:4.12' + androidTestImplementation 'com.android.support.test:runner:1.0.2' + androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2' + implementation 'com.google.vr:sdk-base:1.160.0' + implementation 'com.google.vr:sdk-controller:1.160.0' + + implementation files('libs/jna.jar') +} diff --git a/src/player/app/android/app/libs/arm64-v8a/libjnidispatch.so b/src/player/app/android/app/libs/arm64-v8a/libjnidispatch.so new file mode 100644 index 00000000..61553b21 Binary files /dev/null and b/src/player/app/android/app/libs/arm64-v8a/libjnidispatch.so differ diff --git a/src/player/app/android/app/libs/jna.jar b/src/player/app/android/app/libs/jna.jar new file mode 100644 index 00000000..af5dd080 Binary files /dev/null and b/src/player/app/android/app/libs/jna.jar differ diff --git a/src/player/app/android/app/proguard-rules.pro b/src/player/app/android/app/proguard-rules.pro new file mode 100644 index 00000000..f1b42451 --- /dev/null +++ b/src/player/app/android/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. 
+#-renamesourcefileattribute SourceFile diff --git a/src/player/app/android/app/src/androidTest/java/com/vcd/immersive/omafplayer/ExampleInstrumentedTest.java b/src/player/app/android/app/src/androidTest/java/com/vcd/immersive/omafplayer/ExampleInstrumentedTest.java new file mode 100644 index 00000000..cfafcdd6 --- /dev/null +++ b/src/player/app/android/app/src/androidTest/java/com/vcd/immersive/omafplayer/ExampleInstrumentedTest.java @@ -0,0 +1,26 @@ +package com.vcd.immersive.omafplayer; + +import android.content.Context; +import android.support.test.InstrumentationRegistry; +import android.support.test.runner.AndroidJUnit4; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import static org.junit.Assert.*; + +/** + * Instrumented test, which will execute on an Android device. + * + * @see Testing documentation + */ +@RunWith(AndroidJUnit4.class) +public class ExampleInstrumentedTest { + @Test + public void useAppContext() { + // Context of the app under test. + Context appContext = InstrumentationRegistry.getTargetContext(); + + assertEquals("com.vcd.immersive.omafplayer", appContext.getPackageName()); + } +} diff --git a/src/player/app/android/app/src/main/AndroidManifest.xml b/src/player/app/android/app/src/main/AndroidManifest.xml new file mode 100644 index 00000000..86727ae0 --- /dev/null +++ b/src/player/app/android/app/src/main/AndroidManifest.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/player/app/android/app/src/main/assets/cfg.json b/src/player/app/android/app/src/main/assets/cfg.json new file mode 100644 index 00000000..a2f727c6 --- /dev/null +++ b/src/player/app/android/app/src/main/assets/cfg.json @@ -0,0 +1,19 @@ +{ + "windowWidth": 960, + "windowHeight": 960, + "url": "http://xxx.xxx.xxx.xxx:xxx/OMAFLive_8k/Test.mpd", + "sourceType": 0, + "enableExtractor": false, + "viewportHFOV": 80, + "viewportVFOV": 80, + "viewportWidth": 960, + "viewportHeight": 960, + "cachePath": "sdcard/Android/data/tmp/", + "maxVideoDecodeWidth": 4096, + "maxVideoDecodeHeight": 2304, + "predict": { + "enable": false, + "name": "", + "path": "" + } +} diff --git a/src/player/app/android/app/src/main/cpp/native-lib.cpp b/src/player/app/android/app/src/main/cpp/native-lib.cpp new file mode 100644 index 00000000..4a953ac0 --- /dev/null +++ b/src/player/app/android/app/src/main/cpp/native-lib.cpp @@ -0,0 +1,10 @@ +#include +#include + +extern "C" JNIEXPORT jstring JNICALL +Java_com_vcd_immersive_omafplayer_MainActivity_stringFromJNI( + JNIEnv *env, + jobject /* this */) { + std::string hello = "Hello from C++"; + return env->NewStringUTF(hello.c_str()); +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MainActivity.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MainActivity.java new file mode 100644 index 00000000..57ed6ecd --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MainActivity.java @@ -0,0 +1,29 @@ +package com.vcd.immersive.omafplayer; + +import android.support.v7.app.AppCompatActivity; +import android.os.Bundle; +import android.widget.TextView; + +public class MainActivity extends AppCompatActivity { + + // Used to load the 'native-lib' library on application startup. 
+ static { + System.loadLibrary("native-lib"); + } + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_main); + + // Example of a call to a native method + TextView tv = (TextView) findViewById(R.id.sample_text); + tv.setText(stringFromJNI()); + } + + /** + * A native method that is implemented by the 'native-lib' native library, + * which is packaged with this application. + */ + public native String stringFromJNI(); +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MediaLoader.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MediaLoader.java new file mode 100644 index 00000000..6bf8f59c --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MediaLoader.java @@ -0,0 +1,342 @@ +package com.vcd.immersive.omafplayer; + +import android.content.Context; +import android.content.Intent; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.SurfaceTexture; +import android.os.AsyncTask; +import android.support.annotation.AnyThread; +import android.support.annotation.MainThread; +import android.util.Log; +import android.util.Pair; +import android.view.Surface; + +import com.vcd.immersive.omafplayer.MediaPlayer.NativeMediaPlayer; +import com.vcd.immersive.omafplayer.Rendering.CubeMapMesh; +import com.vcd.immersive.omafplayer.Rendering.ERPMesh; +import com.vcd.immersive.omafplayer.Rendering.Mesh; +import com.vcd.immersive.omafplayer.Rendering.SceneRenderer; +import com.vcd.immersive.omafplayer.Rendering.Utils; + +import static com.vcd.immersive.omafplayer.Rendering.Utils.checkGlError; + +/** + * MediaLoader takes an Intent from the user and loads the specified media file. + * + *

The process to load media requires multiple threads since the media is read from disk on a + * background thread, but it needs to be loaded into the GL scene only after GL initialization is + * complete. + * + *

To keep the sample simple, this class doesn't have any support for handling multiple Intents + * within a single Activity lifecycle. + * + *

The Intent used to launch {@link VideoActivity} or {@link VrVideoActivity} is parsed by this + * class and the extra & data fields are extracted. The data field should have a URI usable by + * {@link MediaPlayer} or {@link BitmapFactory}. There should also be an integer extra matching one + * of the MEDIA_* types in {@link Mesh}. + * + *

Example intents compatible with adb are: + *

    + *
  • + * A top-bottom stereo image in the VR Activity. + * adb shell am start -a android.intent.action.VIEW \ + * -n com.google.vr.sdk.samples.video360/.VrVideoActivity \ + * -d "file:///sdcard/IMAGE.JPG" \ + * --ei stereoFormat 2 + * + *
  • + * A monoscopic video in the 2D Activity. + * adb shell am start -a android.intent.action.VIEW \ + * -n com.google.vr.sdk.samples.video360/.VideoActivity \ + * -d "file:///sdcard/VIDEO.MP4" \ + * --ei stereoFormat 0 + * + *
+ * + *

This sample does not validiate that a given file is readable by the Android media decoders. + * You should validate that the file plays on your target devices via + * adb shell am start -a android.intent.action.VIEW -t video/mpeg -d "file:///VIDEO.MP4" + */ +public class MediaLoader { + private static final String TAG = "MediaLoader"; + + public static final String MEDIA_FORMAT_KEY = "stereoFormat"; + private static final int MAX_SURFACE_NUM = 5; + + /** A spherical mesh for video should be large enough that there are no stereo artifacts. */ + private static final int SPHERE_RADIUS_METERS = 50; + + /** These should be configured based on the video type. But this sample assumes 360 video. */ + private static final int DEFAULT_SPHERE_VERTICAL_DEGREES = 180; + private static final int DEFAULT_SPHERE_HORIZONTAL_DEGREES = 360; + + /** The 360 x 180 sphere has 15 degree quads. Increase these if lines in your video look wavy. */ + private static final int DEFAULT_SPHERE_ROWS = 12; + private static final int DEFAULT_SPHERE_COLUMNS = 24; + + private static final int PF_ERP = 0; + private static final int PF_CUBEMAP = 1; + + private final Context context; + // This can be replaced by any media player that renders to a Surface. In a real app, this + // media player would be separated from the rendering code. It is left in this class for + // simplicity. + // This should be set or cleared in a synchronized manner. + NativeMediaPlayer mediaPlayer; + + // Due to the slow loading media times, it's possible to tear down the app before mediaPlayer is + // ready. In that case, abandon all the pending work. + // This should be set or cleared in a synchronized manner. + private boolean isDestroyed = false; + + // The type of mesh created depends on the type of media. + Mesh mesh; + // The sceneRenderer is set after GL initialization is complete. + private SceneRenderer sceneRenderer; + // The displaySurface is configured after both GL initialization and media loading. + private Surface[] decodeSurface = new Surface[MAX_SURFACE_NUM]; + private Surface displaySurface; + + // The actual work of loading media happens on a background thread. + private MediaLoaderTask mediaLoaderTask; + + public MediaLoader(Context context) { + this.context = context; + } + + /** + * Loads custom videos based on the Intent or load the default video. See the Javadoc for this + * class for information on generating a custom intent via adb. + */ + public void handleIntent(Intent intent, VideoUiView uiView) { + // Load the bitmap in a background thread to avoid blocking the UI thread. This operation can + // take 100s of milliseconds. + // Note that this sample doesn't cancel any pending mediaLoaderTasks since it assumes only one + // Intent will ever be fired for a single Activity lifecycle. + mediaLoaderTask = new MediaLoaderTask(uiView); + mediaLoaderTask.execute(intent); + } + + /** Notifies MediaLoader that GL components have initialized. */ + public void onGlSceneReady(SceneRenderer sceneRenderer) { + this.sceneRenderer = sceneRenderer; + Log.i(TAG, "Scene ready!"); + if (mediaPlayer != null) + this.sceneRenderer.setMediaPlayer(mediaPlayer); + displayWhenReady();//real operate + } + + /** + * Helper class to media loading. This accesses the disk and decodes images so it needs to run in + * the background. + */ + private class MediaLoaderTask extends AsyncTask { + private final VideoUiView uiView; + + public MediaLoaderTask(VideoUiView uiView) { + this.uiView = uiView; + } + + @Override + protected Void doInBackground(Intent... 
intent) { + + mediaPlayer = new NativeMediaPlayer(context); + Log.i(TAG, "Create native media player!"); + int ret = mediaPlayer.Initialize(); + if (ret != 0) + { + Log.e(TAG, "native media player init failed!"); + return null; + } + + displayWhenReady(); + return null; + } + + @Override + public void onPostExecute(Void unused) { + // Set or clear the UI's mediaPlayer on the UI thread. + if (uiView != null) { + uiView.setMediaPlayer(mediaPlayer); + } + if (sceneRenderer != null) + sceneRenderer.setMediaPlayer(mediaPlayer); + } + } + + /** + * Creates the 3D scene and load the media after sceneRenderer & mediaPlayer are ready. This can + * run on the GL Thread or a background thread. + */ + @AnyThread + private synchronized void displayWhenReady() { + if (isDestroyed) { + // This only happens when the Activity is destroyed immediately after creation. + if (mediaPlayer != null) { + mediaPlayer.Close(); + mediaPlayer = null; + } + return; + } + + if (displaySurface != null || decodeSurface[0] != null) { + // Avoid double initialization caused by sceneRenderer & mediaPlayer being initialized before + // displayWhenReady is executed. + return; + } + if (mediaPlayer == null) Log.i(TAG, "media player is null"); + if (sceneRenderer == null) Log.i(TAG, "scene renderer is null"); + if (mediaPlayer == null || sceneRenderer == null) { + // Wait for everything to be initialized. + Log.i(TAG, "wait to init!"); + return; + } + // The important methods here are the setSurface & lockCanvas calls. These will have to happen + // after the GLView is created. + if (mediaPlayer != null && sceneRenderer.decode_surface_ready) { + // 1. create decode surfaces and set them to native player. + for (int i=0;i decoder_surface = sceneRenderer.createDecodeSurface( + mediaPlayer.mConfig.maxVideoDecodeWidth, mediaPlayer.mConfig.maxVideoDecodeHeight, i); + Log.i(TAG, "Complete to create one decode surface! surface id is " + i); + mediaPlayer.SetDecodeSurface(decoder_surface.second, decoder_surface.first, i);//set surface + Log.i(TAG, "ready to set decode surface!"); + decodeSurface[i] = decoder_surface.second; + Log.i(TAG, "decode id in java " + decoder_surface.first); + } + // 2. create native player and get display width and height and projection format + int ret = mediaPlayer.Create("./config.xml"); + if (ret != 0) + { + Log.e(TAG, "native media player create failed!"); + return; + } + // 3. create mesh according to PF + int stereoFormat = Mesh.MEDIA_MONOSCOPIC; + Mesh.MeshParams params = new Mesh.MeshParams(); + int projFormat = mediaPlayer.GetProjectionFormat(); + Log.i(TAG, "pf is " + projFormat); + if (projFormat == PF_CUBEMAP) { + mesh = CubeMapMesh.Create(params, context); + Log.i(TAG, "Create cubemap mesh!"); + } + else { + params.radius = SPHERE_RADIUS_METERS; + params.latitudes = DEFAULT_SPHERE_ROWS; + params.longitudes = DEFAULT_SPHERE_COLUMNS; + params.vFOV = DEFAULT_SPHERE_VERTICAL_DEGREES; + params.hFOV = DEFAULT_SPHERE_HORIZONTAL_DEGREES; + params.mediaFormat = stereoFormat; + mesh = ERPMesh.Create(params); + Log.i(TAG, "Create ERP mesh!"); + if (projFormat != PF_ERP) { + Log.e(TAG, "Projection format is invalid! Default is ERP format!"); + } + } + // 4. 
get width / height and create display surface and set it to native player + int displayWidth = mediaPlayer.GetWidth(); + int displayHeight = mediaPlayer.GetHeight(); + if (projFormat == PF_ERP) { + sceneRenderer.displayTexId = Utils.glCreateTextureFor2D(mediaPlayer.GetWidth(), mediaPlayer.GetHeight()); + Log.i(TAG, "ERP Display texture id is " + sceneRenderer.displayTexId); + }else if (projFormat == PF_CUBEMAP) { + sceneRenderer.displayTexId = Utils.glCreateTextureForCube(mediaPlayer.GetWidth(), mediaPlayer.GetHeight()); + Log.i(TAG, "Cubemap Display texture id is " + sceneRenderer.displayTexId); + }else { + sceneRenderer.displayTexId = 0; + Log.e(TAG, "Projection format is invalid! displayer texture id is set to zero!"); + } + sceneRenderer.displayTexture = new SurfaceTexture(sceneRenderer.displayTexId); + checkGlError(); + Log.i(TAG, "display width is " + displayWidth + " display height is " + displayHeight); + Pair display_surface = sceneRenderer.createDisplaySurface( + displayWidth, displayHeight, mesh); + Log.i(TAG, "ready to create display surface"); + mediaPlayer.SetDisplaySurface(display_surface.first); + + displaySurface = display_surface.second; + // 4. start native player thread + Log.i(TAG, "start to start!"); + mediaPlayer.Start(); + }else + { + Log.e(TAG, "media player is invalid!"); + } + } + + /** + * Renders a placeholder grid with optional error text. + */ + private static void renderEquirectangularGrid(Canvas canvas, String message) { + // Configure the grid. Each square will be 15 x 15 degrees. + final int width = canvas.getWidth(); + final int height = canvas.getHeight(); + // This assumes a 4k resolution. + final int majorWidth = width / 256; + final int minorWidth = width / 1024; + final Paint paint = new Paint(); + + // Draw a black ground & gray sky background + paint.setColor(Color.BLACK); + canvas.drawRect(0, height / 2, width, height, paint); + paint.setColor(Color.GRAY); + canvas.drawRect(0, 0, width, height / 2, paint); + + // Render the grid lines. + paint.setColor(Color.WHITE); + + for (int i = 0; i < DEFAULT_SPHERE_COLUMNS; ++i) { + int x = width * i / DEFAULT_SPHERE_COLUMNS; + paint.setStrokeWidth((i % 3 == 0) ? majorWidth : minorWidth); + canvas.drawLine(x, 0, x, height, paint); + } + + for (int i = 0; i < DEFAULT_SPHERE_ROWS; ++i) { + int y = height * i / DEFAULT_SPHERE_ROWS; + paint.setStrokeWidth((i % 3 == 0) ? majorWidth : minorWidth); + canvas.drawLine(0, y, width, y, paint); + } + + // Render optional text. + if (message != null) { + paint.setTextSize(height / 64); + paint.setColor(Color.RED); + float textWidth = paint.measureText(message); + + canvas.drawText( + message, + width / 2 - textWidth / 2, // Horizontally center the text. + 9 * height / 16, // Place it slightly below the horizon for better contrast. + paint); + } + } + + @MainThread + public synchronized void pause() { + if (mediaPlayer != null) { + mediaPlayer.Pause(); + } + } + + @MainThread + public synchronized void resume() { + if (mediaPlayer != null) { + mediaPlayer.Resume(); + } + } + + /** Tears down MediaLoader and prevents further work from happening. 
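+ * The native player's Stop() is invoked here and isDestroyed is set; if displayWhenReady() runs
+ * afterwards it sees the flag, calls Close() and drops the player reference.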
*/ + @MainThread + public synchronized void destroy() { + if (mediaPlayer != null) { + mediaPlayer.Stop(); + } + isDestroyed = true; + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MediaPlayer/NativeMediaPlayer.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MediaPlayer/NativeMediaPlayer.java new file mode 100644 index 00000000..b9becdb4 --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MediaPlayer/NativeMediaPlayer.java @@ -0,0 +1,469 @@ +/* + * Copyright (c) 2019, Intel Corporation + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +package com.vcd.immersive.omafplayer.MediaPlayer; +import android.content.Context; +import android.content.res.AssetManager; +import android.util.Log; +import android.view.Surface; + +import org.json.JSONException; +import org.json.JSONObject; + +import java.io.BufferedReader; + +import java.io.IOException; + +import java.io.InputStreamReader; +import java.io.UnsupportedEncodingException; + + +public class NativeMediaPlayer { + private final String TAG = "NATIVE_MEDIA_PLAYER"; + private long mHandler; + public RenderConfig mConfig; + private int status = 0; + private Context context; + + static { + System.loadLibrary("MediaPlayer"); + } + // input parameters + public class RenderConfig { + public int windowWidth; + public int windowHeight; + public String url; + public int sourceType; + public int viewportHFOV; + public int viewportVFOV; + public int viewportWidth; + public int viewportHeight; + public String cachePath; + public boolean enableExtractor; + public boolean enablePredictor; + public String predictPluginName; + public String libPath; + public int projFormat; + public int renderInterval; + public int minLogLevel; + public int maxVideoDecodeWidth; + public int maxVideoDecodeHeight; + + public RenderConfig() { + windowWidth = 0; + windowHeight = 0; + url = ""; + sourceType = 0; + viewportHFOV = 0; + viewportVFOV = 0; + viewportWidth = 0; + viewportHeight = 0; + cachePath = ""; + enableExtractor = false; + enablePredictor = false; + predictPluginName = ""; + libPath = ""; + projFormat = 0; + renderInterval = 0; + minLogLevel = 0; + maxVideoDecodeWidth = 0; + maxVideoDecodeHeight = 0; + } + } + + public static class HeadPose { + public float yaw; + public float pitch; + public long pts; + + public HeadPose() { + yaw = 0; + pitch = 0; + pts = 0; + } + } + + public int GetCurrentStatus() + { + return status; + } + + public void SetCurrentStatus(int st) + { + status = st; + } + /** + * Original signature : Handler Init()
+ * native declaration : line 82 + */ + public native long Init(); + /** + * Original signature : int Create(Handler, RenderConfig)
+ * native declaration : line 84 + */ + public native int Create(long hdl, RenderConfig config); + /** + * Original signature : int Play(Handler)
+ * native declaration : line 86 + */ + public native int Play(long hdl); + /** + * Original signature : int Pause(Handler)
+ * native declaration : line 88 + */ + public native int Pause(long hdl); + /** + * Original signature : int Resume(Handler)
+ * native declaration : line 90 + */ + public native int Resume(long hdl); + /** + * Original signature : int Stop(Handler)
+ * native declaration : line 92 + */ + public native int Stop(long hdl); + /** + * Original signature : int Seek(Handler)
+ * native declaration : line 94 + */ + public native int Seek(long hdl); + /** + * Original signature : int Start(Handler, void*)
+ * native declaration : line 96 + */ + public native int Start(long hdl); + /** + * Original signature : int Close(Handler)
+ * native declaration : line 98 + */ + public native int Close(long hdl); + /** + * Original signature : int GetStatus(Handler)
+ * native declaration : line 100 + */ + public native int GetStatus(long hdl); + /** + * Original signature : bool IsPlaying(Handler)
+ * native declaration : line 102 + */ + public native boolean IsPlaying(long hdl); + /** + * Original signature : HeadPose GetCurrentPosition(Handler)
+ * native declaration : line 104 + */ + public native void SetCurrentPosition(long hdl, HeadPose pose); + /** + * Original signature : uint32_t GetWidth(Handler)
+ * native declaration : line 106 + */ + public native int GetWidth(long hdl); + /** + * Original signature : uint32_t GetHeight(Handler)
+ * native declaration : line 108 + */ + public native int GetHeight(long hdl); + /** + * Original signature : uint32_t GetProjectionFormat(Handler)
+ * native declaration : line 108 + */ + public native int GetProjectionFormat(long hdl); + /** + * Original signature : void SetDecodeSurface(Handler, Surface, int, int)
+ * native declaration : line 110 + */ + public native void SetDecodeSurface(long hdl, Surface decodeSurface, int texId, int video_id); + /** + * Original signature : void SetDisplaySurface(Handler, int)
+ * native declaration : line 110 + */ + public native void SetDisplaySurface(long hdl, int texId); + /** + * Original signature : int UpdateDisplayTex(Handler, int)
+ * native declaration : line 110 + */ + public native int UpdateDisplayTex(long hdl, int render_count); + /** + * Original signature : int GetTransformType(Handler)
+ * native declaration : line 110 + */ + public native int[] GetTransformType(long hdl); + + public NativeMediaPlayer(Context text) + { + mHandler = 0; + mConfig = null; + context = text; + ParseXmlConfig(); + } + + public int Initialize() + { + mHandler = Init();//native + + if (mHandler == 0) + { + Log.e(TAG, "Failed to init jni media player!"); + return -1; + } + return 0; + } + + private String GetCfgJson() + { + StringBuilder stringBuilder = new StringBuilder(); + AssetManager assetManager = context.getAssets(); + + try { + BufferedReader bufferedReader = new BufferedReader(new InputStreamReader( + assetManager.open("cfg.json"), "utf-8" + )); + String jsonLines; + while ((jsonLines = bufferedReader.readLine()) != null) { + stringBuilder.append(jsonLines); + } + } catch (UnsupportedEncodingException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + return stringBuilder.toString(); + } + + private int ParseXmlConfig() + { + String cfgJsonStr = GetCfgJson(); + JSONObject cfgJsonObject = null; + try { + cfgJsonObject = new JSONObject(cfgJsonStr); + } catch (JSONException e) { + e.printStackTrace(); + } + mConfig = new RenderConfig(); + try { + mConfig.windowHeight = cfgJsonObject.getInt("windowHeight"); + mConfig.windowWidth = cfgJsonObject.getInt("windowWidth"); + mConfig.url = cfgJsonObject.getString("url"); + mConfig.sourceType = cfgJsonObject.getInt("sourceType"); + mConfig.viewportHFOV = cfgJsonObject.getInt("viewportHFOV"); + mConfig.viewportVFOV = cfgJsonObject.getInt("viewportVFOV"); + mConfig.viewportWidth = cfgJsonObject.getInt("viewportWidth"); + mConfig.viewportHeight = cfgJsonObject.getInt("viewportHeight"); + + mConfig.cachePath = cfgJsonObject.getString("cachePath"); + mConfig.enableExtractor = cfgJsonObject.getBoolean("enableExtractor"); + String predictStr = cfgJsonObject.getString("predict"); + JSONObject predictObj = new JSONObject(predictStr); + mConfig.enablePredictor = predictObj.getBoolean("enable"); + mConfig.predictPluginName = predictObj.getString("name"); + mConfig.libPath = predictObj.getString("path"); + mConfig.maxVideoDecodeWidth = cfgJsonObject.getInt("maxVideoDecodeWidth"); + mConfig.maxVideoDecodeHeight = cfgJsonObject.getInt("maxVideoDecodeHeight"); + } catch (JSONException e) { + e.printStackTrace(); + } + return 0; + } + + public int Create(String config_url) + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return Create(mHandler, mConfig); + } + + public int Play() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return Play(mHandler); + } + + public int Pause() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return Pause(mHandler); + } + + public int Resume() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return Resume(mHandler); + } + + public int Stop() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return Stop(mHandler); + } + + public int Seek() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return Seek(mHandler); + } + + public int Start() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return Start(mHandler); + } + + public int Close() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return Close(mHandler); + } + + public int 
GetStatus() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return GetStatus(mHandler); + } + + public boolean IsPlaying() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return false; + } + return IsPlaying(mHandler); + } + + public void SetCurrentPosition(HeadPose pose) + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return; + } + Log.i(TAG, "native player pose yaw : " + pose.yaw + " pose pitch : " + pose.pitch); + SetCurrentPosition(mHandler, pose); + } + + public int GetWidth() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return GetWidth(mHandler); + } + + public int GetHeight() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return GetHeight(mHandler); + } + + public int GetProjectionFormat() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return GetProjectionFormat(mHandler); + } + + public void SetDecodeSurface(Surface surface, int texId, int video_id) + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return; + } + SetDecodeSurface(mHandler, surface, texId, video_id); + } + + public void SetDisplaySurface(int texId) + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return; + } + SetDisplaySurface(mHandler, texId); + } + + public int UpdateDisplayTex(int count) + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return -1; + } + return UpdateDisplayTex(mHandler, count); + } + + public int[] GetTransformType() + { + if (mHandler == 0) + { + Log.e(TAG, "Native media player is invalid!"); + return null; + } + return GetTransformType(mHandler); + } +} \ No newline at end of file diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MonoscopicView.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MonoscopicView.java new file mode 100644 index 00000000..c731739b --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MonoscopicView.java @@ -0,0 +1,363 @@ +package com.vcd.immersive.omafplayer; + +import android.content.Context; +import android.content.Intent; +import android.graphics.PointF; +import android.hardware.Sensor; +import android.hardware.SensorEvent; +import android.hardware.SensorEventListener; +import android.hardware.SensorManager; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.opengl.Matrix; +import android.support.annotation.AnyThread; +import android.support.annotation.BinderThread; +import android.support.annotation.UiThread; +import android.util.AttributeSet; +import android.util.Log; +import android.view.MotionEvent; +import android.view.View; +import com.google.vr.sdk.base.Eye.Type; +import com.vcd.immersive.omafplayer.MediaPlayer.NativeMediaPlayer; +import com.vcd.immersive.omafplayer.Rendering.SceneRenderer; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +import static android.support.constraint.Constraints.TAG; + +/** + * Renders a GL scene in a non-VR Activity that is affected by phone orientation and touch input. + * + *

The two input components are the TYPE_GAME_ROTATION_VECTOR Sensor and a TouchListener. The GL + * renderer combines these two inputs to render a scene with the appropriate camera orientation. + * + *

The primary complexity in this class is related to the various rotations. It is important to + * apply the touch and sensor rotations in the correct order or the user's touch manipulations won't + * match what they expect. + */ +public final class MonoscopicView extends GLSurfaceView { + // We handle all the sensor orientation detection ourselves. + private SensorManager sensorManager; + private Sensor orientationSensor; + private PhoneOrientationListener phoneOrientationListener; + + private MediaLoader mediaLoader; + private Renderer renderer; + private TouchTracker touchTracker; + private VideoUiView uiView; + + /** Inflates a standard GLSurfaceView. */ + public MonoscopicView(Context context, AttributeSet attributeSet) { + super(context, attributeSet); + setPreserveEGLContextOnPause(true); + } + + /** + * Finishes initialization. This should be called immediately after the View is inflated. + * + * @param uiView the video UI that should be bound to the underlying SceneRenderer + */ + public void initialize(VideoUiView uiView) { + this.uiView = uiView; + mediaLoader = new MediaLoader(getContext()); + + // Configure OpenGL. + renderer = new Renderer(uiView, mediaLoader); + setEGLContextClientVersion(2); + setRenderer(renderer); + setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY); + + // Configure sensors and touch. + sensorManager = (SensorManager) getContext().getSystemService(Context.SENSOR_SERVICE); + // TYPE_GAME_ROTATION_VECTOR is the easiest sensor since it handles all the complex math for + // fusion. It's used instead of TYPE_ROTATION_VECTOR since the latter uses the mangetometer on + // devices. When used indoors, the magnetometer can take some time to settle depending on the + // device and amount of metal in the environment. + orientationSensor = sensorManager.getDefaultSensor(Sensor.TYPE_GAME_ROTATION_VECTOR); + phoneOrientationListener = new PhoneOrientationListener(); + + touchTracker = new TouchTracker(renderer); + setOnTouchListener(touchTracker); + } + + /** Starts the sensor & video only when this View is active. */ + @Override + public void onResume() { + super.onResume(); + // Use the fastest sensor readings. + sensorManager.registerListener( + phoneOrientationListener, orientationSensor, SensorManager.SENSOR_DELAY_FASTEST); + mediaLoader.resume(); + } + + /** Stops the sensors & video when the View is inactive to avoid wasting battery. */ + @Override + public void onPause() { + mediaLoader.pause(); + sensorManager.unregisterListener(phoneOrientationListener); + super.onPause(); + } + + /** Destroys the underlying resources. If this is not called, the MediaLoader may leak. */ + public void destroy() { + uiView.setMediaPlayer(null); + mediaLoader.destroy(); + } + + /** Parses the Intent and loads the appropriate media. */ + public void loadMedia(Intent intent) { + mediaLoader.handleIntent(intent, uiView); + } + + /** Detects sensor events and saves them as a matrix. */ + private class PhoneOrientationListener implements SensorEventListener { + private final float[] phoneInWorldSpaceMatrix = new float[16]; + private final float[] remappedPhoneMatrix = new float[16]; + private final float[] angles = new float[3]; + + @Override + @BinderThread + public void onSensorChanged(SensorEvent event) { + SensorManager.getRotationMatrixFromVector(phoneInWorldSpaceMatrix, event.values); + + // Extract the phone's roll and pass it on to touchTracker & renderer. 
Remapping is required + // since we need the calculated roll of the phone to be independent of the phone's pitch & + // yaw. Any operation that decomposes rotation to Euler angles needs to be performed + // carefully. + SensorManager.remapCoordinateSystem( + phoneInWorldSpaceMatrix, + SensorManager.AXIS_X, SensorManager.AXIS_MINUS_Z, + remappedPhoneMatrix); + SensorManager.getOrientation(remappedPhoneMatrix, angles); + float yaw = (float)(angles[0] * 180 / Math.PI); + if (yaw < 0) yaw += 360;//[0, 360] + float pitch = (float)(angles[1] * 180 / Math.PI); + float roll = angles[2]; + touchTracker.setRoll(roll); + NativeMediaPlayer.HeadPose pose = new NativeMediaPlayer.HeadPose(); + float tmpYaw = yaw - renderer.touchYaw; + tmpYaw = tmpYaw % 360 > 0 ? tmpYaw % 360 : tmpYaw % 360 + 360;//[0, 360] + pose.yaw = tmpYaw - 180;//[-180, 180] + + pose.pitch = pitch - renderer.touchPitch; + if (pose.pitch < -90) pose.pitch = -90.0f; + if (pose.pitch > 90) pose.pitch = 90.0f; +// Log.e(TAG, "YAW is " + pose.yaw + " PITCH is " + pose.pitch); + renderer.scene.SetCurrentPosition(pose); + + // Rotate from Android coordinates to OpenGL coordinates. Android's coordinate system + // assumes Y points North and Z points to the sky. OpenGL has Y pointing up and Z pointing + // toward the user. + Matrix.rotateM(phoneInWorldSpaceMatrix, 0, 90, 1, 0, 0); + renderer.setDeviceOrientation(phoneInWorldSpaceMatrix, roll); + } + + @Override + public void onAccuracyChanged(Sensor sensor, int accuracy) {} + } + + /** + * Basic touch input system. + * + *

Mixing touch input and gyro input results in a complicated UI so this should be used + * carefully. This touch system implements a basic (X, Y) -> (yaw, pitch) transform. This works + * for basic UI but fails in edge cases where the user tries to drag scene up or down. There is no + * good UX solution for this. The least bad solution is to disable pitch manipulation and only let + * the user adjust yaw. This example tries to limit the awkwardness by restricting pitch + * manipulation to +/- 45 degrees. + * + *

It is also important to get the order of operations correct. To match what users expect, + * touch interaction manipulates the scene by rotating the world by the yaw offset and tilting the + * camera by the pitch offset. If the order of operations is incorrect, the sensors & touch + * rotations will have strange interactions. The roll of the phone is also tracked so that the + * x & y are correctly mapped to yaw & pitch no matter how the user holds their phone. + * + *

This class doesn't handle any scrolling inertia but Android's + * {@link com.google.vr.sdk.widgets.common.TouchTracker.FlingGestureListener} can be used with + * this code for a nicer UI. An even more advanced UI would reproject the user's touch point into + * 3D and drag the Mesh as the user moves their finger. However, that requires quaternion + * interpolation and is beyond the scope of this sample. + */ + static class TouchTracker implements OnTouchListener { + // Arbitrary touch speed number. This should be tweaked so the scene smoothly follows the + // finger or derived from DisplayMetrics. + static final float PX_PER_DEGREES = 25; + // Touch input won't change the pitch beyond +/- 45 degrees. This reduces awkward situations + // where the touch-based pitch and gyro-based pitch interact badly near the poles. + static final float MAX_PITCH_DEGREES = 90; + // With every touch event, update the accumulated degrees offset by the new pixel amount. + private final PointF previousTouchPointPx = new PointF(); + private final PointF accumulatedTouchOffsetDegrees = new PointF(); + // The conversion from touch to yaw & pitch requires compensating for device roll. This is set + // on the sensor thread and read on the UI thread. + private volatile float roll; + + private final Renderer renderer; + + public TouchTracker(Renderer renderer) { + this.renderer = renderer; + } + + /** + * Converts ACTION_MOVE events to pitch & yaw events while compensating for device roll. + * + * @return true if we handled the event + */ + @Override + public boolean onTouch(View v, MotionEvent event) { + switch (event.getAction()) { + case MotionEvent.ACTION_DOWN: + // Initialize drag gesture. + previousTouchPointPx.set(event.getX(), event.getY()); + return true; + case MotionEvent.ACTION_MOVE: + // Calculate the touch delta in screen space. + float touchX = (event.getX() - previousTouchPointPx.x) / PX_PER_DEGREES; + float touchY = (event.getY() - previousTouchPointPx.y) / PX_PER_DEGREES; + previousTouchPointPx.set(event.getX(), event.getY()); + + float r = roll; // Copy volatile state. + float cr = (float) Math.cos(r); + float sr = (float) Math.sin(r); + // To convert from screen space to the 3D space, we need to adjust the drag vector based + // on the roll of the phone. This is standard rotationMatrix(roll) * vector math but has + // an inverted y-axis due to the screen-space coordinates vs GL coordinates. + // Handle yaw. + accumulatedTouchOffsetDegrees.x -= cr * touchX - sr * touchY; + // Handle pitch and limit it to 45 degrees. + accumulatedTouchOffsetDegrees.y += sr * touchX + cr * touchY; + accumulatedTouchOffsetDegrees.y = + Math.max(-MAX_PITCH_DEGREES, + Math.min(MAX_PITCH_DEGREES, accumulatedTouchOffsetDegrees.y)); + + renderer.setPitchOffset(accumulatedTouchOffsetDegrees.y); + renderer.setYawOffset(accumulatedTouchOffsetDegrees.x); + return true; + default: + return false; + } + } + + @BinderThread + public void setRoll(float roll) { + // We compensate for roll by rotating in the opposite direction. + this.roll = -roll; + } + } + + /** + * Standard GL Renderer implementation. The notable code is the matrix multiplication in + * onDrawFrame and updatePitchMatrix. + */ + static class Renderer implements GLSurfaceView.Renderer { + private final SceneRenderer scene = SceneRenderer.createFor2D(); + + // Arbitrary vertical field of view. Adjust as desired. 
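+ // Note: onSurfaceChanged() below builds the perspective matrix from mConfig.viewportVFOV rather
+ // than these constants, which appear otherwise unused in this renderer.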
+ private static final int FIELD_OF_VIEW_DEGREES_V = 80; + private static final int FIELD_OF_VIEW_DEGREES_H = 80; + private static final float Z_NEAR = .1f; + private static final float Z_FAR = 100; + private final float[] projectionMatrix = new float[16]; + + // There is no model matrix for this scene so viewProjectionMatrix is used for the mvpMatrix. + private final float[] viewProjectionMatrix = new float[16]; + + // Device orientation is derived from sensor data. This is accessed in the sensor's thread and + // the GL thread. + private final float[] deviceOrientationMatrix = new float[16]; + + // Optional pitch and yaw rotations are applied to the sensor orientation. These are accessed on + // the UI, sensor and GL Threads. + private final float[] touchPitchMatrix = new float[16]; + private final float[] touchYawMatrix = new float[16]; + private float touchPitch; + private float touchYaw; + private float deviceRoll; + + private int screenWidth; + private int screenHeight; + + // viewMatrix = touchPitch * deviceOrientation * touchYaw. + private final float[] viewMatrix = new float[16]; + private final float[] tempMatrix = new float[16]; + + private final VideoUiView uiView; + private final MediaLoader mediaLoader; + + public Renderer(VideoUiView uiView, MediaLoader mediaLoader) { + Matrix.setIdentityM(deviceOrientationMatrix, 0); + Matrix.setIdentityM(touchPitchMatrix, 0); + Matrix.setIdentityM(touchYawMatrix, 0); + this.uiView = uiView; + this.mediaLoader = mediaLoader; + } + + @Override + public void onSurfaceCreated(GL10 gl, EGLConfig config) { + scene.glInit(); + if (uiView != null) { + scene.setVideoFrameListener(uiView.getFrameListener()); + } + mediaLoader.onGlSceneReady(scene); + } + + @Override + public void onSurfaceChanged(GL10 gl, int width, int height) { + screenWidth = width; + screenHeight = height; + GLES20.glViewport(0, 0, width, height); + Matrix.perspectiveM( + projectionMatrix, 0, mediaLoader.mediaPlayer.mConfig.viewportVFOV, (float) width / height, Z_NEAR, Z_FAR); + } + + @Override + public void onDrawFrame(GL10 gl) { + // Combine touch & sensor data. + // Orientation = pitch * sensor * yaw since that is closest to what most users expect the + // behavior to be. + synchronized (this) { + Matrix.multiplyMM(tempMatrix, 0, deviceOrientationMatrix, 0, touchYawMatrix, 0); + Matrix.multiplyMM(viewMatrix, 0, touchPitchMatrix, 0, tempMatrix, 0); + } + + Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0); + scene.glDrawFrame(viewProjectionMatrix, Type.MONOCULAR, screenWidth, screenHeight); + } + + /** Adjusts the GL camera's rotation based on device rotation. Runs on the sensor thread. */ + @BinderThread + public synchronized void setDeviceOrientation(float[] matrix, float deviceRoll) { + System.arraycopy(matrix, 0, deviceOrientationMatrix, 0, deviceOrientationMatrix.length); + this.deviceRoll = -deviceRoll; + updatePitchMatrix(); + } + + /** + * Updates the pitch matrix after a physical rotation or touch input. The pitch matrix rotation + * is applied on an axis that is dependent on device rotation so this must be called after + * either touch or sensor update. + */ + @AnyThread + private void updatePitchMatrix() { + // The camera's pitch needs to be rotated along an axis that is parallel to the real world's + // horizon. This is the <1, 0, 0> axis after compensating for the device's roll. 
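+ // Rotating <1, 0, 0> about the Z axis by deviceRoll gives <cos(roll), sin(roll), 0>, which is the
+ // axis handed to setRotateM below; -touchPitch is the angle applied around that axis.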
+ Matrix.setRotateM(touchPitchMatrix, 0, + -touchPitch, (float) Math.cos(deviceRoll), (float) Math.sin(deviceRoll), 0); + } + + /** Set the pitch offset matrix. */ + @UiThread + public synchronized void setPitchOffset(float pitchDegrees) { + touchPitch = pitchDegrees; + updatePitchMatrix(); + } + + /** Set the yaw offset matrix. */ + @UiThread + public synchronized void setYawOffset(float yawDegrees) { + touchYaw = yawDegrees; + Matrix.setRotateM(touchYawMatrix, 0, -yawDegrees, 0, 1, 0); + } + } +} \ No newline at end of file diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/CanvasQuad.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/CanvasQuad.java new file mode 100644 index 00000000..5ae909d1 --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/CanvasQuad.java @@ -0,0 +1,259 @@ +package com.vcd.immersive.omafplayer.Rendering; + +import static android.support.constraint.Constraints.TAG; +import static com.vcd.immersive.omafplayer.Rendering.Utils.checkGlError; + +import android.graphics.Canvas; +import android.graphics.PointF; +import android.graphics.Rect; +import android.graphics.SurfaceTexture; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.util.Log; +import android.view.Surface; +import android.widget.FrameLayout; +import com.google.vr.sdk.controller.Orientation; +import java.nio.FloatBuffer; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Renders a floating, textured, translucent quad in VR at a hardcoded distance. + * + *

In this sample, the class is only used to render the Android View containing the UI. It also + * contains the {@link Surface} and {@link SurfaceTexture} which hold the {@link Canvas} that + * VideoUiView renders to. + * + *

A CanvasQuad can be created on any thread, but {@link #glInit()} needs to be called on + * the GL thread before it can be rendered. + */ +public class CanvasQuad { + // The size of the quad is hardcoded for this sample and the quad doesn't have a model matrix so + // these dimensions are used by translateClick() for touch interaction. + private static final float WIDTH = 1f; + private static final float HEIGHT = 1 / 8f; + private static final float DISTANCE = 1f; + // The number of pixels in this quad affect how Android positions Views in it. VideoUiView in VR + // will be 1024 x 128 px in size which is similar to its 2D size. For Views that only have VR + // layouts, using a number that results in ~10-15 px / degree is good. + public static final int PX_PER_UNIT = 1024; + + // Standard vertex shader that passes through the texture data. + private static final String[] vertexShaderCode = { + "uniform mat4 uMvpMatrix;", + // 3D position data. + "attribute vec3 aPosition;", + // 2D UV vertices. + "attribute vec2 aTexCoords;", + "varying vec2 vTexCoords;", + + // Standard transformation. + "void main() {", + " gl_Position = uMvpMatrix * vec4(aPosition, 1);", + " vTexCoords = aTexCoords;", + "}" + }; + + // Renders the texture of the quad using uAlpha for transparency. + private static final String[] fragmentShaderCode = { + // This is required since the texture data is GL_TEXTURE_EXTERNAL_OES. + "#extension GL_OES_EGL_image_external : require", + "precision mediump float;", + + // Standard texture rendering shader with extra alpha channel. + "uniform samplerExternalOES uTexture;", + "uniform float uAlpha;", + "varying vec2 vTexCoords;", + "void main() {", + " gl_FragColor.xyz = texture2D(uTexture, vTexCoords).xyz;", + " gl_FragColor.a = uAlpha;", + "}" + }; + + // Program-related GL items. These are only valid if program != 0. + private int program = 0; + private int mvpMatrixHandle; + private int positionHandle; + private int textureCoordsHandle; + private int textureHandle; + private int textureId; + private int alphaHandle; + + // Components used to manage the Canvas that the View is rendered to. These are only valid after + // GL initialization. The client of this class acquires a Canvas from the Surface, writes to it + // and posts it. This marks the Surface as dirty. The GL code then updates the SurfaceTexture + // when rendering only if it is dirty. + private SurfaceTexture displaySurfaceTexture; + private Surface displaySurface; + private final AtomicBoolean surfaceDirty = new AtomicBoolean(); + + // The quad has 2 triangles built from 4 total vertices. Each vertex has 3 position & 2 texture + // coordinates. + private static final int POSITION_COORDS_PER_VERTEX = 3; + private static final int TEXTURE_COORDS_PER_VERTEX = 2; + private static final int COORDS_PER_VERTEX = + POSITION_COORDS_PER_VERTEX + TEXTURE_COORDS_PER_VERTEX; + private static final int BYTES_PER_COORD = 4; // float. + private static final int VERTEX_STRIDE_BYTES = COORDS_PER_VERTEX * BYTES_PER_COORD; + + // Interlaced position & texture data. + private static final float[] vertexData = { + -WIDTH / 2, -HEIGHT / 2, -DISTANCE, + 0, 1, + WIDTH / 2, -HEIGHT / 2, -DISTANCE, + 1, 1, + -WIDTH / 2, HEIGHT / 2, -DISTANCE, + 0, 0, + WIDTH / 2, HEIGHT / 2, -DISTANCE, + 1, 0 + }; + private static final FloatBuffer vertexBuffer = Utils.createBuffer(vertexData); + + /** Only SceneRenderer can create a CanvasQuad. */ + /* package */ CanvasQuad() { } + + /** Gets LayoutParams used by Android to properly layout VideoUiView. 
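+ * With WIDTH = 1, HEIGHT = 1/8 and PX_PER_UNIT = 1024 this evaluates to a 1024 x 128 px layout,
+ * matching the size quoted in the class comment.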
*/ + public static FrameLayout.LayoutParams getLayoutParams() { + return new FrameLayout.LayoutParams((int) (WIDTH * PX_PER_UNIT), (int) (HEIGHT * PX_PER_UNIT)); + } + + /** + * Calls {@link Surface#lockCanvas(Rect)}. + * + * @return {@link Canvas} for the View to render to or {@code null} if {@link #glInit()} has not + * yet been called. + */ + public Canvas lockCanvas() { + return displaySurface == null ? null : displaySurface.lockCanvas(null /* dirty Rect */); + } + + /** + * Calls {@link Surface#unlockCanvasAndPost(Canvas)} and marks the SurfaceTexture as dirty. + * + * @param canvas the canvas returned from {@link #lockCanvas()} + */ + public void unlockCanvasAndPost(Canvas canvas) { + if (canvas == null || displaySurface == null) { + // glInit() hasn't run yet. + return; + } + displaySurface.unlockCanvasAndPost(canvas); + surfaceDirty.set(true); + } + + /** Finishes constructing this object on the GL Thread. */ + /* package */ void glInit() { + if (program != 0) { + return; + } + + // Create the program. + program = Utils.compileProgram(vertexShaderCode, fragmentShaderCode); + mvpMatrixHandle = GLES20.glGetUniformLocation(program, "uMvpMatrix"); + positionHandle = GLES20.glGetAttribLocation(program, "aPosition"); + textureCoordsHandle = GLES20.glGetAttribLocation(program, "aTexCoords"); + textureHandle = GLES20.glGetUniformLocation(program, "uTexture"); + textureId = Utils.glCreateExternalTexture(); + alphaHandle = GLES20.glGetUniformLocation(program, "uAlpha"); + checkGlError(); + + // Create the underlying SurfaceTexture with the appropriate size. + displaySurfaceTexture = new SurfaceTexture(textureId); + displaySurfaceTexture.setDefaultBufferSize( + (int) (WIDTH * PX_PER_UNIT), (int) (HEIGHT * PX_PER_UNIT)); + displaySurface = new Surface(displaySurfaceTexture); + } + + /** + * Renders the quad. + * + * @param viewProjectionMatrix Array of floats containing the quad's 4x4 perspective matrix in the + * {@link android.opengl.Matrix} format. + * @param alpha Specifies the opacity of this quad. + */ + /* package */ void glDraw(float[] viewProjectionMatrix, float alpha) { + // Configure shader. + GLES20.glUseProgram(program); + checkGlError(); + + GLES20.glEnableVertexAttribArray(positionHandle); + GLES20.glEnableVertexAttribArray(textureCoordsHandle); + checkGlError(); + + GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, viewProjectionMatrix, 0); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId); + GLES20.glUniform1i(textureHandle, 0); + GLES20.glUniform1f(alphaHandle, alpha); + checkGlError(); + + // Load position data. + vertexBuffer.position(0); + GLES20.glVertexAttribPointer(positionHandle, POSITION_COORDS_PER_VERTEX, GLES20.GL_FLOAT, + false, VERTEX_STRIDE_BYTES, vertexBuffer); + checkGlError(); + + // Load texture data. + vertexBuffer.position(POSITION_COORDS_PER_VERTEX); + GLES20.glVertexAttribPointer(textureCoordsHandle, TEXTURE_COORDS_PER_VERTEX, GLES20.GL_FLOAT, + false, VERTEX_STRIDE_BYTES, vertexBuffer); + checkGlError(); + + if (surfaceDirty.compareAndSet(true, false)) { + // If the Surface has been written to, get the new data onto the SurfaceTexture. + displaySurfaceTexture.updateTexImage(); + } + + // Render. + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexData.length / COORDS_PER_VERTEX); + checkGlError(); + + GLES20.glDisableVertexAttribArray(positionHandle); + GLES20.glDisableVertexAttribArray(textureCoordsHandle); + } + + /** Frees GL resources. 
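+ * Deletes the GL program and its external texture when they were created, and releases the
+ * backing SurfaceTexture if present.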
*/ + /* package */ void glShutdown() { + if (program != 0) { + GLES20.glDeleteProgram(program); + GLES20.glDeleteTextures(1, new int[]{textureId}, 0); + } + + if (displaySurfaceTexture != null) { + displaySurfaceTexture.release(); + } + } + + /** + * Translates a Daydream Controller Orientation into a Point that can be passed to Android's + * click handling system. + * + *

This is a minimal hit detection system that works for this quad because + * it has no model matrix. All the math is based on the fact that its size & distance are + * hard-coded into this class. For a more complex 3D mesh, a general bounding box & ray collision + * system would be required. + * + * @param orientation a {@link com.google.vr.sdk.controller.Controller}'s {@link Orientation}. + */ + /* package */ static PointF translateClick(Orientation orientation) { + float[] angles = orientation.toYawPitchRollRadians(new float[3]); + // Make a rough guess of the bounds of the Quad in polar coordinates. This works as long as the + // Quad isn't too large. + float horizontalHalfAngle = (float) Math.atan2(WIDTH / 2, DISTANCE); + float verticleHalfAngle = (float) Math.atan2(HEIGHT / 2, DISTANCE); + + if (angles[1] < -verticleHalfAngle || angles[1] > verticleHalfAngle + || angles[0] < -horizontalHalfAngle || angles[0] > horizontalHalfAngle) { + // Click is outside of the quad. + return null; + } + + // Convert from the polar coordinates of the controller to the rectangular coordinates of the + // View. Note the negative yaw & pitch used to generate Android-compliant x & y coordinates. + float xPercent = (horizontalHalfAngle - angles[0]) / (2 * horizontalHalfAngle); + float yPercent = (verticleHalfAngle - angles[1]) / (2 * verticleHalfAngle); + float xPx = xPercent * WIDTH * PX_PER_UNIT; + float yPx = yPercent * HEIGHT * PX_PER_UNIT; + + return new PointF(xPx, yPx); + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/CubeMapMesh.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/CubeMapMesh.java new file mode 100644 index 00000000..a340cc0a --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/CubeMapMesh.java @@ -0,0 +1,394 @@ +package com.vcd.immersive.omafplayer.Rendering; + +import static android.support.constraint.Constraints.TAG; + +import static com.vcd.immersive.omafplayer.Rendering.Utils.checkGlError; +import static java.lang.Math.cos; +import static java.lang.Math.sin; + +import android.content.Context; + +import android.opengl.GLES20; + +import android.util.Log; + +import com.google.vr.sdk.base.Eye; + + +/** + * Utility class to generate & render spherical meshes for video or images. Use the static creation + * methods to construct the Mesh's data. Then call the Mesh constructor on the GL thread when ready. + * Use glDraw method to render it. 
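+ * The vertex shader assigns gl_Position.xyww so the cube's depth resolves to the far plane, the
+ * usual skybox trick that keeps it from occluding nearer geometry.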
+ */ +public final class CubeMapMesh extends Mesh { + + private class TransformType { + public static final int NO_TRANSFORM = 0; + public static final int MIRRORING_HORIZONTALLY = 1; + public static final int ROTATION_180_ANTICLOCKWISE = 2; + public static final int ROTATION_180_ANTICLOCKWISE_AFTER_MIRRORING_HOR = 3; + public static final int ROTATION_90_ANTICLOCKWISE_BEFORE_MIRRORING_HOR= 4; + public static final int ROTATION_90_ANTICLOCKWISE = 5; + public static final int ROTATION_270_ANTICLOCKWISE_BEFORE_MIRRORING_HOR = 6; + public static final int ROTATION_270_ANTICLOCKWISE = 7; + } + + private class FaceID { + public static final int CUBE_MAP_RIGHT = 0; + public static final int CUBE_MAP_LEFT = 1; + public static final int CUBE_MAP_TOP = 2; + public static final int CUBE_MAP_BOTTOM = 3; + public static final int CUBE_MAP_BACK = 4; + public static final int CUBE_MAP_FRONT = 5; + } + + private static final int VIEWPORT_HFOV = 1920; + + private static final int VIEWPORT_VFOV = 1920; + + // Basic vertex & fragment shaders to render a mesh with 3D position & 2D texture data. + private static final String[] VERTEX_SHADER_CODE = + new String[] { + "uniform mat4 uMvpMatrix;", + "attribute vec3 aPosition;", + "attribute vec3 transPosition;", + "varying vec3 vTexCoords;", + + // Standard transformation. + "void main() {", + " vTexCoords = transPosition;", + " gl_Position = uMvpMatrix * vec4(aPosition, 1.0);", + " gl_Position = gl_Position.xyww;", + "}" + }; + private static final String[] FRAGMENT_SHADER_CODE = + new String[] { + // This is required since the texture data is GL_TEXTURE_EXTERNAL_OES. + // "#extension GL_OES_EGL_image_external : require", + "precision mediump float;", + + // Standard texture rendering shader. + // "uniform samplerExternalOES uTexture;", + "uniform samplerCube uTexture;", + "varying vec3 vTexCoords;", + "void main() {", + " gl_FragColor = textureCube(uTexture, vTexCoords);", + "}" + }; + private boolean isChangeTransformType = false; + // Constants related to vertex data. + private static final int VERTEX_NUM_FOR_SKYBOX = 36; + + private static final int POSITION_COORDS_PER_VERTEX = 3; // X, Y, Z. + // The vertex contains texture coordinates for both the left & right eyes. If the scene is + // rendered in VR, the appropriate part of the vertex will be selected at runtime. For a mono + // scene, only the left eye's UV coordinates are used. + // For mono media, the UV coordinates are duplicated in each. For stereo media, the UV coords + // point to the appropriate part of the source media. + /** + * Generates a 3D UV sphere for rendering monoscopic or stereoscopic video. + * + *
<p>
This can be called on any thread. The returned {@link Mesh} isn't valid until + * {@link #glInit(int)} is called. + * + * @param radius Size of the sphere. Must be > 0. + * @param latitudes Number of rows that make up the sphere. Must be >= 1. + * @param longitudes Number of columns that make up the sphere. Must be >= 1. + * @param verticalFovDegrees Total latitudinal degrees that are covered by the sphere. Must be in + * (0, 180]. + * @param horizontalFovDegrees Total longitudinal degrees that are covered by the sphere.Must be + * in (0, 360]. + * @param mediaFormat A MEDIA_* value. + * @return Unintialized Mesh. + */ + public static CubeMapMesh Create( + MeshParams params, Context context) { + float[] vertexData = new float[] { + // vertex postion transform position + // right- z (for flip) + 1.0f, -1.0f, -1.0f, + 1.0f, -1.0f, 1.0f, + 1.0f, 1.0f, 1.0f, + 1.0f, 1.0f, 1.0f, + 1.0f, 1.0f, -1.0f, + 1.0f, -1.0f, -1.0f, + // left- z (for flip) + -1.0f, -1.0f, 1.0f, + -1.0f, -1.0f, -1.0f, + -1.0f, 1.0f, -1.0f, + -1.0f, 1.0f, -1.0f, + -1.0f, 1.0f, 1.0f, + -1.0f, -1.0f, 1.0f, + // top- z (for flip) + -1.0f, 1.0f, -1.0f, + 1.0f, 1.0f, -1.0f, + 1.0f, 1.0f, 1.0f, + 1.0f, 1.0f, 1.0f, + -1.0f, 1.0f, 1.0f, + -1.0f, 1.0f, -1.0f, + // bottom- z (for flip) + -1.0f, -1.0f, -1.0f, + -1.0f, -1.0f, 1.0f, + 1.0f, -1.0f, -1.0f, + 1.0f, -1.0f, -1.0f, + -1.0f, -1.0f, 1.0f, + 1.0f, -1.0f, 1.0f, + // back- x (for flip) + -1.0f, -1.0f, 1.0f, + -1.0f, 1.0f, 1.0f, + 1.0f, 1.0f, 1.0f, + 1.0f, 1.0f, 1.0f, + 1.0f, -1.0f, 1.0f, + -1.0f, -1.0f, 1.0f, + // front- x (for flip) + -1.0f, 1.0f, -1.0f, + -1.0f, -1.0f, -1.0f, + 1.0f, -1.0f, -1.0f, + 1.0f, -1.0f, -1.0f, + 1.0f, 1.0f, -1.0f, + -1.0f, 1.0f, -1.0f, + }; + float[] transData = new float[] { + // vertex postion transform position + // right- z (for flip) + 1.0f, -1.0f, 1.0f, + 1.0f, -1.0f, -1.0f, + 1.0f, 1.0f, -1.0f, + 1.0f, 1.0f, -1.0f, + 1.0f, 1.0f, 1.0f, + 1.0f, -1.0f, 1.0f, + // left- z (for flip) + -1.0f, -1.0f, -1.0f, + -1.0f, -1.0f, 1.0f, + -1.0f, 1.0f, 1.0f, + -1.0f, 1.0f, 1.0f, + -1.0f, 1.0f, -1.0f, + -1.0f, -1.0f, -1.0f, + // top- z (for flip) + -1.0f, 1.0f, 1.0f, + 1.0f, 1.0f, 1.0f, + 1.0f, 1.0f, -1.0f, + 1.0f, 1.0f, -1.0f, + -1.0f, 1.0f, -1.0f, + -1.0f, 1.0f, 1.0f, + // bottom- z (for flip) + -1.0f, -1.0f, 1.0f, + -1.0f, -1.0f, -1.0f, + 1.0f, -1.0f, 1.0f, + 1.0f, -1.0f, 1.0f, + -1.0f, -1.0f, -1.0f, + 1.0f, -1.0f, -1.0f, + // back- x (for flip) + 1.0f, -1.0f, 1.0f, + 1.0f, 1.0f, 1.0f, + -1.0f, 1.0f, 1.0f, + -1.0f, 1.0f, 1.0f, + -1.0f, -1.0f, 1.0f, + 1.0f, -1.0f, 1.0f, + // front- x (for flip) + 1.0f, 1.0f, -1.0f, + 1.0f, -1.0f, -1.0f, + -1.0f, -1.0f, -1.0f, + -1.0f, -1.0f, -1.0f, + -1.0f, 1.0f, -1.0f, + 1.0f, 1.0f, -1.0f, + }; + return new CubeMapMesh(vertexData, transData, context); + } + + /** Used by static constructors. */ + private CubeMapMesh(float[] vertexData, float[] transData, Context context) { + vertices = vertexData; + transVertices = transData; + vertexBuffer = Utils.createBuffer(vertices); + transBuffer = Utils.createBuffer(transData); + mContext = context; + } + + /** + * Finishes initialization of the GL components. + * + * @param textureId GL_TEXTURE_EXTERNAL_OES used for this mesh. 
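 *     Note: for this cube-map mesh, {@link #glDraw} binds the id as
 *     {@code GLES20.GL_TEXTURE_CUBE_MAP}, e.g. a texture created with
 *     {@link Utils#glCreateTextureForCube(int, int)}, rather than an external OES texture.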
+ */ + /* package */ void glInit(int textureId) { + this.textureId = textureId; + + program = Utils.compileProgram(VERTEX_SHADER_CODE, FRAGMENT_SHADER_CODE); + + mvpMatrixHandle = GLES20.glGetUniformLocation(program, "uMvpMatrix"); + positionHandle = GLES20.glGetAttribLocation(program, "aPosition"); + transpositionHandle = GLES20.glGetAttribLocation(program, "transPosition"); + textureHandle = GLES20.glGetUniformLocation(program, "uTexture"); + + checkGlError(); + Log.e(TAG, "Cubemap Mesh " + positionHandle + textureHandle); + } + + /** + * Renders the mesh. This must be called on the GL thread. + * + * @param mvpMatrix The Model View Projection matrix. + * @param eyeType An {@link Eye.Type} value. + */ + /* package */ void glDraw(float[] mvpMatrix, int eyeType, int[] transformType, int width, int height) { + // Configure shader. + GLES20.glUseProgram(program); + checkGlError(); + GLES20.glViewport(0,0, width, height); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_CUBE_MAP, textureId); + + GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0); + GLES20.glUniform1i(textureHandle, 0); + if (!isChangeTransformType && transformType != null) + { + ChangeTransformVertices(transformType); + isChangeTransformType = true; + } + checkGlError(); + GLES20.glEnableVertexAttribArray(positionHandle); + GLES20.glVertexAttribPointer(positionHandle, POSITION_COORDS_PER_VERTEX, + GLES20.GL_FLOAT, false, 0, vertexBuffer); + GLES20.glEnableVertexAttribArray(transpositionHandle); + GLES20.glVertexAttribPointer(transpositionHandle, POSITION_COORDS_PER_VERTEX, + GLES20.GL_FLOAT, false, 0, transBuffer); + GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, VERTEX_NUM_FOR_SKYBOX); + GLES20.glUseProgram(0); + Log.e(TAG, "Cubemap Mesh draw"); + } + + /** Cleans up the GL resources. 
*/ + /* package */ void glShutdown() { + if (program != 0) { + GLES20.glDeleteProgram(program); + GLES20.glDeleteTextures(1, new int[]{textureId}, 0); + } + } + + void ChangeTransformVertices(int[] transformtype) + { + boolean needChanged = false; + for (int i = 0; i < transformtype.length; i++) + { + if (transformtype[i] != TransformType.NO_TRANSFORM) + { + needChanged = true; + break; + } + } + if (!needChanged) + { + return; + } + + int face_id = FaceID.CUBE_MAP_RIGHT; + for (; face_id <= FaceID.CUBE_MAP_FRONT; face_id++) + { + if (transformtype[face_id] != TransformType.NO_TRANSFORM) + { + for (int j = face_id * 18; j < (face_id + 1) * 18; j++) + { + if (j % 3 == 0) + { + if (transformtype[face_id] == TransformType.MIRRORING_HORIZONTALLY) + { + if (face_id == FaceID.CUBE_MAP_RIGHT || face_id == FaceID.CUBE_MAP_LEFT) + transVertices[j + 2] = -transVertices[j + 2]; + else if (face_id == FaceID.CUBE_MAP_TOP || face_id == FaceID.CUBE_MAP_BOTTOM) + transVertices[j] = -transVertices[j]; + else if (face_id == FaceID.CUBE_MAP_BACK || face_id == FaceID.CUBE_MAP_FRONT) + transVertices[j] = -transVertices[j]; + } + else + { + if (transformtype[face_id] == TransformType.ROTATION_180_ANTICLOCKWISE_AFTER_MIRRORING_HOR) + { + if (face_id == FaceID.CUBE_MAP_RIGHT || face_id == FaceID.CUBE_MAP_LEFT) + transVertices[j + 2] = -transVertices[j + 2]; + else if (face_id == FaceID.CUBE_MAP_TOP || face_id == FaceID.CUBE_MAP_BOTTOM) + transVertices[j] = -transVertices[j]; + else if (face_id == FaceID.CUBE_MAP_BACK || face_id == FaceID.CUBE_MAP_FRONT) + transVertices[j] = -transVertices[j]; + } + float transDegree = 0; + if (face_id == FaceID.CUBE_MAP_RIGHT || face_id == FaceID.CUBE_MAP_BOTTOM || face_id == FaceID.CUBE_MAP_BACK) + { + if (transformtype[face_id] == TransformType.ROTATION_180_ANTICLOCKWISE || transformtype[face_id] == TransformType.ROTATION_180_ANTICLOCKWISE_AFTER_MIRRORING_HOR) + transDegree = (float) Math.PI; + else if (transformtype[face_id] == TransformType.ROTATION_90_ANTICLOCKWISE || transformtype[face_id] == TransformType.ROTATION_90_ANTICLOCKWISE_BEFORE_MIRRORING_HOR) + transDegree = (float) Math.PI / 2; + else if (transformtype[face_id] == TransformType.ROTATION_270_ANTICLOCKWISE || transformtype[face_id] == TransformType.ROTATION_270_ANTICLOCKWISE_BEFORE_MIRRORING_HOR) + transDegree = (float) Math.PI / 2 * 3; + } + else + { + if (transformtype[face_id] == TransformType.ROTATION_180_ANTICLOCKWISE || transformtype[face_id] == TransformType.ROTATION_180_ANTICLOCKWISE_AFTER_MIRRORING_HOR) + transDegree = (float) Math.PI; + else if (transformtype[face_id] == TransformType.ROTATION_90_ANTICLOCKWISE || transformtype[face_id] == TransformType.ROTATION_90_ANTICLOCKWISE_BEFORE_MIRRORING_HOR) + transDegree = (float) Math.PI / 2 * 3; + else if (transformtype[face_id] == TransformType.ROTATION_270_ANTICLOCKWISE || transformtype[face_id] == TransformType.ROTATION_270_ANTICLOCKWISE_BEFORE_MIRRORING_HOR) + transDegree = (float) Math.PI / 2; + } + + // different face id + if (face_id == FaceID.CUBE_MAP_RIGHT) // NY + { + float y = transVertices[j + 1]; + float z = transVertices[j + 2]; + transVertices[j + 1] = (float) (y * cos(transDegree) - z * sin(transDegree)); + transVertices[j + 2] = (float) (y * sin(transDegree) + z * cos(transDegree)); + } + else if (face_id == FaceID.CUBE_MAP_LEFT) // PY + { + float y = transVertices[j + 1]; + float z = transVertices[j + 2]; + transVertices[j + 1] = (float) (y * cos(transDegree) - z * sin(transDegree)); + transVertices[j + 2] = (float) (y * sin(transDegree) + z 
* cos(transDegree)); + } + else if (face_id == FaceID.CUBE_MAP_TOP) // PZ + { + float x = transVertices[j]; + float z = transVertices[j + 2]; + transVertices[j] = (float) (x * cos(transDegree) - z * sin(transDegree)); + transVertices[j + 2] = (float) (x * sin(transDegree) + z * cos(transDegree)); + } + else if (face_id == FaceID.CUBE_MAP_BOTTOM) // NZ + { + float x = transVertices[j]; + float z = transVertices[j + 2]; + transVertices[j] = (float) (x * cos(transDegree) - z * sin(transDegree)); + transVertices[j + 2] = (float) (x * sin(transDegree) + z * cos(transDegree)); + } + else if (face_id == FaceID.CUBE_MAP_BACK) // NX + { + float x = transVertices[j]; + float y = transVertices[j + 1]; + transVertices[j] = (float) (x * cos(transDegree) - y * sin(transDegree)); + transVertices[j + 1] = (float) (x * sin(transDegree) + y * cos(transDegree)); + } + else if (face_id == FaceID.CUBE_MAP_FRONT) // PX + { + float x = transVertices[j]; + float y = transVertices[j + 1]; + transVertices[j] = (float) (x * cos(transDegree) - y * sin(transDegree)); + transVertices[j + 1] = (float) (x * sin(transDegree) + y * cos(transDegree)); + } + // first anti-clockwise and then hor-mirror + if (transformtype[face_id] == TransformType.ROTATION_90_ANTICLOCKWISE_BEFORE_MIRRORING_HOR || transformtype[face_id] == TransformType.ROTATION_270_ANTICLOCKWISE_BEFORE_MIRRORING_HOR) + { + if (face_id == FaceID.CUBE_MAP_RIGHT || face_id == FaceID.CUBE_MAP_LEFT) + transVertices[j + 2] = -transVertices[j + 2]; + else if (face_id == FaceID.CUBE_MAP_TOP || face_id == FaceID.CUBE_MAP_BOTTOM) + transVertices[j] = -transVertices[j]; + else if (face_id == FaceID.CUBE_MAP_BACK || face_id == FaceID.CUBE_MAP_FRONT) + transVertices[j] = -transVertices[j]; + } + } + } + } + } + } + transBuffer.put(transVertices); + transBuffer.position(0); + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/ERPMesh.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/ERPMesh.java new file mode 100644 index 00000000..81bd9f4a --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/ERPMesh.java @@ -0,0 +1,243 @@ +package com.vcd.immersive.omafplayer.Rendering; + +import static android.support.constraint.Constraints.TAG; +import static com.vcd.immersive.omafplayer.Rendering.Utils.checkGlError; + +import android.opengl.GLES20; +import android.util.Log; + +import com.google.vr.sdk.base.Eye; + +/** + * Utility class to generate & render spherical meshes for video or images. Use the static creation + * methods to construct the Mesh's data. Then call the Mesh constructor on the GL thread when ready. + * Use glDraw method to render it. + */ +public final class ERPMesh extends Mesh { + + // Basic vertex & fragment shaders to render a mesh with 3D position & 2D texture data. + private static final String[] VERTEX_SHADER_CODE = + new String[] { + "uniform mat4 uMvpMatrix;", + "attribute vec4 aPosition;", + "attribute vec2 aTexCoords;", + "varying vec2 vTexCoords;", + + // Standard transformation. + "void main() {", + " gl_Position = uMvpMatrix * aPosition;", + " vTexCoords = aTexCoords;", + "}" + }; + private static final String[] FRAGMENT_SHADER_CODE = + new String[] { + // This is required since the texture data is GL_TEXTURE_EXTERNAL_OES. + // "#extension GL_OES_EGL_image_external : require", + "precision mediump float;", + + // Standard texture rendering shader. 
+ // "uniform samplerExternalOES uTexture;", + "uniform sampler2D uTexture;", + "varying vec2 vTexCoords;", + "void main() {", + " gl_FragColor = texture2D(uTexture, vTexCoords);", + "}" + }; + + // Constants related to vertex data. + private static final int POSITION_COORDS_PER_VERTEX = 3; // X, Y, Z. + // The vertex contains texture coordinates for both the left & right eyes. If the scene is + // rendered in VR, the appropriate part of the vertex will be selected at runtime. For a mono + // scene, only the left eye's UV coordinates are used. + // For mono media, the UV coordinates are duplicated in each. For stereo media, the UV coords + // point to the appropriate part of the source media. + private static final int TEXTURE_COORDS_PER_VERTEX = 2 * 2; + // COORDS_PER_VERTEX + private static final int CPV = POSITION_COORDS_PER_VERTEX + TEXTURE_COORDS_PER_VERTEX; + // Data is tightly packed. Each vertex is [x, y, z, u_left, v_left, u_right, v_right]. + private static final int VERTEX_STRIDE_BYTES = CPV * Utils.BYTES_PER_FLOAT; + + + /** + * Generates a 3D UV sphere for rendering monoscopic or stereoscopic video. + * + *
<p>
This can be called on any thread. The returned {@link Mesh} isn't valid until + * {@link #glInit(int)} is called. + * + * @param radius Size of the sphere. Must be > 0. + * @param latitudes Number of rows that make up the sphere. Must be >= 1. + * @param longitudes Number of columns that make up the sphere. Must be >= 1. + * @param verticalFovDegrees Total latitudinal degrees that are covered by the sphere. Must be in + * (0, 180]. + * @param horizontalFovDegrees Total longitudinal degrees that are covered by the sphere.Must be + * in (0, 360]. + * @param mediaFormat A MEDIA_* value. + * @return Unintialized Mesh. + */ + public static ERPMesh Create( + MeshParams params) { + if (params.radius <= 0 + || params.latitudes < 1 || params.longitudes < 1 + || params.hFOV <= 0 || params.vFOV > 180 + || params.hFOV <= 0 || params.vFOV > 360) { + throw new IllegalArgumentException("Invalid parameters for sphere."); + } + + // Compute angular size in radians of each UV quad. + float verticalFovRads = (float) Math.toRadians(params.vFOV); + float horizontalFovRads = (float) Math.toRadians(params.hFOV); + float quadHeightRads = verticalFovRads / params.latitudes; + float quadWidthRads = horizontalFovRads / params.longitudes; + + // Each latitude strip has 2 * (longitudes quads + extra edge) vertices + 2 degenerate vertices. + int vertexCount = (2 * (params.longitudes + 1) + 2) * params.latitudes; + // Buffer to return. + float[] vertexData = new float[vertexCount * CPV]; + + // Generate the data for the sphere which is a set of triangle strips representing each + // latitude band. + int v = 0; // Index into the vertex array. + // (i, j) represents a quad in the equirectangular sphere. + for (int j = 0; j < params.latitudes; ++j) { // For each horizontal triangle strip. + // Each latitude band lies between the two phi values. Each vertical edge on a band lies on + // a theta value. + float phiLow = (quadHeightRads * j - verticalFovRads / 2); + float phiHigh = (quadHeightRads * (j + 1) - verticalFovRads / 2); + + for (int i = 0; i < params.longitudes + 1; ++i) { // For each vertical edge in the band. + for (int k = 0; k < 2; ++k) { // For low and high points on an edge. + // For each point, determine it's position in polar coordinates. + float phi = (k == 0) ? phiLow : phiHigh; + float theta = quadWidthRads * i + (float) Math.PI - horizontalFovRads / 2; + + // Set vertex position data as Cartesian coordinates. + vertexData[CPV * v + 0] = -(float) (params.radius * Math.sin(theta) * Math.cos(phi)); + vertexData[CPV * v + 1] = (float) (params.radius * Math.sin(phi)); + vertexData[CPV * v + 2] = (float) (params.radius * Math.cos(theta) * Math.cos(phi)); + + // Set vertex texture.x data. + if (params.mediaFormat == MEDIA_STEREO_LEFT_RIGHT) { + // For left-right media, each eye's x coordinate points to the left or right half of the + // texture. + vertexData[CPV * v + 3] = (i * quadWidthRads / horizontalFovRads) / 2; + vertexData[CPV * v + 5] = (i * quadWidthRads / horizontalFovRads) / 2 + .5f; + } else { + // For top-bottom or monoscopic media, the eye's x spans the full width of the texture. + vertexData[CPV * v + 3] = i * quadWidthRads / horizontalFovRads; + vertexData[CPV * v + 5] = i * quadWidthRads / horizontalFovRads; + } + + // Set vertex texture.y data. The "1 - ..." is due to Canvas vs GL coords. + if (params.mediaFormat == MEDIA_STEREO_TOP_BOTTOM) { + // For top-bottom media, each eye's y coordinate points to the top or bottom half of the + // texture. 
+ vertexData[CPV * v + 4] = 1 - (((j + k) * quadHeightRads / verticalFovRads) / 2 + .5f); + vertexData[CPV * v + 6] = 1 - ((j + k) * quadHeightRads / verticalFovRads) / 2; + } else { + // For left-right or monoscopic media, the eye's y spans the full height of the texture. + vertexData[CPV * v + 4] = 1 - (j + k) * quadHeightRads / verticalFovRads; + vertexData[CPV * v + 6] = 1 - (j + k) * quadHeightRads / verticalFovRads; + } + v++; + + // Break up the triangle strip with degenerate vertices by copying first and last points. + if ((i == 0 && k == 0) || (i == params.longitudes && k == 1)) { + System.arraycopy(vertexData, CPV * (v - 1), vertexData, CPV * v, CPV); + v++; + } + } + // Move on to the next vertical edge in the triangle strip. + } + // Move on to the next triangle strip. + } + + return new ERPMesh(vertexData); + } + + /** Used by static constructors. */ + private ERPMesh(float[] vertexData) { + vertices = vertexData; + vertexBuffer = Utils.createBuffer(vertices); + } + + /** + * Finishes initialization of the GL components. + * + * @param textureId GL_TEXTURE_EXTERNAL_OES used for this mesh. + */ + /* package */ void glInit(int textureId) { + this.textureId = textureId; + + program = Utils.compileProgram(VERTEX_SHADER_CODE, FRAGMENT_SHADER_CODE); + + mvpMatrixHandle = GLES20.glGetUniformLocation(program, "uMvpMatrix"); + positionHandle = GLES20.glGetAttribLocation(program, "aPosition"); + texCoordsHandle = GLES20.glGetAttribLocation(program, "aTexCoords"); + textureHandle = GLES20.glGetUniformLocation(program, "uTexture"); + Log.e(TAG, "ERP Mesh " + texCoordsHandle + textureHandle); + } + + /** + * Renders the mesh. This must be called on the GL thread. + * + * @param mvpMatrix The Model View Projection matrix. + * @param eyeType An {@link Eye.Type} value. + */ + /* package */ void glDraw(float[] mvpMatrix, int eyeType, int[] transformType, int width, int height) { + // Configure shader. + GLES20.glUseProgram(program); + checkGlError(); + + GLES20.glViewport(0,0, width, height); + + GLES20.glEnableVertexAttribArray(positionHandle); + GLES20.glEnableVertexAttribArray(texCoordsHandle); + checkGlError(); + + GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); + GLES20.glUniform1i(textureHandle, 0); + checkGlError(); + + // Load position data. + vertexBuffer.position(0); + GLES20.glVertexAttribPointer( + positionHandle, + POSITION_COORDS_PER_VERTEX, + GLES20.GL_FLOAT, + false, + VERTEX_STRIDE_BYTES, + vertexBuffer); + checkGlError(); + + // Load texture data. Eye.Type.RIGHT uses the left eye's data. + int textureOffset = + (eyeType == Eye.Type.RIGHT) ? POSITION_COORDS_PER_VERTEX + 2 : POSITION_COORDS_PER_VERTEX; + vertexBuffer.position(textureOffset); + GLES20.glVertexAttribPointer( + texCoordsHandle, + TEXTURE_COORDS_PER_VERTEX, + GLES20.GL_FLOAT, + false, + VERTEX_STRIDE_BYTES, + vertexBuffer); + checkGlError(); + + // Render. + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertices.length / CPV); + checkGlError(); + + GLES20.glDisableVertexAttribArray(positionHandle); + GLES20.glDisableVertexAttribArray(texCoordsHandle); + Log.e(TAG, "ERP Mesh draw"); + } + + /** Cleans up the GL resources. 
*/ + /* package */ void glShutdown() { + if (program != 0) { + GLES20.glDeleteProgram(program); + GLES20.glDeleteTextures(1, new int[]{textureId}, 0); + } + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Mesh.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Mesh.java new file mode 100644 index 00000000..4c925f98 --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Mesh.java @@ -0,0 +1,90 @@ +package com.vcd.immersive.omafplayer.Rendering; + +import android.content.Context; + +import com.google.vr.sdk.base.Eye; +import java.nio.FloatBuffer; + +/** + * Utility class to generate & render spherical meshes for video or images. Use the static creation + * methods to construct the Mesh's data. Then call the Mesh constructor on the GL thread when ready. + * Use glDraw method to render it. + */ +public class Mesh { + /** Standard media where a single camera frame takes up the entire media frame. */ + public static final int MEDIA_MONOSCOPIC = 0; + /** + * Stereo media where the left & right halves of the frame are rendered for the left & right eyes, + * respectively. If the stereo media is rendered in a non-VR display, only the left half is used. + */ + public static final int MEDIA_STEREO_LEFT_RIGHT = 1; + /** + * Stereo media where the top & bottom halves of the frame are rendered for the left & right eyes, + * respectively. If the stereo media is rendered in a non-VR display, only the top half is used. + */ + public static final int MEDIA_STEREO_TOP_BOTTOM = 2; + + // Vertices for the mesh with 3D position + left 2D texture UV + right 2D texture UV. + public float[] vertices; + public float[] transVertices; + public FloatBuffer vertexBuffer; + public FloatBuffer transBuffer; + + // Program related GL items. These are only valid if program != 0. + public int program; + public int mvpMatrixHandle; + public int positionHandle; + public int transpositionHandle; + public int texCoordsHandle; + public int textureHandle; + public int textureId; + + public Context mContext; + + public static class MeshParams { + // For ERP + public float radius; + public int latitudes; + public int longitudes; + public float vFOV; + public float hFOV; + public int mediaFormat; + // For CMP + // ... + } + /** + * Generates a 3D UV sphere for rendering monoscopic or stereoscopic video. + * + *
<p>
This can be called on any thread. The returned {@link Mesh} isn't valid until + * {@link #glInit(int)} is called. + * + * @param Mesh.MeshParams parameters for creating mesh. + * @return Unintialized Mesh. + */ + public static Mesh Create(MeshParams params) { return new Mesh(null); }; + + public Mesh() {} + /** Used by static constructors. */ + private Mesh(float[] vertexData) { + vertices = vertexData; + vertexBuffer = Utils.createBuffer(vertices); + } + + /** + * Finishes initialization of the GL components. + * + * @param textureId GL_TEXTURE_EXTERNAL_OES used for this mesh. + */ + /* package */ void glInit(int textureId) {} + + /** + * Renders the mesh. This must be called on the GL thread. + * + * @param mvpMatrix The Model View Projection matrix. + * @param eyeType An {@link Eye.Type} value. + */ + /* package */ void glDraw(float[] mvpMatrix, int eyeType, int[] transformType, int width, int height) {} + + /** Cleans up the GL resources. */ + /* package */ void glShutdown() {} +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Reticle.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Reticle.java new file mode 100644 index 00000000..b09eeb6e --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Reticle.java @@ -0,0 +1,126 @@ +package com.vcd.immersive.omafplayer.Rendering; + +import static com.vcd.immersive.omafplayer.Rendering.Utils.checkGlError; + +import android.opengl.GLES20; +import android.opengl.Matrix; +import com.google.vr.sdk.controller.Orientation; +import java.nio.FloatBuffer; + +/** + * Renders a reticle in VR for the Daydream Controller. + * + *
<p>
This is a minimal example that renders a circle at 1 meter from the user based on the rotation + * of the controller. + */ +final class Reticle { + // The reticle quad is 2 * SIZE units. + private static final float SIZE = .01f; + private static final float DISTANCE = 1; + + // Standard vertex shader. + private static final String[] vertexShaderCode = + new String[] { + "uniform mat4 uMvpMatrix;", + "attribute vec3 aPosition;", + "varying vec2 vCoords;", + + // Passthrough normalized vertex coordinates. + "void main() {", + " gl_Position = uMvpMatrix * vec4(aPosition, 1);", + " vCoords = aPosition.xy / vec2(" + SIZE + ", " + SIZE + ");", + "}" + }; + + // Procedurally render a ring on the quad between the specified radii. + private static final String[] fragmentShaderCode = + new String[] { + "precision mediump float;", + "varying vec2 vCoords;", + + // Simple ring shader that is white between the radii and transparent elsewhere. + "void main() {", + " float r = length(vCoords);", + // Blend the edges of the ring at .55 +/- .05 and .85 +/- .05. + " float alpha = smoothstep(0.5, 0.6, r) * (1.0 - smoothstep(0.8, 0.9, r));", + " if (alpha == 0.0) {", + " discard;", + " } else {", + " gl_FragColor = vec4(alpha);", + " }", + "}" + }; + + // Program-related GL items. These are only valid if program != 0. + private int program = 0; + private int mvpMatrixHandle; + private int positionHandle; + + // Simple quad mesh. + private static final int COORDS_PER_VERTEX = 3; + private static final float[] vertexData = { + -SIZE, -SIZE, -DISTANCE, + SIZE, -SIZE, -DISTANCE, + -SIZE, SIZE, -DISTANCE, + SIZE, SIZE, -DISTANCE, + }; + private static final FloatBuffer vertexBuffer = Utils.createBuffer(vertexData); + + // The reticle doesn't have a real modelMatrix. Its distance is baked into the mesh and it + // uses a rotation matrix when rendered. + private final float[] modelViewProjectionMatrix = new float[16]; + + /** Finishes initialization of this object on the GL thread. */ + public void glInit() { + if (program != 0) { + return; + } + + program = Utils.compileProgram(vertexShaderCode, fragmentShaderCode); + mvpMatrixHandle = GLES20.glGetUniformLocation(program, "uMvpMatrix"); + positionHandle = GLES20.glGetAttribLocation(program, "aPosition"); + checkGlError(); + } + + /** + * Renders the reticle. + * + * @param viewProjectionMatrix Scene's view projection matrix. + * @param orientation Rotation matrix derived from {@link Orientation#toRotationMatrix(float[])}. + */ + public void glDraw(float[] viewProjectionMatrix, float[] orientation) { + // Configure shader. + GLES20.glUseProgram(program); + checkGlError(); + + Matrix.multiplyMM(modelViewProjectionMatrix, 0, viewProjectionMatrix, 0, orientation, 0); + GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjectionMatrix, 0); + checkGlError(); + + // Render quad. + GLES20.glEnableVertexAttribArray(positionHandle); + checkGlError(); + + vertexBuffer.position(0); + GLES20.glVertexAttribPointer( + positionHandle, + COORDS_PER_VERTEX, + GLES20.GL_FLOAT, + false, + 0, + vertexBuffer); + checkGlError(); + + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexData.length / COORDS_PER_VERTEX); + checkGlError(); + + GLES20.glDisableVertexAttribArray(positionHandle); + } + + /** Frees GL resources. 
*/ + public void glShutdown() { + if (program != 0) { + GLES20.glDeleteProgram(program); + } + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/SceneRenderer.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/SceneRenderer.java new file mode 100644 index 00000000..caddf188 --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/SceneRenderer.java @@ -0,0 +1,411 @@ +package com.vcd.immersive.omafplayer.Rendering; + +import static com.vcd.immersive.omafplayer.Rendering.Utils.checkGlError; + +import android.content.Context; +import android.graphics.PointF; +import android.graphics.SurfaceTexture; +import android.graphics.SurfaceTexture.OnFrameAvailableListener; +import android.opengl.GLES20; +import android.opengl.Matrix; +import android.os.Handler; +import android.os.Looper; +import android.os.SystemClock; +import android.support.annotation.AnyThread; +import android.support.annotation.BinderThread; +import android.support.annotation.MainThread; +import android.support.annotation.Nullable; +import android.util.Log; +import android.util.Pair; +import android.view.InputDevice; +import android.view.MotionEvent; +import android.view.Surface; +import android.view.ViewGroup; +import com.google.vr.sdk.controller.Orientation; +import com.vcd.immersive.omafplayer.MediaPlayer.NativeMediaPlayer; +import com.vcd.immersive.omafplayer.VideoUiView; +import java.util.concurrent.atomic.AtomicBoolean; +/** + * Controls and renders the GL Scene. + * + *
<p>
This class is shared between MonoscopicView & VrVideoActivity. It renders the display mesh, UI + * and controller reticle as required. It also has basic Controller input which allows the user to + * interact with {@link VideoUiView} while in VR. + */ +public final class SceneRenderer { + private static final String TAG = "SceneRenderer"; + private static final long Interval = 33; + private static final int MULTI_DECODER_MAX_NUM = 5; + // This is the primary interface between the Media Player and the GL Scene. + private Surface[] decodeSurface = new Surface[MULTI_DECODER_MAX_NUM]; + private Surface displaySurface; + public SurfaceTexture displayTexture; + private SurfaceTexture[] decodeTexture = new SurfaceTexture[MULTI_DECODER_MAX_NUM]; + private final AtomicBoolean frameAvailable = new AtomicBoolean(); + // Used to notify clients that displayTexture has a new frame. This requires synchronized access. + @Nullable + private OnFrameAvailableListener externalFrameListener; + + // GL components for the mesh that display the media. displayMesh should only be accessed on the + // GL Thread, but requestedDisplayMesh needs synchronization. + @Nullable + private Mesh displayMesh; + @Nullable + private Mesh requestedDisplayMesh; + public int displayTexId; + public int[] decodeTexId = new int[MULTI_DECODER_MAX_NUM]; + + private int drawTimes = 0; + + private int renderCount = 0; + + private int cnt = 0; + + public boolean decode_surface_ready = false; + private NativeMediaPlayer mediaPlayer; + // These are only valid if createForVR() has been called. In the 2D Activity, these are null + // since the UI is rendered in the standard Android layout. + @Nullable + private final CanvasQuad canvasQuad; + @Nullable + private final VideoUiView videoUiView; + @Nullable + private final Handler uiHandler; + + // Controller components. + private final Reticle reticle = new Reticle(); + @Nullable + private Orientation controllerOrientation; + // This is accessed on the binder & GL Threads. + private final float[] controllerOrientationMatrix = new float[16]; + boolean hasTransformTypeSent = false; + int[] transformType = null; + + /** + * Constructs the SceneRenderer with the given values. + */ + /* package */ SceneRenderer( + CanvasQuad canvasQuad, VideoUiView videoUiView, Handler uiHandler, + SurfaceTexture.OnFrameAvailableListener externalFrameListener) { + this.canvasQuad = canvasQuad; + this.videoUiView = videoUiView; + this.uiHandler = uiHandler; + this.externalFrameListener = externalFrameListener; + } + + /** + * Creates a SceneRenderer for 2D but does not initialize it. {@link #glInit()} is used to finish + * initializing the object on the GL thread. + */ + public static SceneRenderer createFor2D() { + return new SceneRenderer(null, null, null, null); + } + + /** + * Creates a SceneRenderer for VR but does not initialize it. {@link #glInit()} is used to finish + * initializing the object on the GL thread. + * + *
<p>
The also creates a {@link VideoUiView} that is bound to the VR scene. The View is backed by + * a {@link CanvasQuad} and is meant to be rendered in a VR scene. + * + * @param context the {@link Context} used to initialize the {@link VideoUiView} + * @param parent the new view is attached to the parent in order to properly handle Android + * events + * @return a SceneRender configured for VR and a bound {@link VideoUiView} that can be treated + * similar to a View returned from findViewById. + */ + @MainThread + public static Pair createForVR(Context context, ViewGroup parent) { + CanvasQuad canvasQuad = new CanvasQuad(); + VideoUiView videoUiView = VideoUiView.createForOpenGl(context, parent, canvasQuad); + OnFrameAvailableListener externalFrameListener = videoUiView.getFrameListener(); + + SceneRenderer scene = new SceneRenderer( + canvasQuad, videoUiView, new Handler(Looper.getMainLooper()), externalFrameListener); + return Pair.create(scene, videoUiView); + } + + public void setMediaPlayer(NativeMediaPlayer mediaPlayer) { + this.mediaPlayer = mediaPlayer; + } + + /** + * Performs initialization on the GL thread. The scene isn't fully initialized until + * glConfigureScene() completes successfully. + */ + public void glInit() { + checkGlError(); + Matrix.setIdentityM(controllerOrientationMatrix, 0); + + // Set the background frame color. This is only visible if the display mesh isn't a full sphere. + GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f); + checkGlError(); + + // Create the texture used to render each frame of video. + for (int i = 0; i < MULTI_DECODER_MAX_NUM; i++) { + decodeTexId[i] = Utils.glCreateExternalTexture(); + decodeTexture[i] = new SurfaceTexture(decodeTexId[i]); + checkGlError(); + } + Log.e(TAG, "decode texture is created!"); + // When the video decodes a new frame, tell the GL thread to update the image. + decodeTexture[0].setOnFrameAvailableListener( + new OnFrameAvailableListener() { + @Override + public void onFrameAvailable(SurfaceTexture surfaceTexture) { + frameAvailable.set(true); + + synchronized (SceneRenderer.this) { + if (externalFrameListener != null) { + externalFrameListener.onFrameAvailable(surfaceTexture); + } + } + } + }); + + if (canvasQuad != null) { + canvasQuad.glInit(); + } + reticle.glInit(); + + decode_surface_ready = true; + } + + /** + * Creates the decode Surface used by the MediaPlayer to decode video. + * + * @param width passed to {@link SurfaceTexture#setDefaultBufferSize(int, int)} + * @param height passed to {@link SurfaceTexture#setDefaultBufferSize(int, int)} + * @return a Surface that can be passed to + */ + @AnyThread + public synchronized @Nullable + Pair createDecodeSurface(int width, int height, int id) { + if (decodeTexture[id] == null) { + Log.e(TAG, ".createDecode called before GL Initialization completed."); + return null; + } + Log.i(TAG, "set default size : width " + width + " height " + height); + decodeTexture[id].setDefaultBufferSize(width, height); + decodeSurface[id] = new Surface(decodeTexture[id]); + Pair ret = new Pair<>(decodeTexId[id], decodeSurface[id]); + return ret; + } + + /** + * Creates the Surface & Mesh used by the MediaPlayer to render video. 
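 *
 * <p>Illustrative call sequence (caller-side names and the {@code Pair}'s type parameters are
 * assumptions):
 * <pre>{@code
 * Mesh.MeshParams params = new Mesh.MeshParams();
 * params.radius = 50; params.latitudes = 36; params.longitudes = 72;
 * params.vFOV = 180;  params.hFOV = 360;     params.mediaFormat = Mesh.MEDIA_MONOSCOPIC;
 * Pair<Integer, Surface> texAndSurface =
 *     sceneRenderer.createDisplaySurface(frameWidth, frameHeight, ERPMesh.Create(params));
 * }</pre>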
+ * + * @param width passed to {@link SurfaceTexture#setDefaultBufferSize(int, int)} + * @param height passed to {@link SurfaceTexture#setDefaultBufferSize(int, int)} + * @param mesh {@link Mesh} used to display video + * @return a Surface that can be passed to {@link android.media.MediaPlayer#setSurface(Surface)} + */ + @AnyThread + public synchronized @Nullable + Pair createDisplaySurface(int width, int height, Mesh mesh) { + if (displayTexture == null) { + Log.e(TAG, ".createDisplay called before GL Initialization completed."); + return null; + } + + requestedDisplayMesh = mesh; + + displayTexture.setDefaultBufferSize(width, height); + displaySurface = new Surface(displayTexture); + Pair ret = new Pair<>(displayTexId, displaySurface); + return ret; + } + /** + * Configures any late-initialized components. + * + *
<p>
Since the creation of the Mesh can depend on disk access, this configuration needs to run + * during each drawFrame to determine if the Mesh is ready yet. This also supports replacing an + * existing mesh while the app is running. + * + * @return true if the scene is ready to be drawn + */ + private synchronized boolean glConfigureScene() { + if (displayMesh == null && requestedDisplayMesh == null) { + // The scene isn't ready and we don't have enough information to configure it. + return false; + } + + // The scene is ready and we don't need to change it so we can glDraw it. + if (requestedDisplayMesh == null) { + return true; + } + + // Configure or reconfigure the scene. + if (displayMesh != null) { + // Reconfiguration. + displayMesh.glShutdown(); + } + + displayMesh = requestedDisplayMesh; + requestedDisplayMesh = null; + displayMesh.glInit(displayTexId); + + return true; + } + + /** + * Draws the scene with a given eye pose and type. + * + * @param viewProjectionMatrix 16 element GL matrix. + * @param eyeType an {@link com.google.vr.sdk.base.Eye.Type} value + */ + public void glDrawFrame(float[] viewProjectionMatrix, int eyeType, int width, int height) { + Log.i(TAG, "begin to draw frame !"); + if (mediaPlayer != null) { + mediaPlayer.SetCurrentStatus(1); + } + else{ + return; + } + if (!glConfigureScene()) { + // displayMesh isn't ready. + Log.e(TAG, "gl configure scene is not ready!"); + return; + } + + // glClear isn't strictly necessary when rendering fully spherical panoramas, but it can improve + // performance on tiled renderers by causing the GPU to discard previous data. + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + checkGlError(); + // The uiQuad uses alpha. + GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); + GLES20.glEnable(GLES20.GL_BLEND); + + Log.i(TAG, "begin to update display image!"); + if (frameAvailable.compareAndSet(true, false)) + { + for (int i = 0; i < MULTI_DECODER_MAX_NUM; i++){ + decodeTexture[i].updateTexImage(); + } + Log.i(TAG, "update tex image at pts " + cnt++); + } + if (drawTimes++ % 2 == 0) + { + Log.i(TAG, "begin to update display tex!"); + int ret = 0; + + ret = mediaPlayer.UpdateDisplayTex(renderCount); + if (ret == 0) renderCount++; + checkGlError(); + if (ret == 0 && !hasTransformTypeSent) { + transformType = mediaPlayer.GetTransformType(); + hasTransformTypeSent = true; + } + long drawTimeEnd = System.currentTimeMillis(); + Log.e(TAG, "draw time is " + drawTimeEnd); + } + Log.i(TAG, "begin to draw mesh!"); + displayMesh.glDraw(viewProjectionMatrix, eyeType, transformType, width, height); + if (videoUiView != null) { + canvasQuad.glDraw(viewProjectionMatrix, videoUiView.getAlpha()); + } + reticle.glDraw(viewProjectionMatrix, controllerOrientationMatrix); + } + + /** Cleans up the GL resources. */ + public void glShutdown() { + if (displayMesh != null) { + displayMesh.glShutdown(); + } + if (canvasQuad != null) { + canvasQuad.glShutdown(); + } + reticle.glShutdown(); + } + + /** Updates the Reticle's position with the latest Controller pose. */ + @BinderThread + public synchronized void setControllerOrientation(Orientation currentOrientation) { + this.controllerOrientation = currentOrientation; + controllerOrientation.toRotationMatrix(controllerOrientationMatrix); + } + + /** + * Processes Daydream Controller clicks and dispatches the event to {@link VideoUiView} as a + * synthetic {@link MotionEvent}. + * + *
<p>
This is a minimal input system that works because CanvasQuad is a simple rectangle with a + * hardcoded location. If the quad had a transformation matrix, then those transformations would + * need to be used when converting from the Controller's pose to a 2D click event. + */ + @MainThread + public void handleClick() { + if (videoUiView.getAlpha() == 0) { + // When the UI is hidden, clicking anywhere will make it visible. + toggleUi(); + return; + } + + if (controllerOrientation == null) { + // Race condition between click & pose events. + return; + } + + final PointF clickTarget = CanvasQuad.translateClick(controllerOrientation); + if (clickTarget == null) { + // When the click is outside of the View, hide the UI. + toggleUi(); + return; + } + + // The actual processing of the synthetic event needs to happen in the UI thread. + uiHandler.post( + new Runnable() { + @Override + public void run() { + // Generate a pair of down/up events to make the Android View processing handle the + // click. + long now = SystemClock.uptimeMillis(); + MotionEvent down = MotionEvent.obtain( + now, now, // Timestamps. + MotionEvent.ACTION_DOWN, clickTarget.x, clickTarget.y, // The important parts. + 1, 1, 0, 1, 1, 0, 0); // Unused config data. + down.setSource(InputDevice.SOURCE_GAMEPAD); + videoUiView.dispatchTouchEvent(down); + + // Clone the down event but change action. + MotionEvent up = MotionEvent.obtain(down); + up.setAction(MotionEvent.ACTION_UP); + videoUiView.dispatchTouchEvent(up); + } + }); + } + + /** Uses Android's animation system to fade in/out when the user wants to show/hide the UI. */ + @AnyThread + public void toggleUi() { + // This can be trigged via a controller action so switch to main thread to manipulate the View. + uiHandler.post( + new Runnable() { + @Override + public void run() { + if (videoUiView.getAlpha() == 0) { + videoUiView.animate().alpha(1).start(); + } else { + videoUiView.animate().alpha(0).start(); + } + } + }); + } + + /** + * Binds a listener used by external clients that need to know when a new video frame is ready. + * This is used by MonoscopicView to update the video position slider each frame. + */ + @AnyThread + public synchronized void setVideoFrameListener(OnFrameAvailableListener videoFrameListener) { + externalFrameListener = videoFrameListener; + } + + public void SetCurrentPosition(NativeMediaPlayer.HeadPose pose) + { + if (mediaPlayer != null && mediaPlayer.GetCurrentStatus() != 0) { + mediaPlayer.SetCurrentPosition(pose); + } + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Utils.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Utils.java new file mode 100644 index 00000000..9acf9b5a --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/Utils.java @@ -0,0 +1,166 @@ +package com.vcd.immersive.omafplayer.Rendering; + +import static android.opengl.GLU.gluErrorString; + +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.text.TextUtils; +import android.util.Log; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; +import java.nio.IntBuffer; + +/** GL utility methods. */ +public class Utils { + private static final String TAG = "Video360.Utils"; + + public static final int BYTES_PER_FLOAT = 4; + + /** Debug builds should fail quickly. Release versions of the app should have this disabled. 
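 * One way to do that (an illustrative sketch, not the project's current setup) is to key the flag
 * off the build type: {@code private static final boolean HALT_ON_GL_ERROR = BuildConfig.DEBUG;}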
*/ + private static final boolean HALT_ON_GL_ERROR = true; + + /** Class only contains static methods. */ + private Utils() {} + + /** Checks GLES20.glGetError and fails quickly if the state isn't GL_NO_ERROR. */ + public static void checkGlError() { + int error = GLES20.glGetError(); + int lastError; + if (error != GLES20.GL_NO_ERROR) { + do { + lastError = error; + Log.e(TAG, "glError " + gluErrorString(lastError)); + error = GLES20.glGetError(); + } while (error != GLES20.GL_NO_ERROR); + + if (HALT_ON_GL_ERROR) { + throw new RuntimeException("glError " + gluErrorString(lastError)); + } + } + } + + /** + * Builds a GL shader program from vertex & fragment shader code. The vertex and fragment shaders + * are passed as arrays of strings in order to make debugging compilation issues easier. + * + * @param vertexCode GLES20 vertex shader program. + * @param fragmentCode GLES20 fragment shader program. + * @return GLES20 program id. + */ + public static int compileProgram(String[] vertexCode, String[] fragmentCode) { + checkGlError(); + // prepare shaders and OpenGL program + int vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER); + GLES20.glShaderSource(vertexShader, TextUtils.join("\n", vertexCode)); + GLES20.glCompileShader(vertexShader); + checkGlError(); + + int fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER); + GLES20.glShaderSource(fragmentShader, TextUtils.join("\n", fragmentCode)); + GLES20.glCompileShader(fragmentShader); + checkGlError(); + + int program = GLES20.glCreateProgram(); + GLES20.glAttachShader(program, vertexShader); + GLES20.glAttachShader(program, fragmentShader); + + // Link and check for errors. + GLES20.glLinkProgram(program); + int[] linkStatus = new int[1]; + GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); + if (linkStatus[0] != GLES20.GL_TRUE) { + String errorMsg = "Unable to link shader program: \n" + GLES20.glGetProgramInfoLog(program); + Log.e(TAG, errorMsg); + if (HALT_ON_GL_ERROR) { + throw new RuntimeException(errorMsg); + } + } + checkGlError(); + + return program; + } + + /** Allocates a FloatBuffer with the given data. */ + public static FloatBuffer createBuffer(float[] data) { + ByteBuffer bb = ByteBuffer.allocateDirect(data.length * BYTES_PER_FLOAT); + bb.order(ByteOrder.nativeOrder()); + FloatBuffer buffer = bb.asFloatBuffer(); + buffer.put(data); + buffer.position(0); + + return buffer; + } + /** Allocates a ByteBuffer with the given data. */ + public static ByteBuffer createByteBuffer(byte[] coords) { + ByteBuffer buffer = ByteBuffer.allocateDirect(coords.length); + buffer.order(ByteOrder.nativeOrder()); + buffer.put(coords); + buffer.position(0); + return buffer; + } + + /** + * Creates a GL_TEXTURE_EXTERNAL_OES with default configuration of GL_LINEAR filtering and + * GL_CLAMP_TO_EDGE wrapping. 
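 *
 * <p>Typical use, mirroring {@code SceneRenderer.glInit()} (variable names here are illustrative):
 * <pre>{@code
 * int texId = Utils.glCreateExternalTexture();
 * SurfaceTexture texture = new SurfaceTexture(texId);   // decoder frames land in this texture
 * texture.setDefaultBufferSize(width, height);
 * Surface surface = new Surface(texture);               // hand this Surface to the video decoder
 * }</pre>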
+ */ + public static int glCreateExternalTexture() { + int[] texId = new int[1]; + GLES20.glGenTextures(1, IntBuffer.wrap(texId)); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId[0]); + GLES20.glTexParameteri( + GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri( + GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri( + GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri( + GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + checkGlError(); + return texId[0]; + } + + public static int glCreateTextureFor2D(int width, int height) { + int[] texId = new int[1]; + GLES20.glGenTextures(1, IntBuffer.wrap(texId)); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texId[0]); + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); + GLES20.glTexParameteri( + GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri( + GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri( + GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri( + GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + checkGlError(); + return texId[0]; + } + + public static int glCreateTextureForCube(int width, int height) { + int[] texId = new int[1]; + GLES20.glGenTextures(1, IntBuffer.wrap(texId)); + GLES20.glBindTexture(GLES20.GL_TEXTURE_CUBE_MAP, texId[0]); + //for cube map, face size is 6 (LEFT, FRONT, RIGHT, BACK, TOP, BOTTOM), and (row,col) = (2,3) + int face_size = 6; + int cube_map_col = 3; + int cube_map_row = 2; + width = width / cube_map_col; + height = height / cube_map_row; + Log.i(TAG, "face width is " + width + " face height is " + height); + for (int i = 0; i < face_size; i++){ + GLES20.glTexImage2D(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, GLES20.GL_RGB, width, height, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, null); + } + checkGlError(); + GLES20.glTexParameteri( + GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri( + GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri( + GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri( + GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + checkGlError(); + return texId[0]; + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VideoActivity.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VideoActivity.java new file mode 100644 index 00000000..52975719 --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VideoActivity.java @@ -0,0 +1,160 @@ +package com.vcd.immersive.omafplayer; + +import android.Manifest; +import android.Manifest.permission; +import android.app.Activity; +import android.content.ComponentName; +import android.content.Intent; +import android.content.pm.PackageManager; +import android.os.Bundle; +import android.support.v4.app.ActivityCompat; +import android.support.v4.content.ContextCompat; +import android.view.View; +import android.view.View.OnClickListener; +import android.view.ViewGroup; +import com.google.vr.ndk.base.DaydreamApi; +import 
com.vcd.immersive.omafplayer.Rendering.Mesh; +/** + * Basic Activity to hold {@link MonoscopicView} and render a 360 video in 2D. + * + * Most of this Activity's code is related to Android & VR permission handling. The real work is in + * MonoscopicView. + * + * The default intent for this Activity will load a 360 placeholder panorama. For more options on + * how to load other media using a custom Intent, see {@link MediaLoader}. + */ +public class VideoActivity extends Activity { + private static final String TAG = "VideoActivity"; + private static final int READ_EXTERNAL_STORAGE_PERMISSION_ID = 1; + private static final int WRITE_EXTERNAL_STORAGE_PERMISSION_ID = 2; + private MonoscopicView videoView; + + /** + * Checks that the appropriate permissions have been granted. Otherwise, the sample will wait + * for the user to grant the permission. + * + * @param savedInstanceState unused in this sample but it could be used to track video position + */ + @Override + public void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.video_activity); + + // Configure the MonoscopicView which will render the video and UI. + videoView = (MonoscopicView) findViewById(R.id.video_view); + VideoUiView videoUi = (VideoUiView) findViewById(R.id.video_ui_view); + videoUi.setVrIconClickListener( + new OnClickListener() { + @Override + public void onClick(View v) { + // Convert the Intent used to launch the 2D Activity into one that can launch the VR + // Activity. This flow preserves the extras and data in the Intent. + DaydreamApi api = DaydreamApi.create(VideoActivity.this); + if (api != null){ + // Launch the VR Activity with the proper intent. + Intent intent = DaydreamApi.createVrIntent( + new ComponentName(VideoActivity.this, VrVideoActivity.class)); + intent.setData(getIntent().getData()); + intent.putExtra( + MediaLoader.MEDIA_FORMAT_KEY, + getIntent().getIntExtra(MediaLoader.MEDIA_FORMAT_KEY, Mesh.MEDIA_MONOSCOPIC)); + api.launchInVr(intent); + api.close(); + } else { + // Fall back for devices that don't have Google VR Services. This flow should only + // be used for older Cardboard devices. + Intent intent = + new Intent(getIntent()).setClass(VideoActivity.this, VrVideoActivity.class); + intent.removeCategory(Intent.CATEGORY_LAUNCHER); + intent.setFlags(0); // Clear any flags from the previous intent. + startActivity(intent); + } + + // See VrVideoActivity's launch2dActivity() for more info about why this finish() call + // may be required. + finish(); + } + }); + videoView.initialize(videoUi); + + // Boilerplate for checking runtime permissions in Android. + if (ContextCompat.checkSelfPermission(this, permission.WRITE_EXTERNAL_STORAGE) + != PackageManager.PERMISSION_GRANTED) { + View button = findViewById(R.id.permission_button); + button.setOnClickListener( + new OnClickListener() { + @Override + public void onClick(View v) { + ActivityCompat.requestPermissions( + VideoActivity.this, + new String[] {Manifest.permission.WRITE_EXTERNAL_STORAGE}, + WRITE_EXTERNAL_STORAGE_PERMISSION_ID); + } + }); + // The user can click the button to request permission but we will also click on their behalf + // when the Activity is created. + button.callOnClick(); + } else { + // Permission has already been granted. + initializeActivity(); + } + } + + /** Handles the user accepting the permission. 
*/ + @Override + public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) { + if (requestCode == READ_EXTERNAL_STORAGE_PERMISSION_ID) { + if (results.length > 0 && results[0] == PackageManager.PERMISSION_GRANTED) { + initializeActivity(); + } + } + if (requestCode == WRITE_EXTERNAL_STORAGE_PERMISSION_ID) { + if (results.length > 0 && results[0] == PackageManager.PERMISSION_GRANTED) { + initializeActivity(); + } + } + } + + /** + * Normal apps don't need this. However, since we use adb to interact with this sample, we + * want any new adb Intents to be routed to the existing Activity rather than launching a new + * Activity. + */ + @Override + protected void onNewIntent(Intent intent) { + // Save the new Intent which may contain a new Uri. Then tear down & recreate this Activity to + // load that Uri. + setIntent(intent); + recreate(); + } + + /** Initializes the Activity only if the permission has been granted. */ + private void initializeActivity() { + ViewGroup root = (ViewGroup) findViewById(R.id.activity_root); + for (int i = 0; i < root.getChildCount(); ++i) { + root.getChildAt(i).setVisibility(View.VISIBLE); + } + findViewById(R.id.permission_button).setVisibility(View.GONE); + videoView.loadMedia(getIntent()); + } + + @Override + protected void onResume() { + super.onResume(); + videoView.onResume(); + } + + @Override + protected void onPause() { + // MonoscopicView is a GLSurfaceView so it needs to pause & resume rendering. It's also + // important to pause MonoscopicView's sensors & the video player. + videoView.onPause(); + super.onPause(); + } + + @Override + protected void onDestroy() { + videoView.destroy(); + super.onDestroy(); + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VideoUiView.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VideoUiView.java new file mode 100644 index 00000000..b7475eef --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VideoUiView.java @@ -0,0 +1,266 @@ +package com.vcd.immersive.omafplayer; + +import android.app.ActivityManager; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.SurfaceTexture; +import android.graphics.SurfaceTexture.OnFrameAvailableListener; +import android.support.annotation.AnyThread; +import android.support.annotation.MainThread; +import android.support.annotation.Nullable; +import android.util.AttributeSet; +import android.view.ContextThemeWrapper; +import android.view.InputDevice; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.widget.ImageButton; +import android.widget.LinearLayout; +import android.widget.SeekBar; +import android.widget.TextView; + +import com.vcd.immersive.omafplayer.MediaPlayer.NativeMediaPlayer; +import com.vcd.immersive.omafplayer.Rendering.CanvasQuad; + +/** + * Contains a UI that can be part of a standard 2D Android Activity or a VR Activity. + * + *
For 2D Activities, this View behaves like any other Android View. It receives events from the + * media player, updates the UI, and forwards user input to the appropriate component. In VR + * Activities, this View uses standard Android APIs to render its child Views to a texture that is + * displayed in VR. It also receives events from the Daydream Controller and forwards them to its + * child views. + */ +public class VideoUiView extends LinearLayout { + // These UI elements are only useful when the app is displaying a video. + private SeekBar seekBar; + private TextView statusText; + private final UiUpdater uiUpdater = new UiUpdater(); + + // Since MediaPlayer lacks synchronization for internal events, it should only be accessed on the + // main thread. + @Nullable + private NativeMediaPlayer mediaPlayer; + // The canvasQuad is only not null when this View is in a VR Activity. It provides the backing + // canvas that standard Android child Views render to. + @Nullable + private CanvasQuad canvasQuad; + + /** Creates this View using standard XML inflation. */ + public VideoUiView(Context context, AttributeSet attrs) { + super(context, attrs); + } + + /** + * Creates this view for use in a VR scene. + * + * @param context the context used to set this View's theme + * @param parent a parent view this view will be attached to such as the Activity's root View + * @param quad the floating quad in the VR scene that will render this View + */ + @MainThread + public static VideoUiView createForOpenGl(Context context, ViewGroup parent, CanvasQuad quad) { + // If a custom theme isn't specified, the Context's theme is used. For VR Activities, this is + // the old Android default theme rather than a modern theme. Override this with a custom theme. + Context theme = new ContextThemeWrapper(context, R.style.VrTheme); + + VideoUiView view = (VideoUiView) View.inflate(theme, R.layout.video_ui, null); + view.canvasQuad = quad; + view.setLayoutParams(CanvasQuad.getLayoutParams()); + view.setVisibility(View.VISIBLE); + parent.addView(view, 0); + + view.findViewById(R.id.enter_exit_vr).setContentDescription( + view.getResources().getString(R.string.exit_vr_label)); + + return view; + } + + /** + * Binds the media player in order to update video position if the Activity is showing a video. + * This is also used to clear the bound mediaPlayer when the Activity exits to avoid trying to + * access the mediaPlayer while it is in an invalid state. + * @param mediaPlayer + */ + @MainThread + public void setMediaPlayer(NativeMediaPlayer mediaPlayer) { + this.mediaPlayer = mediaPlayer; + postInvalidate(); + } + + /** Ignores 2D touch events when this View is used in a VR Activity. */ + @Override + public boolean onInterceptTouchEvent(MotionEvent event) { + if (canvasQuad == null) { + // Not in VR mode so use standard behavior. + return super.onInterceptTouchEvent(event); + } + + if (ActivityManager.isRunningInTestHarness()) { + // If your app uses UI Automator tests, it's useful to have this touch system handle touch + // events created during tests. This allows you to create UI tests that work while the app + // is in VR. + return false; + } + + // We are in VR mode. Synthetic events generated by SceneRenderer are marked as SOURCE_GAMEPAD + // events. For this class of events, we will let the Android Touch system handle the event so we + // return false. Other classes of events were generated by the user accidentally touching the + // screen where this hidden view is attached. 
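+    // For reference, a synthetic event of the kind SceneRenderer produces could be built roughly
+    // as follows (a sketch only; the exact construction lives in SceneRenderer and may differ):
+    //   MotionEvent e = MotionEvent.obtain(time, time, MotionEvent.ACTION_DOWN, x, y, 0);
+    //   e.setSource(InputDevice.SOURCE_GAMEPAD);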
+ if (event.getSource() != InputDevice.SOURCE_GAMEPAD) { + // Intercept and suppress touchscreen events so child buttons aren't clicked. + return true; + } else { + // Don't intercept SOURCE_GAMEPAD events. onTouchEvent will handle these. + return false; + } + } + + /** Handles standard Android touch events or synthetic VR events. */ + @Override + public boolean onTouchEvent(MotionEvent event) { + if (canvasQuad != null) { + // In VR mode so process controller events & ignore touchscreen events. + if (event.getSource() != InputDevice.SOURCE_GAMEPAD) { + // Tell the system that we handled the event. This prevents children from seeing the event. + return true; + } else { + // Have the system send the event to child Views and they will handle clicks. + return super.onTouchEvent(event); + } + } else { + // Not in VR mode so use standard behavior. + return super.onTouchEvent(event); + } + } + + /** Installs the View's event handlers. */ + @Override + public void onFinishInflate() { + super.onFinishInflate(); + + final ImageButton playPauseToggle = (ImageButton) findViewById(R.id.play_pause_toggle); + playPauseToggle.setOnClickListener( + new OnClickListener() { + @Override + public void onClick(View v) { + if (mediaPlayer == null) { + return; + } + + if (mediaPlayer.IsPlaying()) { + mediaPlayer.Pause(); + playPauseToggle.setBackgroundResource(R.drawable.play_button); + playPauseToggle.setContentDescription(getResources().getString(R.string.play_label)); + } else { + mediaPlayer.Start(); + playPauseToggle.setBackgroundResource(R.drawable.pause_button); + playPauseToggle.setContentDescription(getResources().getString(R.string.pause_label)); + } + } + }); + + seekBar = (SeekBar) findViewById(R.id.seek_bar); + seekBar.setOnSeekBarChangeListener(new SeekBarListener()); + + statusText = (TextView) findViewById(R.id.status_text); + } + + /** Sets the OnClickListener used to switch Activities. */ + @MainThread + public void setVrIconClickListener(OnClickListener listener) { + ImageButton vrIcon = (ImageButton) findViewById(R.id.enter_exit_vr); + vrIcon.setOnClickListener(listener); + } + + /** + * Renders this View and its children to either Android View hierarchy's Canvas or to the VR + * scene's CanvasQuad. + * + * @param androidUiCanvas used in 2D mode to render children to the screen + */ + @Override + public void dispatchDraw(Canvas androidUiCanvas) { + if (canvasQuad == null) { + // Handle non-VR rendering. + super.dispatchDraw(androidUiCanvas); + return; + } + + // Handle VR rendering. + Canvas glCanvas = canvasQuad.lockCanvas(); + if (glCanvas == null) { + // This happens if Android tries to draw this View before GL initialization completes. We need + // to retry until the draw call happens after GL invalidation. + postInvalidate(); + return; + } + + // Clear the canvas first. + glCanvas.drawColor(Color.BLACK); + // Have Android render the child views. + super.dispatchDraw(glCanvas); + // Commit the changes. + canvasQuad.unlockCanvasAndPost(glCanvas); + } + + /** + * Gets the listener used to update the seek bar's position on each new video frame. + * + * @return a listener that can be passed to + * {@link SurfaceTexture#setOnFrameAvailableListener(OnFrameAvailableListener)} + */ + public SurfaceTexture.OnFrameAvailableListener getFrameListener() { + return uiUpdater; + } + + /** Updates the seek bar and status text. 
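+   * <p>Note that in this sample positionMs and videoDurationMs are left at 0, so the status text
+   * always reads "0.00 / 0". A minimal sketch of how they could be driven from the player,
+   * assuming NativeMediaPlayer exposes GetCurrentPosition() and GetDuration() (method names
+   * assumed here; the real NativeMediaPlayer API may differ):
+   * <pre>
+   *   int positionMs = mediaPlayer.GetCurrentPosition();
+   *   videoDurationMs = mediaPlayer.GetDuration();
+   *   seekBar.setMax(videoDurationMs);
+   *   seekBar.setProgress(positionMs);
+   * </pre>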
*/ + private final class UiUpdater implements SurfaceTexture.OnFrameAvailableListener { + private int videoDurationMs = 0; + + // onFrameAvailable is called on an arbitrary thread, but we can only access mediaPlayer on the + // main thread. + private Runnable uiThreadUpdater = new Runnable() { + @Override + public void run() { + if (mediaPlayer == null) { + return; + } + int positionMs = 0; + StringBuilder status = new StringBuilder(); + status.append(String.format("%.2f", positionMs / 1000f)); + status.append(" / "); + status.append(videoDurationMs / 1000); + statusText.setText(status.toString()); + + if (canvasQuad != null) { + // When in VR, we will need to manually invalidate this View. + invalidate(); + } + } + }; + + @AnyThread + @Override + public void onFrameAvailable(SurfaceTexture surfaceTexture) { + post(uiThreadUpdater); + } + } + + /** Handles the user seeking to a new position in the video. */ + private final class SeekBarListener implements SeekBar.OnSeekBarChangeListener { + @Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if (fromUser && mediaPlayer != null) { +// mediaPlayer.seekTo(progress); + } // else this was from the ActivityEventHandler.onNewFrame()'s seekBar.setProgress update. + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) {} + + @Override + public void onStopTrackingTouch(SeekBar seekBar) {} + } +} diff --git a/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VrVideoActivity.java b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VrVideoActivity.java new file mode 100644 index 00000000..1c2abbf6 --- /dev/null +++ b/src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/VrVideoActivity.java @@ -0,0 +1,297 @@ +package com.vcd.immersive.omafplayer; + +import android.Manifest.permission; +import android.app.Activity; +import android.content.Intent; +import android.content.pm.PackageManager; +import android.opengl.Matrix; +import android.os.Bundle; +import android.support.annotation.MainThread; +import android.support.v4.content.ContextCompat; +import android.util.Log; +import android.util.Pair; +import android.view.View; +import android.view.View.OnClickListener; +import android.view.ViewGroup; +import com.google.vr.ndk.base.DaydreamApi; +import com.google.vr.sdk.base.AndroidCompat; +import com.google.vr.sdk.base.Eye; +import com.google.vr.sdk.base.GvrActivity; +import com.google.vr.sdk.base.GvrView; +import com.google.vr.sdk.base.HeadTransform; +import com.google.vr.sdk.base.Viewport; +import com.google.vr.sdk.controller.Controller; +import com.google.vr.sdk.controller.ControllerManager; +import com.vcd.immersive.omafplayer.Rendering.SceneRenderer; +import javax.microedition.khronos.egl.EGLConfig; + +/** + * GVR Activity demonstrating a 360 video player. + * + * The default intent for this Activity will load a 360 placeholder panorama. For more options on + * how to load other media using a custom Intent, see {@link MediaLoader}. + */ +public class VrVideoActivity extends GvrActivity { + private static final String TAG = "VrVideoActivity"; + + private static final int EXIT_FROM_VR_REQUEST_CODE = 42; + + private GvrView gvrView; + private Renderer renderer; + + // Displays the controls for video playback. + private VideoUiView uiView; + + // Given an intent with a media file and format, this will load the file and generate the mesh. + private MediaLoader mediaLoader; + + // Interfaces with the Daydream controller. 
+ private ControllerManager controllerManager; + private Controller controller; + + /** + * Configures the VR system. + * + * @param savedInstanceState unused in this sample but it could be used to track video position + */ + @Override + public void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + mediaLoader = new MediaLoader(this); + + gvrView = new GvrView(this); + // Since the videos have fewer pixels per degree than the phones, reducing the render target + // scaling factor reduces the work required to render the scene. This factor can be adjusted at + // runtime depending on the resolution of the loaded video. + // You can use Eye.getViewport() in the overridden onDrawEye() method to determine the current + // render target size in pixels. + gvrView.setRenderTargetScale(.5f); + + // Standard GvrView configuration + renderer = new Renderer(gvrView); + gvrView.setEGLConfigChooser( + 8, 8, 8, 8, // RGBA bits. + 16, // Depth bits. + 0); // Stencil bits. + gvrView.setRenderer(renderer); + setContentView(gvrView); + + // Most Daydream phones can render a 4k video at 60fps in sustained performance mode. These + // options can be tweaked along with the render target scale. + if (gvrView.setAsyncReprojectionEnabled(true)) { + AndroidCompat.setSustainedPerformanceMode(this, true); + } + + // Handle the user clicking on the 'X' in the top left corner. Since this is done when the user + // has taken the headset out of VR, it should launch the app's exit flow directly rather than + // using the transition flow. + gvrView.setOnCloseButtonListener(new Runnable() { + @Override + public void run() { + launch2dActivity(); + } + }); + + // Configure Controller. + ControllerEventListener listener = new ControllerEventListener(); + controllerManager = new ControllerManager(this, listener); + controller = controllerManager.getController(); + controller.setEventListener(listener); + // controller.start() is called in onResume(). + + checkPermissionAndInitialize(); + } + + /** + * Normal apps don't need this. However, since we use adb to interact with this sample, we + * want any new adb Intents to be routed to the existing Activity rather than launching a new + * Activity. + */ + @Override + protected void onNewIntent(Intent intent) { + // Save the new Intent which may contain a new Uri. Then tear down & recreate this Activity to + // load that Uri. + setIntent(intent); + recreate(); + } + + /** Launches the 2D app with the same extras and data. */ + private void launch2dActivity() { + startActivity(new Intent(getIntent()).setClass(this, VideoActivity.class)); + // When launching the other Activity, it may be necessary to finish() this Activity in order to + // free up the MediaPlayer resources. This sample doesn't call mediaPlayer.release() unless the + // Activities are destroy()ed. This allows the video to be paused and resumed when another app + // is in the foreground. However, most phones have trouble maintaining sufficient resources for + // 2 4k videos in the same process. Large videos may fail to play in the second Activity if the + // first Activity hasn't finish()ed. + // + // Alternatively, a single media player instance can be reused across multiple Activities in + // order to conserve resources. + finish(); + } + + /** + * Tries to exit gracefully from VR using a VR transition dialog. 
+   *
+   * @return whether the exit request has started or whether the request failed due to the device
+   *     not being Daydream Ready
+   */
+  private boolean exitFromVr() {
+    // This needs to use GVR's exit transition to avoid disorienting the user.
+    DaydreamApi api = DaydreamApi.create(this);
+    if (api != null) {
+      api.exitFromVr(this, EXIT_FROM_VR_REQUEST_CODE, null);
+      // Eventually, the Activity's onActivityResult will be called.
+      api.close();
+      return true;
+    }
+    return false;
+  }
+
+  /** Initializes the Activity only if the permission has been granted. */
+  private void checkPermissionAndInitialize() {
+    if (ContextCompat.checkSelfPermission(this, permission.READ_EXTERNAL_STORAGE)
+        == PackageManager.PERMISSION_GRANTED) {
+      mediaLoader.handleIntent(getIntent(), uiView);
+    } else {
+      exitFromVr();
+      // This method will return false on Cardboard devices. This case isn't handled in this sample
+      // but it should be handled for VR Activities that run on Cardboard devices.
+    }
+  }
+
+  /**
+   * Handles the result from {@link DaydreamApi#exitFromVr(Activity, int, Intent)}. This is called
+   * via the uiView.setVrIconClickListener listener below.
+   *
+   * @param requestCode matches the parameter to exitFromVr()
+   * @param resultCode whether the user accepted the exit request or canceled
+   */
+  @Override
+  protected void onActivityResult(int requestCode, int resultCode, Intent unused) {
+    if (requestCode == EXIT_FROM_VR_REQUEST_CODE && resultCode == RESULT_OK) {
+      launch2dActivity();
+    } else {
+      // This should contain a VR UI to handle the user declining the exit request.
+      Log.e(TAG, "Declining the exit request isn't implemented in this sample.");
+    }
+  }
+
+  @Override
+  protected void onResume() {
+    super.onResume();
+    controllerManager.start();
+    mediaLoader.resume();
+  }
+
+  @Override
+  protected void onPause() {
+    mediaLoader.pause();
+    controllerManager.stop();
+    super.onPause();
+  }
+
+  @Override
+  protected void onDestroy() {
+    mediaLoader.destroy();
+    uiView.setMediaPlayer(null);
+    super.onDestroy();
+  }
+
+  /**
+   * Standard GVR renderer. Most of the real work is done by {@link SceneRenderer}.
+   */
+  private class Renderer implements GvrView.StereoRenderer {
+    private static final float Z_NEAR = .1f;
+    private static final float Z_FAR = 100;
+
+    // Used by ControllerEventListener to manipulate the scene.
+    public final SceneRenderer scene;
+
+    private final float[] viewProjectionMatrix = new float[16];
+
+    /**
+     * Creates the Renderer and configures the VR exit button.
+     *
+     * @param parent Any View that is already attached to the Window. The uiView will secretly be
+     *     attached to this View in order to properly handle UI events.
+     */
+    @MainThread
+    public Renderer(ViewGroup parent) {
+      Pair<SceneRenderer, VideoUiView> pair
+          = SceneRenderer.createForVR(VrVideoActivity.this, parent);
+      scene = pair.first;
+      uiView = pair.second;
+      uiView.setVrIconClickListener(
+          new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+              if (!exitFromVr()) {
+                // Directly exit Cardboard Activities.
+ onActivityResult(EXIT_FROM_VR_REQUEST_CODE, RESULT_OK, null); + } + } + }); + } + + @Override + public void onNewFrame(HeadTransform headTransform) {} + + @Override + public void onDrawEye(Eye eye) { + Matrix.multiplyMM( + viewProjectionMatrix, 0, eye.getPerspective(Z_NEAR, Z_FAR), 0, eye.getEyeView(), 0); + scene.glDrawFrame(viewProjectionMatrix, eye.getType(), 0, 0); + } + + @Override + public void onFinishFrame(Viewport viewport) {} + + @Override + public void onSurfaceCreated(EGLConfig config) { + scene.glInit(); + mediaLoader.onGlSceneReady(scene); + } + + @Override + public void onSurfaceChanged(int width, int height) { } + + @Override + public void onRendererShutdown() { + scene.glShutdown(); + } + } + + /** Forwards Controller events to SceneRenderer. */ + private class ControllerEventListener extends Controller.EventListener + implements ControllerManager.EventListener { + private boolean touchpadDown = false; + private boolean appButtonDown = false; + + @Override + public void onApiStatusChanged(int status) { + Log.i(TAG, ".onApiStatusChanged " + status); + } + + @Override + public void onRecentered() {} + + @Override + public void onUpdate() { + controller.update(); + + renderer.scene.setControllerOrientation(controller.orientation); + + if (!touchpadDown && controller.clickButtonState) { + renderer.scene.handleClick(); + } + + if (!appButtonDown && controller.appButtonState) { + renderer.scene.toggleUi(); + } + + touchpadDown = controller.clickButtonState; + appButtonDown = controller.appButtonState; + } + } +} diff --git a/src/player/app/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/src/player/app/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml new file mode 100644 index 00000000..1f6bb290 --- /dev/null +++ b/src/player/app/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + diff --git a/src/player/app/android/app/src/main/res/drawable/enter_vr.png b/src/player/app/android/app/src/main/res/drawable/enter_vr.png new file mode 100644 index 00000000..ff73a35b Binary files /dev/null and b/src/player/app/android/app/src/main/res/drawable/enter_vr.png differ diff --git a/src/player/app/android/app/src/main/res/drawable/ic_launcher_background.xml b/src/player/app/android/app/src/main/res/drawable/ic_launcher_background.xml new file mode 100644 index 00000000..0d025f9b --- /dev/null +++ b/src/player/app/android/app/src/main/res/drawable/ic_launcher_background.xml @@ -0,0 +1,170 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/player/app/android/app/src/main/res/drawable/icon.png b/src/player/app/android/app/src/main/res/drawable/icon.png new file mode 100644 index 00000000..c85eb1f4 Binary files /dev/null and b/src/player/app/android/app/src/main/res/drawable/icon.png differ diff --git a/src/player/app/android/app/src/main/res/drawable/pause_button.png b/src/player/app/android/app/src/main/res/drawable/pause_button.png new file mode 100644 index 00000000..7192ad48 Binary files /dev/null and b/src/player/app/android/app/src/main/res/drawable/pause_button.png differ diff --git a/src/player/app/android/app/src/main/res/drawable/play_button.png b/src/player/app/android/app/src/main/res/drawable/play_button.png new file mode 100644 index 00000000..72b31398 Binary files /dev/null and b/src/player/app/android/app/src/main/res/drawable/play_button.png differ diff --git a/src/player/app/android/app/src/main/res/drawable/vr_icon_background.png 
b/src/player/app/android/app/src/main/res/drawable/vr_icon_background.png new file mode 100644 index 00000000..87200845 Binary files /dev/null and b/src/player/app/android/app/src/main/res/drawable/vr_icon_background.png differ diff --git a/src/player/app/android/app/src/main/res/layout/activity_main.xml b/src/player/app/android/app/src/main/res/layout/activity_main.xml new file mode 100644 index 00000000..7d8d24e9 --- /dev/null +++ b/src/player/app/android/app/src/main/res/layout/activity_main.xml @@ -0,0 +1,19 @@ + + + + + + \ No newline at end of file diff --git a/src/player/app/android/app/src/main/res/layout/bind_view.xml b/src/player/app/android/app/src/main/res/layout/bind_view.xml new file mode 100644 index 00000000..27eda1d8 --- /dev/null +++ b/src/player/app/android/app/src/main/res/layout/bind_view.xml @@ -0,0 +1,7 @@ + + + diff --git a/src/player/app/android/app/src/main/res/layout/video_activity.xml b/src/player/app/android/app/src/main/res/layout/video_activity.xml new file mode 100644 index 00000000..d2d3df09 --- /dev/null +++ b/src/player/app/android/app/src/main/res/layout/video_activity.xml @@ -0,0 +1,30 @@ + + + + + +