Introducing Probatron - Analyzing Broadcom Transcoding
https://docs.google.com/a/google.com/document/d/1ocdPvZ5CNdK42WI1I3Nry8OiUq28rCSFSdl1uoKLEA0/
Change-Id: I9e999fbd20bf5aa5a72620d2b08d12cb440aadb0
diff --git a/probatron/README b/probatron/README
new file mode 100644
index 0000000..aa4a10a
--- /dev/null
+++ b/probatron/README
@@ -0,0 +1,9 @@
+Introducing Probatron: Analyzing Broadcom Transcoding
+
+The short of it in 36 words or less:
+ - The probe is meant to be run on a settop to transcode files.
+ - Run ./analyzer for usage and to analyze files you already transcoded.
+ - Run ./probatron.sh for usage and to start automation of the entire process.
+
+The long of it:
+https://docs.google.com/a/google.com/document/d/1ocdPvZ5CNdK42WI1I3Nry8OiUq28rCSFSdl1uoKLEA0/
diff --git a/probatron/analyzer b/probatron/analyzer
new file mode 100755
index 0000000..c8d02b0
--- /dev/null
+++ b/probatron/analyzer
@@ -0,0 +1,257 @@
+#!/bin/bash
+
+SEG_DURATION_OUTPUT_FILE=seg_duration_output.csv
+TRANS_FRAME_OUTPUT_FILE=trans_frame_output.csv
+SEG_FRAME_OUTPUT_FILE=seg_frame_output.csv
+TRANS_GOP_OUTPUT_FILE=trans_gop_output.csv
+SEG_GOP_OUTPUT_FILE=seg_gop_output.csv
+
+###################################
+
+## Print the tool's usage/help text to stdout.
+function printUsage() {
+    echo
+    echo "Usage:"
+    echo " analyze [path/to/transcoded/file]"
+    echo
+    echo "This tool analyzes all *.ts_* HLS segments in the local directory for "
+    echo "issues. If you pass the path to the transcoded original file as a "
+    echo "parameter it will analyze that as well for comparison. Issues that "
+    echo "appear in just the HLS analysis point to a miniclient problem. Issues "
+    echo "that appear in the latter analysis (or both analyses) point to a "
+    echo "BroadCom issue."
+    echo
+    echo "This tool requires dvbsnoop and ffmpeg! It also needs gnuplot to "
+    echo "print pretty graphs, but it's optional."
+    echo
+    echo " 'apt-get install dvbsnoop ffmpeg gnuplot'"
+    echo
+}
+
+
+## Parse args and check dependencies
+FILE=""
+if [ $# -eq 1 ]
+then
+ FILE=$1
+fi
+
+RAN_A_TEST=0
+
+## Return true (0) if the dependency named in $1 exists on PATH.
+## $1 is quoted so an empty/whitespace argument cannot word-split.
+dependency_exists() {
+    command -v "$1" >/dev/null 2>&1
+}
+
+## Check for deps without killing terminal (as exit does, hence the goto)
+## Exit non-zero so callers/scripts can detect the missing dependency
+## (the original bare 'exit' reported success).
+dependency_exists ffmpeg || { echo; echo >&2 "This script requires ffmpeg but it's not installed!"; printUsage; exit 1; }
+dependency_exists dvbsnoop || { echo; echo >&2 "This script requires dvbsnoop but it's not installed!"; printUsage; exit 1; }
+
+
+## Gnuplot the results given by the parameters:
+##   $1 - data file (column 1 = x, column 3 = y)
+##   $2 - graph title     $3 - x-axis label   $4 - x range (e.g. "1:100")
+##   $5 - y-axis label    $6 - y range (e.g. "0:*")
+## Degrades to a friendly notice when gnuplot is not installed.
+function plotResult() {
+    if dependency_exists gnuplot
+    then
+        ## 'local' keeps these from leaking into the callers' namespace
+        ## (the original set them as globals).
+        local PLOTFILE=$1
+        local TITLE=$2
+        local XLABEL=$3
+        local XRANGE=$4
+        local YLABEL=$5
+        local YRANGE=$6
+
+        gnuplot -persist <<HERE
+        set title "${TITLE}"
+        set xlabel '${XLABEL}'
+        set xrange[${XRANGE}]
+        set ylabel '${YLABEL}'
+        set yrange[${YRANGE}]
+        unset key
+        plot "${PLOTFILE}" using 1:3 with lines
+HERE
+    else
+        echo " 'gnuplot' is not installed, so no pretty graphs for you today :("
+    fi
+}
+
+
+## Calculate the GOP length between keyframes of input file (1), appending
+## "NUM file frame_count" rows to output file (2).  Expects NUM_GOP to be
+## initialized by the caller and leaves it advanced past the groups written.
+## The final (possibly partial) GOP is intentionally not emitted.
+## NOTE(review): the function name keeps its historical typo ("calulate")
+## because call sites elsewhere in this script use it.
+function calulateGOP() {
+    OUTPUT_FILE=$2
+
+    ## GOP - Algorithm stolen Shamelessly From Zeev Leiber
+    PID=$(ffprobe -i "$1" 2>&1 | grep "Video:" | sed 's/[][]/ /g' | awk '{print "echo $((" $3 "))"}' | bash)
+    echo " Processing $1 (Video PID $PID)..."
+
+    GOPs=$(dvbsnoop -s ts -if "$1" $PID | grep random_access | awk '{print $2}')
+    FRAME_COUNT=0
+    for g in $GOPs
+    do
+        if [[ $g -eq 1 ]]
+        then
+            if [[ $FRAME_COUNT -gt 0 ]]
+            then
+                echo $NUM_GOP $1 $FRAME_COUNT >> "$OUTPUT_FILE"
+                NUM_GOP=$((NUM_GOP + 1))
+            fi
+            ## Always count the keyframe that opens this GOP.  The original
+            ## skipped this when FRAME_COUNT was still 0, so the very first
+            ## GOP was undercounted by one frame.
+            FRAME_COUNT=1
+        else
+            FRAME_COUNT=$((FRAME_COUNT + 1))
+        fi
+    done
+}
+
+
+## Calculate the size of each video frame in input file (1), appending
+## "NUM file pkt_size" rows to output file (2).  Expects NUM_FRAMES to be
+## initialized by the caller and leaves it advanced past the rows written.
+function calculateFrameSize() {
+    OUTPUT_FILE=$2
+    echo " Processing $1..."
+
+    FRAMEs=$(ffprobe -v quiet -show_frames -select_streams v -i "$1" | awk '/pkt_size=/ {sub(/pkt_size=/, "", $1); print $1;}')
+    for g in $FRAMEs
+    do
+        echo $NUM_FRAMES $1 $g >> "$OUTPUT_FILE"
+        ## Arithmetic expansion instead of forking 'expr' once per frame.
+        NUM_FRAMES=$((NUM_FRAMES + 1))
+    done
+}
+
+
+#####
+## Work around fact that you can't pass multi-line input to a function...
+
+## Calculate GOP for each segment adding results into a single file (1)
+## NOTE(review): 'ls -v' is deliberate despite the usual parse-ls concerns:
+## segments must be processed in natural version-number order, and segment
+## names are assumed to contain no whitespace.
+function calculateSegmentGOPs() {
+    FILES=$(ls -v *.ts_*)
+
+    echo "# NUM File GOP" > $1
+    NUM_GOP=1
+    for f in $FILES
+    do
+        calulateGOP $f $1
+    done
+
+    echo
+    echo "Result written to $1"
+
+    ## NUM_GOP was advanced by calulateGOP; gnuplot evaluates the
+    ## "$NUM_GOP-1" upper bound, giving an x-range of all groups written.
+    plotResult $1 "HLS GOP Analysis" "Group" "1:$NUM_GOP-1" "GOP" "0:35"
+}
+
+## Calculate GOP for transcoded file (1) adding results into a single file (2)
+function calculateTranscodedGOP() {
+    echo "# NUM File GOP" > $2
+
+    NUM_GOP=1
+    calulateGOP $1 $2
+
+    echo
+    echo "Result written to $2"
+
+    ## NUM_GOP was advanced by calulateGOP; "$NUM_GOP-1" is evaluated by
+    ## gnuplot so the x-range covers all groups written.
+    plotResult $2 "Transcoded GOP Analysis" "Group" "1:$NUM_GOP-1" "GOP" "0:35"
+}
+
+## Calculate the size of each frame in each segment adding results into a single file (1)
+## Segments are processed in 'ls -v' (natural version) order so frame
+## numbering follows playback order; names are assumed whitespace-free.
+function calculateSegmentFS() {
+    FILES=$(ls -v *.ts_*)
+
+    echo "# NUM File Frame_Size" > $1
+    NUM_FRAMES=1
+    for f in $FILES
+    do
+        calculateFrameSize $f $1
+    done
+
+    echo
+    echo "Result written to $1"
+
+    plotResult $1 "HLS Segment Frame Size Analysis" "Frame" "1:${NUM_FRAMES}-2" "Size (b)" "0:*"
+}
+
+## Calculate the size of each frame given an input file (1) adding results into a single file (2)
+function calculateTranscodedFS() {
+    echo "# NUM File Frame_Size" > $2
+
+    NUM_FRAMES=1
+    calculateFrameSize $1 $2
+
+    echo
+    echo "Result written to $2"
+
+    plotResult $2 "Transcoded Frame Size Analysis" "Frame" "1:${NUM_FRAMES}-2" "Size (b)" "0:*"
+}
+#####
+
+
+## Calculate duration of each HLS segment in the current folder, writing
+## "NUM file duration" rows to $SEG_DURATION_OUTPUT_FILE.  Duration is the
+## sum of per-packet duration_time values reported by ffprobe.  ./temp is
+## a scratch file cleaned up at the end of the script.
+function calculateDurations() {
+    echo "# NUM File Duration" > $SEG_DURATION_OUTPUT_FILE
+    FILES=$(ls -v *.ts_*)
+    NUM=1
+    for f in $FILES
+    do
+        echo " Processing $f..."
+        ffprobe -v quiet -show_packets -select_streams v -i $f > ./temp
+
+        ## awk reads the scratch file directly (no useless 'cat | awk').
+        duration=$(awk '/duration_time=0/ {sub(/duration_time=/, "", $1); total = total + $1;}END {print total;}' ./temp)
+        echo $NUM $f $duration >> $SEG_DURATION_OUTPUT_FILE
+        NUM=$((NUM + 1))
+    done
+
+    echo
+    echo "Result written to $SEG_DURATION_OUTPUT_FILE"
+
+    plotResult $SEG_DURATION_OUTPUT_FILE "HLS Segment Duration" "Segment" "1:${NUM}-1" "Duration (s)" "0:*"
+}
+
+
+## Script flow ##
+
+
+## Segments
+echo
+echo "Starting segment analysis (.ts_* files)"
+## The ls run doubles as an existence test for segments; its stderr goes
+## to the scratch file so the terminal stays clean.
+ls -v *.ts_* 2>./temp 1>/dev/null
+if [[ $? == 0 ]]
+then
+    echo
+    echo "Analyzing Duration"
+    ## calculateDurations discovers the segment list itself; the stray
+    ## "$FILES" argument the original passed was always empty and unused.
+    calculateDurations
+    echo
+    ## Fixed "Analyszing" typo in the original progress message.
+    echo "Analyzing GOP"
+    calculateSegmentGOPs $SEG_GOP_OUTPUT_FILE
+    echo
+    echo "Analyzing Frame Size"
+    calculateSegmentFS $SEG_FRAME_OUTPUT_FILE
+
+    RAN_A_TEST=1
+else
+    echo " No segments found, skipping these tests."
+fi
+echo
+
+## Transcoded
+echo "Starting transcoded file analysis"
+if [[ "$FILE" != "" ]]
+then
+    echo
+    echo "Analyzing GOP"
+    calculateTranscodedGOP $FILE $TRANS_GOP_OUTPUT_FILE
+    echo
+    echo "Analyzing Frame Size"
+    calculateTranscodedFS $FILE $TRANS_FRAME_OUTPUT_FILE
+
+    RAN_A_TEST=1
+else
+    echo " No File specified as a parameter, skipping these tests."
+fi
+
+## If we didn't test anything, print usage in case the user is new...
+if [ $RAN_A_TEST == 0 ]
+then
+    printUsage
+fi
+
+## Remove the scratch file used by the analyses above.
+if [ -e "./temp" ]
+then
+    rm ./temp
+fi
+
+echo
+
diff --git a/probatron/probatron.sh b/probatron/probatron.sh
new file mode 100755
index 0000000..054c2cc
--- /dev/null
+++ b/probatron/probatron.sh
@@ -0,0 +1,119 @@
+#!/bin/bash
+
+function printUsage() {
+ echo
+ echo
+ echo "Usage:"
+ echo " probatron path/to/raw/file desired/transcoded/filename [settop IP]"
+ echo
+ echo "The probatron automates the process of transcoding a raw input file "
+ echo "as the miniclient would and analyzing it against all of the *.ts_* HLS "
+ echo "segments in the local directory, checking for issues. Issues that "
+ echo "appear in just the HLS analysis point to a miniclient problem. Issues "
+ echo "that appear in the transcoded or both analyses) point to a BroadCom issue."
+ echo
+ echo "The probatron will use \$DVR_BOX as the default settop to use for "
+ echo "transcoding, but you can specify a different IP by passing it as the "
+ echo "third parameter."
+ echo
+ echo "The analysis tools require dvbsnoop and ffmpeg! It also needs gnuplot to "
+ echo "print pretty graphs, but that's optional."
+ echo
+ echo " 'apt-get install dvbsnoop ffmpeg gnuplot'"
+ echo
+
+ exit
+}
+
+###################################
+
+## Abort the script if the previous command failed, propagating its exit
+## status.  The original ran 'if [ $? != 0 ]; then exit; fi' — but by the
+## time 'exit' ran, $? was the (successful) test's status, so the script
+## exited 0 even on failure.  Capturing $? first also keeps the optional
+## debug read from clobbering it.
+function checkReturnCode() {
+    rc=$?
+    ## Uncomment next line to debug - force keystroke after most commands
+    #read -n 1
+    if [ $rc != 0 ]
+    then
+        exit $rc
+    fi
+}
+
+## Sanitize args
+
+if [ $# -lt 2 ]
+then
+ printUsage
+fi
+
+INPUT_FILE=$1
+TRANSCODED_PATH=$2
+TRANSCODED_FILENAME=$(basename $TRANSCODED_PATH)
+SETTOP_IP=$3
+
+if [[ "$SETTOP_IP" == "" ]]
+then
+ SETTOP_IP=$DVR_BOX
+ if [[ "$SETTOP_IP" == "" ]]
+ then
+ echo
+ echo " Settop IP not provided and \$DVR_BOX is not set!"
+ printUsage
+ fi
+fi
+
+## Let's Get To Work!
+
+## Upload the raw file and probe to the settop, run the transcode there,
+## pull the result back, and hand it to ./analyzer.
+if [ -e "$INPUT_FILE" ]
+then
+    CURRENT_DIR=$(pwd)
+    ## Upload the raw file, and the prober
+    echo
+    echo "Uploading $INPUT_FILE to $SETTOP_IP"
+    ## Use $SETTOP_IP consistently.  The original mixed in $DVR_BOX for
+    ## most ssh/scp calls, so passing an IP as $3 silently targeted the
+    ## wrong box for everything except the two scp transfers.
+    ssh root@$SETTOP_IP "rm -f /var/media/videos/raw.mpg"
+    scp $INPUT_FILE root@$SETTOP_IP:/var/media/videos/raw.mpg
+    checkReturnCode
+
+    echo
+    echo "Building Probe"
+    echo
+    ##### TODO: Fix this when the build is proper-like
+    cd probe/
+    make
+    checkReturnCode
+    echo
+    echo "Uploading Probe"
+    ssh root@$SETTOP_IP "rm -f /rw/probe"
+    scp probe root@$SETTOP_IP:/rw/
+    checkReturnCode
+    cd $CURRENT_DIR
+    #####
+
+    ## Wait for transcoding to complete
+    echo
+    echo "Starting Transcode..."
+    echo
+    # Stop miniclient as it interferes with the probe, then restore it after...
+    ssh root@$SETTOP_IP 'stop miniclient'
+    # Keep app up until we press a key to kill it - without doing that probe never stops! TODO Fix this in the probe
+    ssh root@$SETTOP_IP "/rw/probe /var/media/videos/raw.mpg /var/media/videos/$TRANSCODED_FILENAME & read -n 1; kill \$!"
+    ssh root@$SETTOP_IP 'start miniclient'
+
+    ## Capture the transcoded file and analyze it
+    echo
+    echo "Downloading Transcoded file..."
+    scp root@$SETTOP_IP:/var/media/videos/$TRANSCODED_FILENAME $TRANSCODED_PATH
+    checkReturnCode
+
+    if [ -e "$TRANSCODED_PATH" ]
+    then
+        echo
+        ./analyzer $TRANSCODED_PATH
+    else
+        echo
+        echo "Failed to retrieve $TRANSCODED_PATH!"
+    fi
+
+else
+    echo
+    echo " The file $INPUT_FILE doesn't exist!"
+    printUsage
+fi
+
diff --git a/probatron/probe/Makefile b/probatron/probe/Makefile
new file mode 100644
index 0000000..696d827
--- /dev/null
+++ b/probatron/probe/Makefile
@@ -0,0 +1,38 @@
+# Paths into the Broadcom reference-software output tree, relative to
+# this probe/ directory inside the source checkout.
+BCM_BSEAV_DIR=../../../../../out/build/BSEAV
+BCM_NEXUS_DIR=../../../../../out/build/bcm_nexus-HEAD/
+B_REFSW_TOOLCHAIN_DIR=../../../../../out/host/usr/bin
+NEXUS_TOP=$(BCM_NEXUS_DIR)
+
+NEXUS_PLATFORM=97425
+BCHP_VER=1
+
+include ../../../../../out/build/nexus/platforms/common/build/nexus_platforms.inc
+
+# Stub out optional component paths this probe doesn't build against.
+PULLREADER_PATH := nonexistent
+SWSCALE_PATH := nonexistent
+
+# define settings e.g. debug mode
+BR2_PACKAGE_BRUNO_DEBUG=y
+
+all: probe
+
+#include ../../../../../buildroot/package/bcm_common/bcm_common.mk
+
+# Hack to replace spaces with newlines so we can evaluate
+# BCM_MAKE_ENV as makefile syntax. Do not remove empty lines.
+# -- BEGIN HACK --
+null :=
+space := ${null} ${null}
+${space} := ${space}# ${ } is a space. Neat huh?
+
+define \n
+
+
+endef
+# -- END HACK --
+
+# Turn the space-separated BCM_MAKE_ENV assignments into one-per-line
+# makefile statements and evaluate them in this Makefile's scope.
+$(eval $(subst ${ },${\n},${BCM_MAKE_ENV}))
+
+# Prefix for quiet recipe commands (set Q_= on the command line for verbose).
+Q_=@
+
+include Makefile.7425
diff --git a/probatron/probe/Makefile.7425 b/probatron/probe/Makefile.7425
new file mode 100644
index 0000000..33ff1af
--- /dev/null
+++ b/probatron/probe/Makefile.7425
@@ -0,0 +1,132 @@
+# Install locations; only the 'install' target below uses these.
+BINDIR=$(DESTDIR)/usr/bin
+APPDIR=$(DESTDIR)/app/client
+LIBDIR=$(DESTDIR)/usr/lib
+
+ifndef NEXUS_TOP
+$(error NEXUS_TOP is not defined)
+endif
+
+BCHP_VER=1
+PLATFORM=97425
+
+# nexus uses C89 by default for higher compatibility
+NEXUS_C_STD=c99
+
+# include cross-compiler definitions
+include $(NEXUS_TOP)/platforms/$(PLATFORM)/build/platform_app.inc
+
+LIBFLAGS += -lpthread -ldl -lm -lrt
+
+LIBFLAGS += -L./
+
+INCFLAGS += -I$(MAGNUM)/../rockford/middleware/v3d/interface/khronos/include/ \
+    -I$(MAGNUM)/../rockford/middleware/platform/nexus \
+    -I$(BSEAV)/lib/playbackdevice/nexusMgr
+
+.PHONY: clean
+
+all: probe
+	@echo $(NEXUS_TOP)
+
+# DEBUG=... on the command line drives B_REFSW_DEBUG; default is a debug
+# build when neither is set.
+ifdef DEBUG
+B_REFSW_DEBUG ?= $(DEBUG)
+endif
+
+ifeq ($(B_REFSW_DEBUG),)
+B_REFSW_DEBUG = y
+endif
+
+ifeq ($(B_REFSW_DEBUG),y)
+V3D_LIB := lib_$(PLATFORM)_debug
+else
+V3D_LIB := lib_$(PLATFORM)_release
+endif
+
+# Nexus process model: unset => single process; 'client' => multiprocess client.
+ifeq ($(NEXUS_MODE),)
+CFLAGS += -DSINGLE_PROCESS
+endif
+
+ifeq ($(NEXUS_MODE),client)
+CFLAGS += -DUSER_MODE_MULTIPROC_CLIENT
+endif
+
+ifeq ($(PLATFORM),97425)
+CFLAGS += -DENCODER_ENABLED
+endif
+
+# "unused parameter" is a pretty useless warning
+CFLAGS += -Wno-unused-parameter
+CXXFLAGS += -Wno-unused-parameter
+
+# "missing initializer" is sometimes useful, but seemingly not in OpenGL where
+# there are often uninitialized fields.
+CFLAGS += -Wno-missing-field-initializers
+CXXFLAGS += -Wno-missing-field-initializers
+
+# All warnings are errors, so we aren't tempted to let them sit unfixed.
+CFLAGS += -Werror
+CXXFLAGS += -Werror
+
+# C++ standard
+CXXFLAGS += -std=c++0x
+
+# For linux builds, link to the correct libraries
+ifneq ($(findstring linux,$(B_REFSW_OS)),)
+LDFLAGS := -lnexus$(NEXUS_LIB_SUFFIX) -L${NEXUS_BIN_DIR} -lpthread \
+    -L$(NEXUS_TOP)/../rockford/middleware/v3d/$(V3D_LIB) -lv3ddriver \
+    -L$(NEXUS_TOP)/../rockford/middleware/platform/nexus/$(V3D_LIB) -lnxpl -lprojectM
+endif
+
+clean:
+	rm -f *.o probe
+	rm -f *.c~ *.h~
+
+# This is the minimum needed to compile and link with Nexus
+CFLAGS += -DGLES2 -DGLNEXUS $(INCFLAGS) $(NEXUS_CFLAGS) \
+    $(addprefix -I,$(NEXUS_APP_INCLUDE_PATHS)) \
+    $(addprefix -D,$(NEXUS_APP_DEFINES))
+CXXFLAGS += -DGLES2 -DGLNEXUS $(INCFLAGS) $(NEXUS_CFLAGS) \
+    $(addprefix -I,$(NEXUS_APP_INCLUDE_PATHS)) \
+    $(addprefix -D,$(NEXUS_APP_DEFINES))
+
+# Remove flags that don't apply to C++
+CXXFLAGS := $(filter-out -Wstrict-prototypes -std=c99, $(CXXFLAGS) )
+
+# Until we replace code in wvhelper
+CXXFLAGS += -Wno-deprecated
+
+LDFLAGS += $(LIBFLAGS) -ljpeg -lpng14 -lz -ltiff -lungif -lstacktrace
+
+# Always build with debug
+CFLAGS += -g -Os
+CXXFLAGS += -g -Os
+LDFLAGS += -rdynamic
+
+# lame autodepends: all .o files depend on all .h files
+$(patsubst %.c,%.o,$(wildcard *.c) $(wildcard */*.c)): \
+    $(wildcard *.h) $(wildcard */*.h)
+
+OBJS := probe.o
+
+%.o: %.c
+	@echo "[$< -> $@]"
+	$(Q_)$(CC) -fPIC $(CPPFLAGS) $(CFLAGS) -o $@ -c $<
+
+%.o: %.cpp
+	@echo "[$< -> $@]"
+	$(Q_)$(CXX) -fPIC $(CPPFLAGS) $(CXXFLAGS) -o $@ -c $<
+
+# NOTE(review): links with $(CFLAGS) rather than $(CXXFLAGS) despite using
+# $(CXX) — appears intentional for flag compatibility; confirm if touched.
+probe: $(OBJS) $(VXOBJS)
+	@echo "[$^ -> $@]"
+	@echo NEXUS_APP_INCLUDE_PATHS are $(NEXUS_APP_INCLUDE_PATHS)
+	$(Q_)$(CXX) -o $@ $(OBJS) $(CFLAGS) $(LDFLAGS)
+
+# implicit rule for building typical binaries
+#%: BRCM/%.o
+#	@echo "[$^ -> $@]"
+#	@echo NEXUS_APP_INCLUDE_PATHS are $(NEXUS_APP_INCLUDE_PATHS)
+#	$(Q_)$(CXX) -o $@ $^ $(CFLAGS) $(LDFLAGS)
+
+install: all
+	mkdir -p $(BINDIR) $(APPDIR)
+	cp probe $(APPDIR)
diff --git a/probatron/probe/probe.c b/probatron/probe/probe.c
new file mode 100644
index 0000000..3d91459
--- /dev/null
+++ b/probatron/probe/probe.c
@@ -0,0 +1,928 @@
+#include "nexus_platform.h"
+#include "nexus_video_decoder.h"
+#include "nexus_stc_channel.h"
+#include "nexus_display.h"
+#include "nexus_video_window.h"
+#include "nexus_video_input.h"
+#include "nexus_video_adj.h"
+#include "nexus_spdif_output.h"
+#include "nexus_component_output.h"
+#if NEXUS_HAS_HDMI_OUTPUT
+#include "nexus_hdmi_output.h"
+#endif
+#if NEXUS_HAS_PLAYBACK && NEXUS_HAS_STREAM_MUX
+#include "nexus_playback.h"
+#include "nexus_record.h"
+#include "nexus_file.h"
+#include "nexus_video_encoder.h"
+#include "nexus_audio_encoder.h"
+#include "nexus_audio_mixer.h"
+#include "nexus_stream_mux.h"
+#include "nexus_recpump.h"
+#include "nexus_record.h"
+#endif
+#if NEXUS_HAS_SYNC_CHANNEL
+#include "nexus_sync_channel.h"
+#endif
+
+#include <stdio.h>
+#include <assert.h>
+#include "bstd.h"
+#include "bkni.h"
+#include "bdbg.h"
+
+#include <pthread.h>
+#include <malloc.h>
+#include <sys/time.h>
+#include <signal.h>
+#include <errno.h>
+#include <time.h>
+
+////////////////////////////////////////////////////////////////////////////////
+/* Hard-Coded For Now */
+#define TRANSPORT_TYPE NEXUS_TransportType_eTs
+#define VIDEO_PID 0x1e1
+#define VIDEO_CODEC NEXUS_VideoCodec_eH264
+#define AUDIO_PID 0x1e2
+#define AUDIO_CODEC NEXUS_AudioCodec_eAc3
+
+#define FORMAT NEXUS_VideoFormat_e720p
+#define FRAME_RATE NEXUS_VideoFrameRate_e30
+#define VIDEO_CODEC_PROFILE NEXUS_VideoCodecProfile_eBaseline
+#define VIDEO_CODEC_LEVEL NEXUS_VideoCodecLevel_e31
+
+////////////////////////////////////////////////////////////////////////////////
+
+#define MAX_DSP_ENCODER_WIDTH 416
+#define MAX_DSP_ENCODER_HEIGHT 224
+
+BDBG_MODULE(BVP);
+
+/* ============= utilities ==============*/
+
+/* Sleep for 'delay' microseconds using select() with no fds, retrying
+ * when interrupted by a signal (EINTR).  NOTE(review): the retry relies
+ * on Linux updating 'tv' with the time remaining; on other systems the
+ * full delay would restart after each signal — confirm if ported. */
+void ACL_Delay(unsigned int delay)
+{
+    struct timeval tv;
+    int rv = 1;
+    tv.tv_sec = delay/1000000;
+    tv.tv_usec = (delay%1000000);
+    errno = EINTR;  /* prime the loop condition for the first iteration */
+    while(rv!=0 && (errno == EINTR))
+    {
+        errno = 0;
+        rv = select(0, NULL, NULL, NULL, &tv);
+    }
+    //usleep(delay*1000);
+}
+
+/* Generate a CRC for the specified data/length */
+/* Initialize crc to 0 for new calculation. Use an old result for subsequent calls. */
+/* Bit-at-a-time CRC-32 with the MPEG polynomial 0x04C11DB7, MSB-first.
+ * The leading ~crc turns a caller-supplied 0 into the all-ones initial
+ * value; no final inversion is applied. */
+static uint32_t CRC32_mpeg(uint32_t crc, uint8_t *data, int length)
+{
+    int j;
+    crc = ~crc;
+    while (length--)
+    {
+        for (j=0; j<8; j++)
+            crc = (crc<<1) ^ ((((*data >> (7-j)) ^ (crc >> 31)) & 1) ? 0x04c11db7 : 0);
+        data++;
+    }
+    return crc;
+}
+
+#if BTST_ENABLE_TS_LAYER_USER_DATA_PASS_THRU
+/* Nexus message-buffer overflow callback; 'param' carries the index of
+ * the overflowing user-data buffer.  (Removed a stray extra ';'.) */
+static void message_callback(void *context, int param)
+{
+    BSTD_UNUSED(context);
+    BDBG_ERR(("message buffer %d overflows!", param));
+}
+#endif
+
+/* Mux "finished" callback: 'context' is the BKNI_EventHandle registered
+ * when the stream mux was started; setting it wakes whoever is blocked
+ * waiting for the transcode to complete.  'param' is unused. */
+static void transcoderFinishCallback(void *context, int param)
+{
+    BKNI_EventHandle finishEvent = (BKNI_EventHandle)context;
+
+    BSTD_UNUSED(param);
+    BDBG_WRN(("Transcoding Complete!"));
+    BKNI_SetEvent(finishEvent);
+}
+
+/* One row of encoder configuration; only cmdSettings[0] is used in main. */
+struct cmdSettings {
+    NEXUS_VideoFormat displayFormat;        /* transcode display format */
+    NEXUS_VideoFrameRate encoderFrameRate;  /* encoder output frame rate */
+    unsigned encoderBitrate;                /* max bitrate (bits/sec) */
+    unsigned encoderGopStructureFramesP;    /* P frames per GOP */
+    unsigned encoderGopStructureFramesB;    /* B frames between references */
+    unsigned encoderGopStructureDuration;   /* GOP duration — presumably ms; TODO confirm */
+    NEXUS_VideoCodec encoderVideoCodec;
+    NEXUS_VideoCodecProfile encoderProfile;
+    NEXUS_VideoCodecLevel encoderLevel;
+} cmdSettings[] = {
+    {FORMAT, FRAME_RATE, 2*1000*1000, 29, 0, 1000, VIDEO_CODEC, VIDEO_CODEC_PROFILE, VIDEO_CODEC_LEVEL}
+};
+
+/* Input stream description: default filename plus the hard-coded A/V
+ * PIDs and codecs defined at the top of this file. */
+struct streamSettings {
+    const char *fname;
+    uint16_t videoPid;
+    NEXUS_VideoCodec videoCodec;
+    uint16_t audioPid;
+    NEXUS_AudioCodec audioCodec;
+} streamSettings = { "raw.ts", VIDEO_PID, VIDEO_CODEC, AUDIO_PID, AUDIO_CODEC };
+
+
+int main(int argc, char *argv[])
+{
+ char* inputFilename;
+ char* outputFilename;
+
+ if (argc == 3)
+ {
+ inputFilename = argv[1];
+ outputFilename = argv[2];
+ } else {
+ fprintf(stderr, "Please specify both an input and output file!\n");
+ return 1;
+ }
+
+ BDBG_WRN(("\n\n *** All your transcoders are belong to us. *** \n"));
+ BDBG_WRN(("Reading from file %s and writing to file %s", inputFilename, outputFilename));
+
+ NEXUS_PlatformSettings platformSettings;
+ NEXUS_PlatformConfiguration platformConfig;
+ NEXUS_StcChannelHandle stcChannel;
+ NEXUS_StcChannelSettings stcSettings;
+ NEXUS_PidChannelHandle videoPidChannel;
+ NEXUS_DisplayHandle display;
+ NEXUS_DisplaySettings displaySettings;
+ NEXUS_VideoWindowHandle window;
+ NEXUS_VideoWindowMadSettings windowMadSettings;
+#ifdef NEXUS_NUM_DSP_VIDEO_ENCODERS
+ NEXUS_VideoWindowScalerSettings sclSettings;
+ NEXUS_VideoWindowSettings windowSettings;
+#endif
+ NEXUS_VideoDecoderHandle videoDecoder;
+ NEXUS_VideoDecoderStartSettings videoProgram;
+#if NEXUS_NUM_HDMI_OUTPUTS
+ NEXUS_HdmiOutputStatus hdmiStatus;
+ NEXUS_Error rc;
+#endif
+ NEXUS_AudioMixerSettings audioMixerSettings;
+ NEXUS_AudioMixerHandle audioMixer;
+ NEXUS_AudioDecoderHandle audioDecoder;
+ NEXUS_AudioDecoderStartSettings audioProgram;
+ NEXUS_PidChannelHandle audioPidChannel;
+ NEXUS_AudioMuxOutputHandle audioMuxOutput;
+ NEXUS_AudioMuxOutputDelayStatus audioDelayStatus;
+ NEXUS_AudioMuxOutputStartSettings audioMuxStartSettings;
+ NEXUS_PlaypumpHandle playpumpTranscodeAudio;
+ NEXUS_PidChannelHandle pidChannelTranscodeAudio;
+ NEXUS_AudioEncoderSettings encoderSettings;
+ NEXUS_AudioEncoderHandle audioEncoder;
+ NEXUS_AudioCodec audioCodec;
+ NEXUS_FilePlayHandle file;
+ NEXUS_PlaypumpHandle playpump;
+ NEXUS_PlaybackHandle playback;
+ NEXUS_PlaybackSettings playbackSettings;
+ NEXUS_PlaybackPidChannelSettings playbackPidSettings;
+ NEXUS_DisplayHandle displayTranscode;
+ NEXUS_VideoWindowHandle windowTranscode;
+ NEXUS_VideoEncoderHandle videoEncoder;
+ NEXUS_VideoEncoderSettings videoEncoderConfig;
+ NEXUS_VideoEncoderStartSettings videoEncoderStartConfig;
+ NEXUS_VideoEncoderDelayRange videoDelay;
+ NEXUS_PlaypumpHandle playpumpTranscodeVideo;
+ NEXUS_PidChannelHandle pidChannelTranscodeVideo;
+ NEXUS_RecordPidChannelSettings recordPidSettings;
+ NEXUS_StreamMuxHandle streamMux;
+ NEXUS_StreamMuxCreateSettings muxCreateSettings;
+ BKNI_EventHandle finishEvent;
+ NEXUS_StreamMuxStartSettings muxConfig;
+ NEXUS_StreamMuxOutput muxOutput;
+ NEXUS_PlaypumpOpenSettings playpumpConfig;
+ NEXUS_PlaypumpHandle playpumpTranscodePcr;
+ NEXUS_FileRecordHandle fileTranscode;
+ NEXUS_StcChannelHandle stcChannelTranscode;
+#if BTST_ENABLE_TS_LAYER_USER_DATA_PASS_THRU
+ NEXUS_MessageSettings messageSettings;
+ NEXUS_MessageStartSettings messageStartSettings;
+ NEXUS_PidChannelHandle pidChannelUserData[2], pidChannelTranscodeUserData[2];
+#endif
+
+#if NEXUS_HAS_SYNC_CHANNEL
+ NEXUS_SyncChannelSettings syncChannelSettings;
+ NEXUS_SyncChannelHandle syncChannel;
+#endif
+ NEXUS_RecpumpHandle recpump;
+ NEXUS_RecordHandle record;
+ NEXUS_RecordSettings recordSettings;
+ NEXUS_PidChannelHandle pidChannelTranscodePcr;
+ NEXUS_PidChannelHandle pidChannelTranscodePat;
+ NEXUS_PidChannelHandle pidChannelTranscodePmt;
+ void *pat;
+ void *pmt;
+
+ int i = 0;
+ //int iteration = 1;
+ //char key;
+
+ NEXUS_Platform_GetDefaultSettings(&platformSettings);
+ platformSettings.openFrontend = false;
+ NEXUS_Platform_Init(&platformSettings);
+ NEXUS_Platform_GetConfiguration(&platformConfig);
+
+ BDBG_WRN(("Setting up transcode pipeline: format %d, fr %d, bitrate %u, duration %d, codec %d, NEXUS_HAS_SYNC_CHANNEL %x",
+ cmdSettings[i].displayFormat,
+ cmdSettings[i].encoderFrameRate,
+ cmdSettings[i].encoderBitrate,
+ cmdSettings[i].encoderGopStructureDuration,
+ cmdSettings[i].encoderVideoCodec,
+ NEXUS_HAS_SYNC_CHANNEL));
+ playpump = NEXUS_Playpump_Open(0, NULL);
+ assert(playpump);
+ playback = NEXUS_Playback_Create();
+ assert(playback);
+
+ file = NEXUS_FilePlay_OpenPosix(inputFilename, NULL);
+ if (!file) {
+ fprintf(stderr, "can't open file: %s\n", inputFilename);
+ return -1;
+ }
+
+#if NEXUS_HAS_SYNC_CHANNEL
+ /* create a sync channel */
+ NEXUS_SyncChannel_GetDefaultSettings(&syncChannelSettings);
+ syncChannel = NEXUS_SyncChannel_Create(&syncChannelSettings);
+#endif
+
+
+ NEXUS_StcChannel_GetDefaultSettings(0, &stcSettings);
+ stcSettings.timebase = NEXUS_Timebase_e0;
+ stcSettings.mode = NEXUS_StcChannelMode_eAuto;
+ stcChannel = NEXUS_StcChannel_Open(0, &stcSettings);
+
+ /* encoders/mux require different STC broadcast mode from decoder */
+ NEXUS_StcChannel_GetDefaultSettings(1, &stcSettings);
+ stcSettings.timebase = NEXUS_Timebase_e0;/* should be the same timebase for end-to-end locking */
+ stcSettings.mode = NEXUS_StcChannelMode_eAuto;
+ stcSettings.pcrBits = NEXUS_StcChannel_PcrBits_eFull42;/* ViCE2 requires 42-bit STC broadcast */
+ stcChannelTranscode = NEXUS_StcChannel_Open(1, &stcSettings);
+
+
+ NEXUS_Playback_GetSettings(playback, &playbackSettings);
+ playbackSettings.playpump = playpump;
+ /* set a stream format, it could be any audio video transport type or file format, i.e NEXUS_TransportType_eMp4, NEXUS_TransportType_eAvi ... */
+ playbackSettings.playpumpSettings.transportType = TRANSPORT_TYPE;
+ playbackSettings.stcChannel = stcChannel;
+ NEXUS_Playback_SetSettings(playback, &playbackSettings);
+
+
+ /* Bring up video display and outputs */
+ NEXUS_Display_GetDefaultSettings(&displaySettings);
+ displaySettings.format = NEXUS_VideoFormat_e480p;
+ display = NEXUS_Display_Open(0, &displaySettings);
+ window = NEXUS_VideoWindow_Open(display, 0);
+
+#ifdef NEXUS_NUM_DSP_VIDEO_ENCODERS
+ NEXUS_VideoWindow_GetMadSettings(window, &windowMadSettings);
+ windowMadSettings.deinterlace = false;
+ NEXUS_VideoWindow_SetMadSettings(window, &windowMadSettings);
+#endif
+
+#if NEXUS_NUM_COMPONENT_OUTPUTS
+ NEXUS_Display_AddOutput(display, NEXUS_ComponentOutput_GetConnector(platformConfig.outputs.component[0]));
+#endif
+#if NEXUS_NUM_HDMI_OUTPUTS
+ NEXUS_Display_AddOutput(display, NEXUS_HdmiOutput_GetVideoConnector(platformConfig.outputs.hdmi[0]));
+ rc = NEXUS_HdmiOutput_GetStatus(platformConfig.outputs.hdmi[0], &hdmiStatus);
+ if ( !rc && hdmiStatus.connected )
+ {
+ /* If current display format is not supported by monitor, switch to monitor's preferred format.
+ If other connected outputs do not support the preferred format, a harmless error will occur. */
+ NEXUS_Display_GetSettings(display, &displaySettings);
+ if ( !hdmiStatus.videoFormatSupported[displaySettings.format] ) {
+ displaySettings.format = hdmiStatus.preferredVideoFormat;
+ NEXUS_Display_SetSettings(display, &displaySettings);
+ }
+ }
+#endif
+
+ /* app assumes that deinterlacer for main window is enabled by default */
+
+ /* Open the audio decoder */
+ audioDecoder = NEXUS_AudioDecoder_Open(0, NULL);
+
+ /* Open the audio and pcr pid channel */
+ NEXUS_Playback_GetDefaultPidChannelSettings(&playbackPidSettings);
+ playbackPidSettings.pidSettings.pidType = NEXUS_PidType_eAudio;
+ playbackPidSettings.pidTypeSettings.audio.primary = audioDecoder; /* must be told codec for correct handling */
+ audioPidChannel = NEXUS_Playback_OpenPidChannel(playback, streamSettings.audioPid, &playbackPidSettings);
+
+ /* Set up decoder Start structures now. We need to know the audio codec to properly set up
+ the audio outputs. */
+ NEXUS_AudioDecoder_GetDefaultStartSettings(&audioProgram);
+ audioProgram.codec = streamSettings.audioCodec;
+ audioProgram.pidChannel = audioPidChannel;
+ audioProgram.stcChannel = stcChannel;
+
+ /* Connect audio decoders to outputs */
+ NEXUS_AudioOutput_AddInput(
+ NEXUS_AudioDac_GetConnector(platformConfig.outputs.audioDacs[0]),
+ NEXUS_AudioDecoder_GetConnector(audioDecoder, NEXUS_AudioDecoderConnectorType_eStereo));
+#if NEXUS_NUM_HDMI_OUTPUTS
+ NEXUS_AudioOutput_AddInput(
+ NEXUS_HdmiOutput_GetAudioConnector(platformConfig.outputs.hdmi[0]),
+ NEXUS_AudioDecoder_GetConnector(audioDecoder, NEXUS_AudioDecoderConnectorType_eStereo));
+#endif
+
+ /* Open audio mixer. The mixer can be left running at all times to provide continuous audio output despite input discontinuities. */
+ NEXUS_AudioMixer_GetDefaultSettings(&audioMixerSettings);
+ audioMixerSettings.mixUsingDsp = true;
+ audioMixer = NEXUS_AudioMixer_Open(&audioMixerSettings);
+ assert(audioMixer);
+
+ /* Open audio mux output */
+ audioMuxOutput = NEXUS_AudioMuxOutput_Create(NULL);
+ assert(audioMuxOutput);
+ /* Open audio encoder */
+ NEXUS_AudioEncoder_GetDefaultSettings(&encoderSettings);
+ encoderSettings.codec = NEXUS_AudioCodec_eAac;
+ audioCodec = encoderSettings.codec;
+ audioEncoder = NEXUS_AudioEncoder_Open(&encoderSettings);
+ assert(audioEncoder);
+
+ /* Connect decoder to mixer and set as master */
+ NEXUS_AudioMixer_AddInput(audioMixer,
+ NEXUS_AudioDecoder_GetConnector(audioDecoder, NEXUS_AudioDecoderConnectorType_eStereo));
+ audioMixerSettings.master = NEXUS_AudioDecoder_GetConnector(audioDecoder, NEXUS_AudioDecoderConnectorType_eStereo);
+ NEXUS_AudioMixer_SetSettings(audioMixer, &audioMixerSettings);
+ /* Connect mixer to encoder */
+ NEXUS_AudioEncoder_AddInput(audioEncoder, NEXUS_AudioMixer_GetConnector(audioMixer));
+ /* Connect mux to encoder */
+ NEXUS_AudioOutput_AddInput(
+ NEXUS_AudioMuxOutput_GetConnector(audioMuxOutput), NEXUS_AudioEncoder_GetConnector(audioEncoder));
+ NEXUS_AudioOutput_AddInput(
+ NEXUS_AudioDummyOutput_GetConnector(platformConfig.outputs.audioDummy[0]),
+ NEXUS_AudioMixer_GetConnector(audioMixer));
+
+ /* bring up decoder and connect to local display */
+ videoDecoder = NEXUS_VideoDecoder_Open(0, NULL); /* take default capabilities */
+
+ /* NOTE: must open video encoder before display; otherwise open will init ViCE2 core
+ * which might cause encoder display GISB error since encoder display would
+ * trigger RDC to program mailbox registers in ViCE2;
+ */
+ videoEncoder = NEXUS_VideoEncoder_Open(0, NULL);
+ assert(videoEncoder);
+
+ /* Bring up video encoder display */
+ NEXUS_Display_GetDefaultSettings(&displaySettings);
+ displaySettings.displayType = NEXUS_DisplayType_eAuto;
+ displaySettings.timingGenerator = NEXUS_DisplayTimingGenerator_eEncoder;
+ displaySettings.format = cmdSettings[i].displayFormat;/* source is 60hz */
+ displaySettings.frameRateMaster = NULL;/* disable frame rate tracking for now */
+ displaySettings.format = cmdSettings[i].displayFormat;
+ displayTranscode = NEXUS_Display_Open(NEXUS_ENCODER_DISPLAY_IDX, &displaySettings);/* cmp3 for transcoder */
+ assert(displayTranscode);
+
+ windowTranscode = NEXUS_VideoWindow_Open(displayTranscode, 0);
+ assert(windowTranscode);
+
+#ifdef NEXUS_NUM_DSP_VIDEO_ENCODERS
+ NEXUS_VideoWindow_GetSettings(windowTranscode, &windowSettings);
+ windowSettings.position.width = MAX_DSP_ENCODER_WIDTH;
+ windowSettings.position.height = MAX_DSP_ENCODER_HEIGHT;
+ windowSettings.pixelFormat = NEXUS_PixelFormat_eCr8_Y18_Cb8_Y08;
+ windowSettings.visible = false;
+ NEXUS_VideoWindow_SetSettings(windowTranscode, &windowSettings);
+
+ NEXUS_VideoWindow_GetScalerSettings(windowTranscode, &sclSettings);
+ sclSettings.bandwidthEquationParams.bias = NEXUS_ScalerCaptureBias_eScalerBeforeCapture;
+ sclSettings.bandwidthEquationParams.delta = 1000000;
+ NEXUS_VideoWindow_SetScalerSettings(windowTranscode, &sclSettings);
+#endif
+
+ /* enable deinterlacer to improve quality */
+ NEXUS_VideoWindow_GetMadSettings(windowTranscode, &windowMadSettings);
+ windowMadSettings.deinterlace = true;
+ windowMadSettings.enable22Pulldown = true; /* actually reverse 22 pulldown */
+ windowMadSettings.enable32Pulldown = true; /* actually reverse 32 pulldown */
+ NEXUS_VideoWindow_SetMadSettings(windowTranscode, &windowMadSettings);
+
+ /* connect same decoder to the encoder display;
+ * NOTE: simul display + transcode mode might have limitation in audio pathre;
+ * here is for video transcode bringup purpose;
+ */
+ NEXUS_VideoWindow_AddInput(windowTranscode, NEXUS_VideoDecoder_GetConnector(videoDecoder));
+ NEXUS_VideoWindow_AddInput(window, NEXUS_VideoDecoder_GetConnector(videoDecoder));
+
+ /* Open the video pid channel */
+ NEXUS_Playback_GetDefaultPidChannelSettings(&playbackPidSettings);
+ playbackPidSettings.pidSettings.pidType = NEXUS_PidType_eVideo;
+ playbackPidSettings.pidTypeSettings.video.codec = streamSettings.videoCodec; /* must be told codec for correct handling */
+ playbackPidSettings.pidTypeSettings.video.index = true;
+ playbackPidSettings.pidTypeSettings.video.decoder = videoDecoder;
+ videoPidChannel = NEXUS_Playback_OpenPidChannel(playback, streamSettings.videoPid, &playbackPidSettings);
+
+ /* Set up decoder Start structures now. We need to know the audio codec to properly set up
+ the audio outputs. */
+ NEXUS_VideoDecoder_GetDefaultStartSettings(&videoProgram);
+ videoProgram.codec = streamSettings.videoCodec;
+ videoProgram.pidChannel = videoPidChannel;
+ videoProgram.stcChannel = stcChannel;
+
+ NEXUS_VideoEncoder_GetSettings(videoEncoder, &videoEncoderConfig);
+ videoEncoderConfig.variableFrameRate = true; /* encoder can detect film content and follow CET */
+ videoEncoderConfig.frameRate = NEXUS_VideoFrameRate_e30;
+ videoEncoderConfig.frameRate = cmdSettings[i].encoderFrameRate;
+ videoEncoderConfig.bitrateMax = cmdSettings[i].encoderBitrate;
+ videoEncoderConfig.streamStructure.framesP = cmdSettings[i].encoderGopStructureFramesP;
+ videoEncoderConfig.streamStructure.framesB = cmdSettings[i].encoderGopStructureFramesB;
+ videoEncoderConfig.streamStructure.duration = cmdSettings[i].encoderGopStructureDuration;
+
+ NEXUS_VideoEncoder_GetDefaultStartSettings(&videoEncoderStartConfig);
+ videoEncoderStartConfig.codec = cmdSettings[i].encoderVideoCodec;
+ videoEncoderStartConfig.profile = cmdSettings[i].encoderProfile;
+ videoEncoderStartConfig.level = cmdSettings[i].encoderLevel;
+ videoEncoderStartConfig.input = displayTranscode;
+ videoEncoderStartConfig.stcChannel = stcChannelTranscode;
+
+#ifdef NEXUS_NUM_DSP_VIDEO_ENCODERS
+ videoEncoderStartConfig.bounds.inputDimension.max.width = windowSettings.position.width;
+ videoEncoderStartConfig.bounds.inputDimension.max.height = windowSettings.position.height;
+#else
+
+{ /* set proper encoder interlaced/progressive format. NOTE: fw currently doesn't support dynamic switch between interlaced and progressive. */
+ #include "nexus_core_utils.h"
+ NEXUS_VideoFormatInfo fmtInfo;
+ NEXUS_VideoFormat_GetInfo(cmdSettings[i].displayFormat, &fmtInfo);
+ videoEncoderStartConfig.interlaced = fmtInfo.interlaced;
+}
+ videoEncoderStartConfig.encodeUserData = true;
+
+ /******************************************
+ * add configurable delay to video path
+ */
+ /* NOTE: ITFP is encoder feature to detect and lock on 3:2/2:2 cadence in the video content to help
+ * efficient coding for interlaced formats; disabling ITFP will impact the bit efficiency but reduce the encode delay. */
+ videoEncoderConfig.enableFieldPairing = true;
+
+ /* 0 to use default 750ms rate buffer delay; TODO: allow user to adjust it to lower encode delay at cost of quality reduction! */
+ videoEncoderStartConfig.rateBufferDelay = 0;
+
+ /* to allow 23.976p passthru; TODO: allow user to configure minimum framerate to achieve lower delay!
+ * Note: lower minimum framerate means longer encode delay */
+ videoEncoderStartConfig.bounds.inputFrameRate.min = NEXUS_VideoFrameRate_e23_976;
+
+ /* to allow 24 ~ 60p dynamic frame rate coding TODO: allow user to config higher minimum frame rate for lower delay! */
+ videoEncoderStartConfig.bounds.outputFrameRate.min = NEXUS_VideoFrameRate_e23_976;
+ videoEncoderStartConfig.bounds.outputFrameRate.max = NEXUS_VideoFrameRate_e60;
+
+ /* max encode size allows 1080p encode; TODO: allow user to choose lower max resolution for lower encode delay */
+ videoEncoderStartConfig.bounds.inputDimension.max.width = 1920;
+ videoEncoderStartConfig.bounds.inputDimension.max.height = 1088;
+
+ /* encode setting and startSetting to be set after end-to-end delay is determined */
+
+ /* get end-to-end delay (Dee) for audio and video encoders;
+ * TODO: match AV delay! In other words,
+ * if (aDee > vDee) {
+ * vDee' = aDee' = aDee;
+ * }
+ * else {
+ * vDee' = aDee' = vDee;
+ * }
+ */
+ {
+ unsigned Dee;
+
+ /* NOTE: video encoder delay is in 27MHz ticks */
+ NEXUS_VideoEncoder_GetDelayRange(videoEncoder, &videoEncoderConfig, &videoEncoderStartConfig, &videoDelay);
+ printf("\n\tVideo encoder end-to-end delay = %u ms; maximum allowed: %u ms\n", videoDelay.min/27000, videoDelay.max/27000);
+
+ NEXUS_AudioMuxOutput_GetDelayStatus(audioMuxOutput, audioCodec, &audioDelayStatus);
+ printf("\tAudio codec %d end-to-end delay = %u ms\n", audioCodec, audioDelayStatus.endToEndDelay);
+
+ Dee = audioDelayStatus.endToEndDelay * 27000; /* in 27MHz ticks */
+ if(Dee > videoDelay.min)
+ {
+ if(Dee > videoDelay.max)
+ {
+ BDBG_ERR(("\tAudio Dee is way too big! Use video Dee max!"));
+ Dee = videoDelay.max;
+ }
+ else
+ {
+ printf("\tUse audio Dee %u ms %u ticks@27Mhz!\n", Dee/27000, Dee);
+ }
+ }
+ else
+ {
+ Dee = videoDelay.min;
+ printf("\tUse video Dee %u ms or %u ticks@27Mhz!\n\n", Dee/27000, Dee);
+ }
+ videoEncoderConfig.encoderDelay = Dee;
+
+ /* Start audio mux output */
+ NEXUS_AudioMuxOutput_GetDefaultStartSettings(&audioMuxStartSettings);
+ audioMuxStartSettings.stcChannel = stcChannelTranscode;
+ audioMuxStartSettings.presentationDelay = Dee/27000;/* in ms */
+ NEXUS_AudioMuxOutput_Start(audioMuxOutput, &audioMuxStartSettings);
+ }
+ /* Note: video encoder SetSettings needs to be called after the encoder delay is determined; */
+ NEXUS_VideoEncoder_SetSettings(videoEncoder, &videoEncoderConfig);
+
+ NEXUS_Playpump_GetDefaultOpenSettings(&playpumpConfig);
+ playpumpConfig.fifoSize = 16384; /* reduce FIFO size allocated for playpump */
+ playpumpConfig.numDescriptors = 64; /* set number of descriptors */
+ playpumpConfig.streamMuxCompatible = true;
+ playpumpTranscodeVideo = NEXUS_Playpump_Open(1, &playpumpConfig);
+ assert(playpumpTranscodeVideo);
+#endif
+
+ NEXUS_Playpump_GetDefaultOpenSettings(&playpumpConfig);
+ playpumpConfig.fifoSize = 16384; /* reduce FIFO size allocated for playpump */
+ playpumpConfig.numDescriptors = 64; /* set number of descriptors */
+ playpumpConfig.streamMuxCompatible = true;
+ playpumpTranscodeAudio = NEXUS_Playpump_Open(2, &playpumpConfig);
+ assert(playpumpTranscodeAudio);
+ playpumpTranscodePcr = NEXUS_Playpump_Open(3, &playpumpConfig);
+ assert(playpumpTranscodePcr);
+
+ BKNI_CreateEvent(&finishEvent);
+ NEXUS_StreamMux_GetDefaultCreateSettings(&muxCreateSettings);
+ muxCreateSettings.finished.callback = transcoderFinishCallback;
+ muxCreateSettings.finished.context = finishEvent;
+ streamMux = NEXUS_StreamMux_Create(&muxCreateSettings);
+ NEXUS_StreamMux_GetDefaultStartSettings(&muxConfig);
+ muxConfig.transportType = NEXUS_TransportType_eTs;
+ muxConfig.stcChannel = stcChannelTranscode;
+
+ muxConfig.video[0].pid = 0x11;
+ muxConfig.video[0].encoder = videoEncoder;
+ muxConfig.video[0].playpump = playpumpTranscodeVideo;
+
+ muxConfig.audio[0].pid = 0x12;
+ muxConfig.audio[0].muxOutput = audioMuxOutput;
+ muxConfig.audio[0].playpump = playpumpTranscodeAudio;
+ muxConfig.pcr.pid = 0x13;
+ muxConfig.pcr.playpump = playpumpTranscodePcr;
+ muxConfig.pcr.interval = 50;
+
+#if NEXUS_HAS_SYNC_CHANNEL
+ /* connect sync channel */
+ NEXUS_SyncChannel_GetSettings(syncChannel, &syncChannelSettings);
+ syncChannelSettings.videoInput = NEXUS_VideoDecoder_GetConnector(videoDecoder);
+ syncChannelSettings.audioInput[0] = NEXUS_AudioDecoder_GetConnector(audioDecoder, NEXUS_AudioDecoderConnectorType_eStereo);
+ NEXUS_SyncChannel_SetSettings(syncChannel, &syncChannelSettings);
+#endif
+
+ /* Start decoder */
+ NEXUS_VideoDecoder_Start(videoDecoder, &videoProgram);
+ NEXUS_AudioDecoder_Start(audioDecoder, &audioProgram);
+
+ /* Start playback */
+ NEXUS_Playback_Start(playback, file, NULL);
+
+ recpump = NEXUS_Recpump_Open(0, NULL);
+ assert(recpump);
+
+ record = NEXUS_Record_Create();
+ assert(record);
+
+ NEXUS_Record_GetSettings(record, &recordSettings);
+ recordSettings.recpump = recpump;
+ NEXUS_Record_SetSettings(record, &recordSettings);
+
+#if BTST_ENABLE_TS_LAYER_USER_DATA_PASS_THRU
+{
+ unsigned userDataPid[2] = {2501, 2514};
+
+ NEXUS_Message_GetDefaultSettings(&messageSettings);
+    /* SCTE 270 spec max TS VBI user data bitrate=270Kbps, so a 256KB buffer can hold 7.5 seconds'
+       worth of user data for video synchronization; TODO: may be reduced if unnecessary */
+ messageSettings.bufferSize = 256*1024;
+ messageSettings.overflow.callback = message_callback; /* report overflow error */
+ messageSettings.overflow.param = 0;
+ muxConfig.userdata[0].message = NEXUS_Message_Open(&messageSettings);
+ BDBG_ASSERT(muxConfig.userdata[0].message);
+ messageSettings.overflow.param = 1;
+ muxConfig.userdata[1].message = NEXUS_Message_Open(&messageSettings);
+ BDBG_ASSERT(muxConfig.userdata[1].message);
+
+ /* open source user data PID channel */
+ NEXUS_Playback_GetDefaultPidChannelSettings(&playbackPidSettings);
+ playbackPidSettings.pidSettings.pidType = NEXUS_PidType_eOther; /* capture the TS packets with the user data PES */
+ playbackPidSettings.pidSettings.pidSettings.pidChannelIndex = NEXUS_PID_CHANNEL_OPEN_MESSAGE_CAPABLE;
+ playbackPidSettings.pidSettings.pidSettings.remap.enabled = true;
+ playbackPidSettings.pidSettings.pidSettings.remap.pid = 0x14;/* optional PID remap */
+
+ pidChannelUserData[0] = NEXUS_Playback_OpenPidChannel(playback, userDataPid[0], &playbackPidSettings);
+ assert(pidChannelUserData[0]);
+ /* open second user data PID */
+ playbackPidSettings.pidSettings.pidSettings.remap.pid = 0x15;/* optional PID remap */
+ pidChannelUserData[1] = NEXUS_Playback_OpenPidChannel(playback, userDataPid[1], &playbackPidSettings);
+ assert(pidChannelUserData[1]);
+
+ NEXUS_Message_GetDefaultStartSettings(muxConfig.userdata[0].message, &messageStartSettings);
+ messageStartSettings.format = NEXUS_MessageFormat_eTs;
+ messageStartSettings.pidChannel = pidChannelUserData[0];
+
+ /* must start message before stream mux starts */
+ NEXUS_Message_Start(muxConfig.userdata[0].message, &messageStartSettings);
+
+ messageStartSettings.pidChannel = pidChannelUserData[1];
+ NEXUS_Message_Start(muxConfig.userdata[1].message, &messageStartSettings);
+
+ /* open transcode mux output user data PidChannels */
+ pidChannelTranscodeUserData[0] = NEXUS_Playpump_OpenPidChannel(playpumpTranscodePcr, 0x14, NULL);
+ assert(pidChannelTranscodeUserData[0]);
+ pidChannelTranscodeUserData[1] = NEXUS_Playpump_OpenPidChannel(playpumpTranscodePcr, 0x15, NULL);
+ assert(pidChannelTranscodeUserData[1]);
+ NEXUS_Record_AddPidChannel(record, pidChannelTranscodeUserData[0], NULL);
+ NEXUS_Record_AddPidChannel(record, pidChannelTranscodeUserData[1], NULL);
+}
+#endif
+
+#define BTST_PMT_PID 0x0055
+ /* open PidChannels */
+ pidChannelTranscodePcr = NEXUS_Playpump_OpenPidChannel(playpumpTranscodePcr, muxConfig.pcr.pid, NULL);
+ assert(pidChannelTranscodePcr);
+ pidChannelTranscodePmt = NEXUS_Playpump_OpenPidChannel(playpumpTranscodePcr, BTST_PMT_PID, NULL);
+ assert(pidChannelTranscodePmt);
+ pidChannelTranscodePat = NEXUS_Playpump_OpenPidChannel(playpumpTranscodePcr, 0, NULL);
+ assert(pidChannelTranscodePat);
+
+ /* start mux */
+ NEXUS_StreamMux_Start(streamMux,&muxConfig, &muxOutput);
+ pidChannelTranscodeVideo = muxOutput.video[0];
+ pidChannelTranscodeAudio = muxOutput.audio[0];
+
+ /* configure the video pid for indexing */
+ NEXUS_Record_GetDefaultPidChannelSettings(&recordPidSettings);
+ recordPidSettings.recpumpSettings.pidType = NEXUS_PidType_eVideo;
+ recordPidSettings.recpumpSettings.pidTypeSettings.video.index = true;
+ recordPidSettings.recpumpSettings.pidTypeSettings.video.codec = cmdSettings[i].encoderVideoCodec;
+
+ /* add multiplex data to the same record */
+ NEXUS_Record_AddPidChannel(record, pidChannelTranscodeVideo, &recordPidSettings);
+ NEXUS_Record_AddPidChannel(record, pidChannelTranscodeAudio, NULL);
+ NEXUS_Record_AddPidChannel(record, pidChannelTranscodePcr, NULL);
+ NEXUS_Record_AddPidChannel(record, pidChannelTranscodePat, NULL);
+ NEXUS_Record_AddPidChannel(record, pidChannelTranscodePmt, NULL);
+
+ fileTranscode = NEXUS_FileRecord_OpenPosix(outputFilename, "/var/media/videos/transcoded.nav");
+ assert(fileTranscode);
+
+ /* Start record of stream mux output */
+ NEXUS_Record_Start(record, fileTranscode);
+
+ {
+static const uint8_t s_auiTSPacket_PAT[188] =
+{
+ 0x47,0x40,0x00,0x30,0xa6,0x40,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,'P', 'A', 'T',
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0x00,0x00,0xb0,0x0d,0x00,
+ 0x00,0x81,0x00,0x00,0x00,0x01,0xe0,0x55,
+ 0x65,0x80,0x5e,0xdc,
+};
+
+#define BTST_PMT_TRANSPORT_IDX BTST_SYSTEM_TRANSPORT_IDX
+static const uint8_t s_auiTSPacket_PMT[188] =
+{
+ 0x47,0x40,BTST_PMT_PID,0x30,
+#if BTST_ENABLE_TS_LAYER_USER_DATA_PASS_THRU
+ 0x66,/* adaptation field length */
+#else
+ 0x9c,/* adaptation field length */
+#endif
+ 0x40,/* AF flags */
+ 0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,0xff,0xff,'P', 'M', 'T',
+ 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
+ 0xff,0xff,0xff,
+ /* placeholder for VBI user data PMT section and descriptors */
+ 0x00,/* pointer */
+ 0x02,0xb0,0x4d,0x00,0x01,0xc1,0x00,0x00,0xe0,0x13,0xf0,0x00,
+ /* VBI user data PID */
+ 0x06,0xe0,0x14,0xf0,0x19,
+ 0x52,0x01,0x0e,0x56,0x14,0x64,0x75,0x74,
+ 0x10,0x88,0x6e,0x6f,0x72,0x17,0x77,0x64,
+ 0x75,0x74,0x08,0x88,0x73,0x77,0x65,0x16,
+ 0x91,
+ /* VBI user data PID */
+ 0x06,0xe0,0x15,0xf0,0x13,
+ 0x0a,0x04,0x73,0x77,0x65,0x00,
+#if BTST_ENABLE_TS_LAYER_USER_DATA_PASS_THRU
+ 0x52,0x01,
+ 0x12,0x59,0x08,0x73,0x77,0x65,0x10,0x00,
+ 0x02,0x00,0x02,
+#else
+ 0x00,/* pointer */
+ 0x02,0xb0,0x17,0x00,0x01,0xc1,0x00,0x00,0xe0,0x13,0xf0,0x00,
+#endif
+ 0x1b,0xe0,0x11,0xf0,0x00,/* video */
+ 0x81,0xe0,0x12,0xf0,0x00,/* audio */
+ 0x3d,0x19,0x07,0x2f
+};
+
+ NEXUS_StreamMuxSystemData psi[2];
+ /* Get CRC right to be playable by VLCplayer etc 3rd party SW */
+ uint32_t uiCRC = CRC32_mpeg(0, (uint8_t *) s_auiTSPacket_PAT + 184 - (8+4*1), 8+4*1);
+ fprintf(stderr, "PAT crc=%x\n", uiCRC);
+ NEXUS_Memory_Allocate(188, NULL, &pat);
+ NEXUS_Memory_Allocate(188, NULL, &pmt);
+ BKNI_Memcpy(pat, s_auiTSPacket_PAT, sizeof(s_auiTSPacket_PAT));
+ BKNI_Memcpy(pmt, s_auiTSPacket_PMT, sizeof(s_auiTSPacket_PMT));
+ ((uint8_t*)pat)[184] = (uiCRC >> 24) & 0xFF;
+ ((uint8_t*)pat)[185] = (uiCRC >> 16) & 0xFF;
+ ((uint8_t*)pat)[186] = (uiCRC >> 8) & 0xFF;
+ ((uint8_t*)pat)[187] = (uiCRC >> 0) & 0xFF;
+
+ /* video codec */
+ switch(videoEncoderStartConfig.codec)
+ {
+ case NEXUS_VideoCodec_eMpeg2: ((uint8_t *) pmt)[184-2*5] = 0x2; break;
+ case NEXUS_VideoCodec_eMpeg4Part2: ((uint8_t *) pmt)[184-2*5] = 0x10; break;
+ case NEXUS_VideoCodec_eH264: ((uint8_t *) pmt)[184-2*5] = 0x1b; break;
+ case NEXUS_VideoCodec_eVc1SimpleMain: ((uint8_t *) pmt)[184-2*5] = 0xea; break;
+ default:
+ BDBG_ERR(("Video encoder codec %d is not supported!\n", videoEncoderStartConfig.codec));
+ BDBG_ASSERT(0);
+ }
+ /* audio stream type */
+ switch(audioCodec)
+ {
+ case NEXUS_AudioCodec_eMpeg: ((uint8_t *) pmt)[184-5] = 0x4; break;
+ case NEXUS_AudioCodec_eMp3: ((uint8_t *) pmt)[184-5] = 0x4; break;
+ case NEXUS_AudioCodec_eAac : ((uint8_t *) pmt)[184-5] = 0xf; break; /* ADTS */
+ case NEXUS_AudioCodec_eAacPlus: ((uint8_t *) pmt)[184-5] = 0x11; break;/* LOAS */
+ /* MP2TS doesn't allow 14496-3 AAC+ADTS; here is placeholder to test AAC-HE before LOAS encode is supported; */
+ case NEXUS_AudioCodec_eAacPlusAdts: ((uint8_t *) pmt)[184-5] = 0x11; break;
+ case NEXUS_AudioCodec_eAc3: ((uint8_t *) pmt)[184-5] = 0x81; break;
+ default:
+ BDBG_ERR(("Audio encoder codec %d is not supported!\n", audioCodec));
+ }
+ /* A+V two ES */
+#if BTST_ENABLE_TS_LAYER_USER_DATA_PASS_THRU
+ uiCRC = CRC32_mpeg(0, (uint8_t *) pmt + 184 - (12+5*2)-54, 12+5*2+54);
+#else
+ uiCRC = CRC32_mpeg(0, (uint8_t *) pmt + 184 - (12+5*2), 12+5*2);
+#endif
+ fprintf(stderr, "PMT crc=%x\n", uiCRC);
+
+ ((uint8_t*)pmt)[184] = (uiCRC >> 24) & 0xFF;
+ ((uint8_t*)pmt)[185] = (uiCRC >> 16) & 0xFF;
+ ((uint8_t*)pmt)[186] = (uiCRC >> 8) & 0xFF;
+ ((uint8_t*)pmt)[187] = (uiCRC >> 0) & 0xFF;
+ NEXUS_Memory_FlushCache(pat, sizeof(s_auiTSPacket_PAT));
+ NEXUS_Memory_FlushCache(pmt, sizeof(s_auiTSPacket_PMT));
+ BKNI_Memset(psi, 0, sizeof(psi));
+ psi[0].size = 188;
+ psi[0].pData = pat;
+ psi[0].timestampDelta = 100;
+ psi[1].size = 188;
+ psi[1].pData = pmt;
+ psi[1].timestampDelta = 100;
+ NEXUS_StreamMux_AddSystemDataBuffer(streamMux, &psi[0]);
+ NEXUS_StreamMux_AddSystemDataBuffer(streamMux, &psi[1]);
+}
+
+ NEXUS_VideoEncoder_Start(videoEncoder, &videoEncoderStartConfig);
+
+ BDBG_WRN(("\n\nTranscoding for 60 minutes, press any key to stop now...\n"));
+ ACL_Delay(3600000000);
+
+ /* Bring down system */
+ NEXUS_Playback_Stop(playback);
+ NEXUS_VideoDecoder_Stop(videoDecoder);
+
+ NEXUS_AudioDecoder_Stop(audioDecoder);
+ NEXUS_AudioMixer_Stop(audioMixer);
+ NEXUS_AudioMuxOutput_Stop(audioMuxOutput);
+#if NEXUS_HAS_SYNC_CHANNEL
+ /* disconnect sync channel */
+ NEXUS_SyncChannel_GetSettings(syncChannel, &syncChannelSettings);
+ syncChannelSettings.videoInput = NULL;
+ syncChannelSettings.audioInput[0] = NULL;
+ syncChannelSettings.audioInput[1] = NULL;
+ NEXUS_SyncChannel_SetSettings(syncChannel, &syncChannelSettings);
+#endif
+
+
+ NEXUS_VideoEncoder_Stop(videoEncoder, NULL);
+ NEXUS_StreamMux_Finish(streamMux);
+
+    /* wait for the encoder buffer model's data to be drained */
+ if(BKNI_WaitForEvent(finishEvent, (videoEncoderConfig.encoderDelay/27000)*2)!=BERR_SUCCESS) {
+ fprintf(stderr, "TIMEOUT\n");
+ }
+
+ BKNI_DestroyEvent(finishEvent);
+
+ NEXUS_Record_Stop(record);
+ /* Note: remove all record PID channels before stream mux stop since streammux would close the A/V PID channels */
+ NEXUS_Record_RemoveAllPidChannels(record);
+ NEXUS_StreamMux_Stop(streamMux);
+
+#if BTST_ENABLE_TS_LAYER_USER_DATA_PASS_THRU /* stop message after stream mux stops */
+ NEXUS_Message_Stop(muxConfig.userdata[0].message);
+ NEXUS_Message_Close(muxConfig.userdata[0].message);
+ NEXUS_Message_Stop(muxConfig.userdata[1].message);
+ NEXUS_Message_Close(muxConfig.userdata[1].message);
+#endif
+
+ NEXUS_Record_Destroy(record);
+ NEXUS_Recpump_Close(recpump);
+ NEXUS_FileRecord_Close(fileTranscode);
+
+ NEXUS_Playback_CloseAllPidChannels(playback);
+ NEXUS_Playpump_CloseAllPidChannels(playpumpTranscodePcr);
+ NEXUS_FilePlay_Close(file);
+ NEXUS_Playback_Destroy(playback);
+ NEXUS_Playpump_Close(playpump);
+
+ NEXUS_VideoWindow_RemoveInput(window, NEXUS_VideoDecoder_GetConnector(videoDecoder));
+ NEXUS_VideoWindow_RemoveInput(windowTranscode, NEXUS_VideoDecoder_GetConnector(videoDecoder));
+ NEXUS_VideoInput_Shutdown(NEXUS_VideoDecoder_GetConnector(videoDecoder));
+ NEXUS_VideoDecoder_Close(videoDecoder);
+ NEXUS_VideoWindow_Close(window);
+ NEXUS_VideoWindow_Close(windowTranscode);
+ NEXUS_Display_Close(display);
+ NEXUS_Display_Close(displayTranscode);
+
+ NEXUS_StreamMux_Destroy(streamMux);
+
+ NEXUS_Playpump_Close(playpumpTranscodePcr);
+
+ NEXUS_Playpump_Close(playpumpTranscodeVideo);
+ NEXUS_VideoEncoder_Close(videoEncoder);
+
+ NEXUS_AudioOutput_RemoveAllInputs(NEXUS_AudioMuxOutput_GetConnector(audioMuxOutput));
+ NEXUS_AudioEncoder_RemoveAllInputs(audioEncoder);
+ NEXUS_AudioInput_Shutdown(NEXUS_AudioEncoder_GetConnector(audioEncoder));
+ NEXUS_AudioEncoder_Close(audioEncoder);
+ NEXUS_AudioMixer_RemoveAllInputs(audioMixer);
+ NEXUS_AudioOutput_RemoveAllInputs(NEXUS_AudioDummyOutput_GetConnector(platformConfig.outputs.audioDummy[0]));
+ NEXUS_AudioOutput_Shutdown(NEXUS_AudioDummyOutput_GetConnector(platformConfig.outputs.audioDummy[0]));
+ NEXUS_AudioInput_Shutdown(NEXUS_AudioMixer_GetConnector(audioMixer));
+ NEXUS_AudioMixer_Close(audioMixer);
+ NEXUS_AudioOutput_RemoveAllInputs(NEXUS_AudioDac_GetConnector(platformConfig.outputs.audioDacs[0]));
+ NEXUS_AudioOutput_RemoveAllInputs(NEXUS_SpdifOutput_GetConnector(platformConfig.outputs.spdif[0]));
+#if NEXUS_NUM_HDMI_OUTPUTS
+ NEXUS_AudioOutput_RemoveAllInputs(NEXUS_HdmiOutput_GetAudioConnector(platformConfig.outputs.hdmi[0]));
+#endif
+
+ NEXUS_Playpump_Close(playpumpTranscodeAudio);
+ NEXUS_AudioOutput_Shutdown(NEXUS_AudioMuxOutput_GetConnector(audioMuxOutput));
+ NEXUS_AudioMuxOutput_Destroy(audioMuxOutput);
+
+ NEXUS_AudioInput_Shutdown(NEXUS_AudioDecoder_GetConnector(audioDecoder, NEXUS_AudioDecoderConnectorType_eStereo));
+ NEXUS_AudioOutput_Shutdown(NEXUS_AudioDac_GetConnector(platformConfig.outputs.audioDacs[0]));
+ NEXUS_AudioOutput_Shutdown(NEXUS_SpdifOutput_GetConnector(platformConfig.outputs.spdif[0]));
+
+ NEXUS_AudioDecoder_Close(audioDecoder);
+
+#if NEXUS_HAS_SYNC_CHANNEL
+ NEXUS_SyncChannel_Destroy(syncChannel);
+#endif
+
+ NEXUS_StcChannel_Close(stcChannel);
+ NEXUS_StcChannel_Close(stcChannelTranscode);
+ NEXUS_Memory_Free(pat);
+ NEXUS_Memory_Free(pmt);
+
+ NEXUS_Platform_Uninit();
+
+ return 0;
+}
+
+//#endif