#!/bin/bash
#
# Look for a .keep file with space separated pairs of timestamps, one pair
# per line, to be kept from the original .mpg file, start parallel jobs to
# encode each piece, wait for them all to finish, then join the pieces
# back together.
#
mydir=`dirname "$0"`
PATH=`$mydir/echo-path`
export PATH

#
# Set x264 encoding parameters to do a high quality 1280x720 scaled down
# recording of the original 1080i mpg file downloaded from the TiVo. Use
# -nostats so the logs don't grow to several megabytes.
#
encopts="-nostats -vf scale=1280:-1"
encopts="$encopts -c:v libx264 -preset slower"
encopts="$encopts -x264opts fast-pskip=1:rc-lookahead=6"
encopts="$encopts -b:v 1500k -threads auto"

#
# Deal with a few possible options
#
jobcount="4"
nicemax=19
nicemin=2
tryfast="no"
while getopts ':Fj:n:Hh' opt
do
    case $opt in
    F) tryfast="yes" ;;
    j) jobcount=$OPTARG ;;
    n) nicemax=$OPTARG ;;
    H|h)
        myname=`basename "$0"`
        cat <<EOF 1>&2
Usage: $myname [-F] [-j jobcount] [-n nicemax] file.mpg

Transcode the segments of file.mpg listed in its .keep file as parallel
background jobs, then join the encoded segments back together.

    -F   split the .mpg into part files up front for faster encoding
    -j   number of parallel transcode jobs to run (default 4)
    -n   maximum niceness for the staggered background jobs (default 19)
EOF
        exit 2
        ;;
    esac
done
shift $(($OPTIND - 1))
if [ "$nicemax" -gt 19 ]
then
    nicemax="19"
fi
if [ "$nicemax" -lt "$nicemin" ]
then
    nicemax="$nicemin"
fi

infile="$1"
if [ -f "$infile" ]
then
    waitforit=''
    dirname=`dirname "$infile"`
    cd "$dirname"
    basefile=`basename "$infile" .mpg`
    rm -f "$basefile.csv" "$basefile-part"*.mpg
    rm -rf "tempdir-"*

    # Try to find the .keep file; if it isn't there, start a run of comskip
    # in the background to create it.

    keepfile="$basefile.keep"
    if [ -f "$keepfile" ]
    then
        :
    else
        keepfile="guess-$keepfile"
    fi
    if [ -f "$keepfile" ]
    then
        :
    else
        create-keep-guess "$basefile.mpg" > guess.log 2>&1 < /dev/null &
        waitforit="$!"
    fi

    # I don't always need a time-*.avi file, but if I want to manually pick
    # off the times of commercials (which I usually get around to doing),
    # I'll need it, so go ahead and create it early. I don't need to wait
    # for it.

    if [ -f "time-$basefile.avi" ]
    then
        :
    else
        generate-timestamps "$basefile.mpg" > gentime.log 2>&1 < /dev/null &
    fi

    # If I started any background processing, wait for it now.

    if [ -n "$waitforit" ]
    then
        wait "$waitforit"
    fi

    # And yet another file I need (and cannot background easily, since it
    # requires the .keep file to be created first) is the .csv file and
    # all the split mpg file parts (or a dummy .csv if I'm not using -F
    # fast encoding).

    if [ -f "$basefile.csv" ]
    then
        :
    else
        if [ "$tryfast" = "yes" ]
        then
            print-segment-command "$basefile.mpg" "$keepfile" > runseg.temp
            rm -f "$basefile-part"*.mpg
            bash -x ./runseg.temp > runseg.log 2>&1 < /dev/null
        else
            # Create a dummy .csv file with a single (plenty long enough)
            # segment.
            echo "$basefile.mpg",0.000000,18000.000000 > "$basefile.csv"
        fi
    fi

    if [ -f "$keepfile" ]
    then
        partnum=0
        rm -f "$basefile.concat"
        rm -f "$basefile.jobs"
        niceval="$nicemax"
        while read starttime endtime
        do
            partnum=`expr $partnum + 1`

            # Use perl script to do the complicated work of getting the
            # adjusted start time for the right part file.

            print-one-segment "$basefile.csv" $starttime $endtime > oneseg.tmp
            if [ -s oneseg.tmp ]
            then
                :
            else
                echo failed: print-one-segment "$basefile.csv" \
                    $starttime $endtime
                exit 2
            fi
            read partfile adjtime duration < oneseg.tmp
            td="tempdir-$partnum"
            [ -d "$td" ] || mkdir "$td"
            rm -f "$td/run"
            echo cd "$td" >> "$td/run"

            # Uncompressed audio allows more exact manual diddling when the
            # mpg files are corrupted and I have audio/video sync problems.
            # Compress the audio at the end when joining the segments.
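            # The run file built here does a two-pass x264 encode of just
            # this segment: pass 1 gathers rate control stats and throws
            # the video away (output to /dev/null), and pass 2 uses those
            # stats to produce the real pcm-*.avi part with the PCM audio
            # described above.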
            echo rm -f "pcm-$basefile.avi" >> "$td/run"
            echo ffmpeg -y -i "../$partfile" $encopts -pass 1 \
                -f avi -ss $adjtime -t $duration /dev/null >> "$td/run"
            echo ffmpeg -i "../$partfile" $encopts -pass 2 \
                -acodec pcm_s16le -ar 48000 -ac 2 \
                -ss $adjtime -t $duration \
                "pcm-$basefile.avi" >> "$td/run"

            # Save the ordered list of parts in the format the ffmpeg concat
            # demuxer needs to put them all together again.

            echo file \'$td/pcm-$basefile.avi\' >> "$basefile.concat"

            # Create job for transcoding this piece in the background.
            # Stagger the niceness of each background job so we don't waste
            # a lot of time context switching when the actual cpu bound
            # encoding is going on, but we can work on multiple jobs during
            # the seeking and pass 1 phases (which are not totally cpu
            # bound).

            echo nice -$niceval bash -x "$td/run" \> "$td/log" 2\>\&1 \
                \< /dev/null >> "$basefile.jobs"
            if [ "$niceval" -gt "$nicemin" ]
            then
                niceval=`expr $niceval - 1`
            else
                niceval="$nicemax"
            fi
        done < "$keepfile"

        # Wait for all the pieces to be transcoded and join them together.
        # Reverse the order of the jobs so lots of seeking happens in
        # parallel up front.

        if multijob -r $jobcount < "$basefile.jobs"
        then
            # Transcoding the segments worked, put them all together and
            # compress the audio.

            rm -f "$basefile.avi" "$basefile.mp4" "$basefile.jpg" \
                "$basefile.webm" "$basefile.html" "temp-$basefile.avi"

            # I found that sometimes weirdly timestamped recordings wind up
            # with better audio sync if I leave the uncompressed audio in
            # the concat results and then recompress the whole file.

            ffmpeg -nostats -f concat -i "$basefile.concat" -vcodec copy \
                -acodec copy \
                "temp-$basefile.avi" < /dev/null
            if [ -f "temp-$basefile.avi" ]
            then
                ffmpeg -nostats -i "temp-$basefile.avi" -vcodec copy \
                    -acodec libmp3lame -ab 128k -ac 2 \
                    "$basefile.avi" < /dev/null

                # While we are here, create an mp4 with AAC audio as well
                # to be more html5 compliant (maybe switch to this entirely
                # if it turns out well, but the need for the experimental
                # option bugs me at the moment :-).

                ffmpeg -nostats -i "temp-$basefile.avi" -vcodec copy \
                    -strict experimental -c:a aac -cutoff 15000 -b:a 128k \
                    "$basefile.mp4" < /dev/null
            fi
            if [ -f "$basefile.avi" ]
            then
                make-airdate-link "$basefile.avi"
                rm -f "$basefile.concat"
                rm -f "temp-$basefile.avi"
                rm -f "$basefile.jobs"
                rm -f "$basefile-part"*.mpg
                rm -f oneseg.tmp runseg.temp
            fi
        fi
    else
        echo "Keep file $keepfile does not exist." 1>&2
        exit 2
    fi
else
    echo "Input file $infile does not exist." 1>&2
    exit 2
fi
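
#
# Example invocation (the file name and the installed script name here are
# hypothetical): transcode the kept segments of show.mpg with 6 parallel
# jobs, splitting the source .mpg into part files up front:
#
#     transcode-keep -F -j 6 show.mpg
#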