Merge branch 'master' into m3u-reduc

Commit 0dae15a2c5 by Trek H, 2020-01-28 15:38:18 +10:30
10 changed files with 290 additions and 344 deletions


@ -1,195 +1,33 @@
/**
* HLS config
*/
/*
AUTHOR
Trek Hopton <trek@ausocean.org>
LICENSE
This file is Copyright (C) 2020 the Australian Ocean Lab (AusOcean)
It is free software: you can redistribute it and/or modify them
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
It is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License in gpl.txt.
If not, see http://www.gnu.org/licenses.
For hls.js Copyright notice and license, see LICENSE file.
*/
import AbrController from './controller/abr-controller';
import BufferController from './controller/buffer-controller';
import CapLevelController from './controller/cap-level-controller';
import FPSController from './controller/fps-controller';
import XhrLoader from './utils/xhr-loader';
// import FetchLoader from './utils/fetch-loader';
import AudioTrackController from './controller/audio-track-controller';
import AudioStreamController from './controller/audio-stream-controller';
import * as Cues from './utils/cues';
import TimelineController from './controller/timeline-controller';
import SubtitleTrackController from './controller/subtitle-track-controller';
import { SubtitleStreamController } from './controller/subtitle-stream-controller';
import EMEController from './controller/eme-controller';
import { requestMediaKeySystemAccess, MediaKeyFunc } from './utils/mediakeys-helper';
type ABRControllerConfig = {
abrEwmaFastLive: number,
abrEwmaSlowLive: number,
abrEwmaFastVoD: number,
abrEwmaSlowVoD: number,
abrEwmaDefaultEstimate: number,
abrBandWidthFactor: number,
abrBandWidthUpFactor: number,
abrMaxWithRealBitrate: boolean,
maxStarvationDelay: number,
maxLoadingDelay: number,
};
export type BufferControllerConfig = {
appendErrorMaxRetry: number,
liveDurationInfinity: boolean,
liveBackBufferLength: number,
};
type CapLevelControllerConfig = {
capLevelToPlayerSize: boolean
};
export type EMEControllerConfig = {
licenseXhrSetup?: (xhr: XMLHttpRequest, url: string) => void,
emeEnabled: boolean,
widevineLicenseUrl?: string,
requestMediaKeySystemAccessFunc: MediaKeyFunc | null,
};
type FragmentLoaderConfig = {
fLoader: any, // TODO(typescript-loader): Once Loader is typed fill this in
fragLoadingTimeOut: number,
fragLoadingMaxRetry: number,
fragLoadingRetryDelay: number,
fragLoadingMaxRetryTimeout: number,
};
type FPSControllerConfig = {
capLevelOnFPSDrop: boolean,
fpsDroppedMonitoringPeriod: number,
fpsDroppedMonitoringThreshold: number,
};
type LevelControllerConfig = {
startLevel?: number
};
type MP4RemuxerConfig = {
stretchShortVideoTrack: boolean,
maxAudioFramesDrift: number,
};
type PlaylistLoaderConfig = {
pLoader: any, // TODO(typescript-loader): Once Loader is typed fill this in
manifestLoadingTimeOut: number,
manifestLoadingMaxRetry: number,
manifestLoadingRetryDelay: number,
manifestLoadingMaxRetryTimeout: number,
levelLoadingTimeOut: number,
levelLoadingMaxRetry: number,
levelLoadingRetryDelay: number,
levelLoadingMaxRetryTimeout: number
};
type StreamControllerConfig = {
autoStartLoad: boolean,
startPosition: number,
defaultAudioCodec?: string,
initialLiveManifestSize: number,
maxBufferLength: number,
maxBufferSize: number,
maxBufferHole: number,
lowBufferWatchdogPeriod: number,
highBufferWatchdogPeriod: number,
nudgeOffset: number,
nudgeMaxRetry: number,
maxFragLookUpTolerance: number,
liveSyncDurationCount: number,
liveMaxLatencyDurationCount: number,
liveSyncDuration?: number,
liveMaxLatencyDuration?: number,
maxMaxBufferLength: number,
startFragPrefetch: boolean,
};
type TimelineControllerConfig = {
cueHandler: any, // TODO(typescript-cues): Type once file is done
enableCEA708Captions: boolean,
enableWebVTT: boolean,
captionsTextTrack1Label: string,
captionsTextTrack1LanguageCode: string,
captionsTextTrack2Label: string,
captionsTextTrack2LanguageCode: string,
};
type TSDemuxerConfig = {
forceKeyFrameOnDiscontinuity: boolean,
};
export type HlsConfig =
{
debug: boolean,
enableWorker: boolean,
enableSoftwareAES: boolean,
minAutoBitrate: number,
loader: any, // TODO(typescript-xhrloader): Type once XHR is done
xhrSetup?: (xhr: XMLHttpRequest, url: string) => void,
// Alt Audio
audioStreamController?: any, // TODO(typescript-audiostreamcontroller): Type once file is done
audioTrackController?: any, // TODO(typescript-audiotrackcontroller): Type once file is done
// Subtitle
subtitleStreamController?: any, // TODO(typescript-subtitlestreamcontroller): Type once file is done
subtitleTrackController?: any, // TODO(typescript-subtitletrackcontroller): Type once file is done
timelineController?: any, // TODO(typescript-timelinecontroller): Type once file is done
// EME
emeController?: typeof EMEController,
abrController: any, // TODO(typescript-abrcontroller): Type once file is done
bufferController: typeof BufferController,
capLevelController: any, // TODO(typescript-caplevelcontroller): Type once file is done
fpsController: any, // TODO(typescript-fpscontroller): Type once file is done
} &
ABRControllerConfig &
BufferControllerConfig &
CapLevelControllerConfig &
EMEControllerConfig &
FPSControllerConfig &
FragmentLoaderConfig &
LevelControllerConfig &
MP4RemuxerConfig &
PlaylistLoaderConfig &
StreamControllerConfig &
Partial<TimelineControllerConfig> &
TSDemuxerConfig;
import XhrLoader from './utils/xhr-loader.js';
// If possible, keep hlsDefaultConfig shallow
// It is cloned whenever a new Hls instance is created, by keeping the config
// shallow the properties are cloned, and we don't end up manipulating the default
export const hlsDefaultConfig: HlsConfig = {
autoStartLoad: true, // used by stream-controller
export const hlsDefaultConfig = {
startPosition: -1, // used by stream-controller
defaultAudioCodec: void 0, // used by stream-controller
debug: false, // used by logger
capLevelOnFPSDrop: false, // used by fps-controller
capLevelToPlayerSize: false, // used by cap-level-controller
initialLiveManifestSize: 1, // used by stream-controller
maxBufferLength: 30, // used by stream-controller
maxBufferSize: 60 * 1000 * 1000, // used by stream-controller
maxBufferHole: 0.5, // used by stream-controller
lowBufferWatchdogPeriod: 0.5, // used by stream-controller
highBufferWatchdogPeriod: 3, // used by stream-controller
nudgeOffset: 0.1, // used by stream-controller
nudgeMaxRetry: 3, // used by stream-controller
maxFragLookUpTolerance: 0.25, // used by stream-controller
liveSyncDurationCount: 3, // used by stream-controller
liveMaxLatencyDurationCount: Infinity, // used by stream-controller
liveSyncDuration: void 0, // used by stream-controller
liveMaxLatencyDuration: void 0, // used by stream-controller
liveDurationInfinity: false, // used by buffer-controller
liveBackBufferLength: Infinity, // used by buffer-controller
maxMaxBufferLength: 600, // used by stream-controller
enableWorker: true, // used by demuxer
enableSoftwareAES: true, // used by decrypter
manifestLoadingTimeOut: 10000, // used by playlist-loader
manifestLoadingMaxRetry: 1, // used by playlist-loader
manifestLoadingRetryDelay: 1000, // used by playlist-loader
@ -203,62 +41,8 @@ export const hlsDefaultConfig: HlsConfig = {
fragLoadingMaxRetry: 6, // used by fragment-loader
fragLoadingRetryDelay: 1000, // used by fragment-loader
fragLoadingMaxRetryTimeout: 64000, // used by fragment-loader
startFragPrefetch: false, // used by stream-controller
fpsDroppedMonitoringPeriod: 5000, // used by fps-controller
fpsDroppedMonitoringThreshold: 0.2, // used by fps-controller
appendErrorMaxRetry: 3, // used by buffer-controller
loader: XhrLoader,
// loader: FetchLoader,
fLoader: void 0, // used by fragment-loader
pLoader: void 0, // used by playlist-loader
xhrSetup: void 0, // used by xhr-loader
licenseXhrSetup: void 0, // used by eme-controller
// fetchSetup: void 0,
abrController: AbrController,
bufferController: BufferController,
capLevelController: CapLevelController,
fpsController: FPSController,
stretchShortVideoTrack: false, // used by mp4-remuxer
maxAudioFramesDrift: 1, // used by mp4-remuxer
forceKeyFrameOnDiscontinuity: true, // used by ts-demuxer
abrEwmaFastLive: 3, // used by abr-controller
abrEwmaSlowLive: 9, // used by abr-controller
abrEwmaFastVoD: 3, // used by abr-controller
abrEwmaSlowVoD: 9, // used by abr-controller
abrEwmaDefaultEstimate: 5e5, // 500 kbps // used by abr-controller
abrBandWidthFactor: 0.95, // used by abr-controller
abrBandWidthUpFactor: 0.7, // used by abr-controller
abrMaxWithRealBitrate: false, // used by abr-controller
maxStarvationDelay: 4, // used by abr-controller
maxLoadingDelay: 4, // used by abr-controller
minAutoBitrate: 0, // used by hls
emeEnabled: false, // used by eme-controller
widevineLicenseUrl: void 0, // used by eme-controller
requestMediaKeySystemAccessFunc: requestMediaKeySystemAccess, // used by eme-controller
// Dynamic Modules
...timelineConfig(),
subtitleStreamController: (__USE_SUBTITLES__) ? SubtitleStreamController : void 0,
subtitleTrackController: (__USE_SUBTITLES__) ? SubtitleTrackController : void 0,
timelineController: (__USE_SUBTITLES__) ? TimelineController : void 0,
audioStreamController: (__USE_ALT_AUDIO__) ? AudioStreamController : void 0,
audioTrackController: (__USE_ALT_AUDIO__) ? AudioTrackController : void 0,
emeController: (__USE_EME_DRM__) ? EMEController : void 0
};
function timelineConfig (): TimelineControllerConfig {
if (!__USE_SUBTITLES__) {
// intentionally doing this over returning Partial<TimelineControllerConfig> above
// this has the added nice property of still requiring the object below to completely define all props.
return {} as any;
}
return {
cueHandler: Cues, // used by timeline-controller
enableCEA708Captions: true, // used by timeline-controller
enableWebVTT: true, // used by timeline-controller
captionsTextTrack1Label: 'English', // used by timeline-controller
captionsTextTrack1LanguageCode: 'en', // used by timeline-controller
captionsTextTrack2Label: 'Spanish', // used by timeline-controller
captionsTextTrack2LanguageCode: 'es' // used by timeline-controller
};
}
};


@ -1,110 +1,55 @@
/*
AUTHOR
Trek Hopton <trek@ausocean.org>
LICENSE
This file is Copyright (C) 2020 the Australian Ocean Lab (AusOcean)
It is free software: you can redistribute it and/or modify them
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
It is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License in gpl.txt.
If not, see http://www.gnu.org/licenses.
For hls.js Copyright notice and license, see LICENSE file.
*/
/**
* @readonly
* @enum {string}
*/
const HlsEvents = {
// fired before MediaSource is attaching to media element - data: { media }
MEDIA_ATTACHING: 'hlsMediaAttaching',
// fired when MediaSource has been successfully attached to media element - data: { }
MEDIA_ATTACHED: 'hlsMediaAttached',
// fired before detaching MediaSource from media element - data: { }
MEDIA_DETACHING: 'hlsMediaDetaching',
// fired when MediaSource has been detached from media element - data: { }
MEDIA_DETACHED: 'hlsMediaDetached',
// fired when the buffer is going to be reset - data: { }
BUFFER_RESET: 'hlsBufferReset',
// fired when we know about the codecs that we need buffers for to push into - data: {tracks : { container, codec, levelCodec, initSegment, metadata }}
BUFFER_CODECS: 'hlsBufferCodecs',
// fired when sourcebuffers have been created - data: { tracks : tracks }
BUFFER_CREATED: 'hlsBufferCreated',
// fired when we append a segment to the buffer - data: { segment: segment object }
BUFFER_APPENDING: 'hlsBufferAppending',
// fired when we are done with appending a media segment to the buffer - data : { parent : segment parent that triggered BUFFER_APPENDING, pending : nb of segments waiting for appending for this segment parent}
BUFFER_APPENDED: 'hlsBufferAppended',
// fired when the stream is finished and we want to notify the media buffer that there will be no more data - data: { }
BUFFER_EOS: 'hlsBufferEos',
// fired when the media buffer should be flushed - data { startOffset, endOffset }
BUFFER_FLUSHING: 'hlsBufferFlushing',
// fired when the media buffer has been flushed - data: { }
BUFFER_FLUSHED: 'hlsBufferFlushed',
// fired to signal that a manifest loading starts - data: { url : manifestURL}
MANIFEST_LOADING: 'hlsManifestLoading',
// fired after manifest has been loaded - data: { levels : [available quality levels], audioTracks : [ available audio tracks], url : manifestURL, stats : { trequest, tfirst, tload, mtime}}
MANIFEST_LOADED: 'hlsManifestLoaded',
// fired after manifest has been parsed - data: { levels : [available quality levels], firstLevel : index of first quality level appearing in Manifest}
MANIFEST_PARSED: 'hlsManifestParsed',
// fired when a level switch is requested - data: { level : id of new level }
LEVEL_SWITCHING: 'hlsLevelSwitching',
// fired when a level switch is effective - data: { level : id of new level }
LEVEL_SWITCHED: 'hlsLevelSwitched',
// fired when a level playlist loading starts - data: { url : level URL, level : id of level being loaded}
LEVEL_LOADING: 'hlsLevelLoading',
// fired when a level playlist loading finishes - data: { details : levelDetails object, level : id of loaded level, stats : { trequest, tfirst, tload, mtime} }
LEVEL_LOADED: 'hlsLevelLoaded',
// fired when a level's details have been updated based on previous details, after it has been loaded - data: { details : levelDetails object, level : id of updated level }
LEVEL_UPDATED: 'hlsLevelUpdated',
// fired when a level's PTS information has been updated after parsing a fragment - data: { details : levelDetails object, level : id of updated level, drift: PTS drift observed when parsing last fragment }
LEVEL_PTS_UPDATED: 'hlsLevelPtsUpdated',
// fired to notify that audio track lists have been updated - data: { audioTracks : audioTracks }
AUDIO_TRACKS_UPDATED: 'hlsAudioTracksUpdated',
// fired when an audio track switching is requested - data: { id : audio track id }
AUDIO_TRACK_SWITCHING: 'hlsAudioTrackSwitching',
// fired when an audio track switch actually occurs - data: { id : audio track id }
AUDIO_TRACK_SWITCHED: 'hlsAudioTrackSwitched',
// fired when an audio track loading starts - data: { url : audio track URL, id : audio track id }
AUDIO_TRACK_LOADING: 'hlsAudioTrackLoading',
// fired when an audio track loading finishes - data: { details : levelDetails object, id : audio track id, stats : { trequest, tfirst, tload, mtime } }
AUDIO_TRACK_LOADED: 'hlsAudioTrackLoaded',
// fired to notify that subtitle track lists have been updated - data: { subtitleTracks : subtitleTracks }
SUBTITLE_TRACKS_UPDATED: 'hlsSubtitleTracksUpdated',
// fired when a subtitle track switch occurs - data: { id : subtitle track id }
SUBTITLE_TRACK_SWITCH: 'hlsSubtitleTrackSwitch',
// fired when a subtitle track loading starts - data: { url : subtitle track URL, id : subtitle track id }
SUBTITLE_TRACK_LOADING: 'hlsSubtitleTrackLoading',
// fired when a subtitle track loading finishes - data: { details : levelDetails object, id : subtitle track id, stats : { trequest, tfirst, tload, mtime } }
SUBTITLE_TRACK_LOADED: 'hlsSubtitleTrackLoaded',
// fired when a subtitle fragment has been processed - data: { success : boolean, frag : the processed frag }
SUBTITLE_FRAG_PROCESSED: 'hlsSubtitleFragProcessed',
// fired when the first timestamp is found - data: { id : demuxer id, initPTS: initPTS, frag : fragment object }
INIT_PTS_FOUND: 'hlsInitPtsFound',
// fired when a fragment loading starts - data: { frag : fragment object }
FRAG_LOADING: 'hlsFragLoading',
// fired when a fragment loading is progressing - data: { frag : fragment object, { trequest, tfirst, loaded } }
FRAG_LOAD_PROGRESS: 'hlsFragLoadProgress',
// Identifier for fragment load aborting for emergency switch down - data: { frag : fragment object }
FRAG_LOAD_EMERGENCY_ABORTED: 'hlsFragLoadEmergencyAborted',
// fired when a fragment loading is completed - data: { frag : fragment object, payload : fragment payload, stats : { trequest, tfirst, tload, length } }
FRAG_LOADED: 'hlsFragLoaded',
// fired when a fragment has finished decrypting - data: { id : demuxer id, frag: fragment object, payload : fragment payload, stats : { tstart, tdecrypt } }
FRAG_DECRYPTED: 'hlsFragDecrypted',
// fired when Init Segment has been extracted from fragment - data: { id : demuxer id, frag: fragment object, moov : moov MP4 box, codecs : codecs found while parsing fragment }
FRAG_PARSING_INIT_SEGMENT: 'hlsFragParsingInitSegment',
// fired when parsing sei text is completed - data: { id : demuxer id, frag: fragment object, samples : [ sei samples pes ] }
FRAG_PARSING_USERDATA: 'hlsFragParsingUserdata',
// fired when parsing id3 is completed - data: { id : demuxer id, frag: fragment object, samples : [ id3 samples pes ] }
FRAG_PARSING_METADATA: 'hlsFragParsingMetadata',
// fired when data have been extracted from fragment - data: { id : demuxer id, frag: fragment object, data1 : moof MP4 box or TS fragments, data2 : mdat MP4 box or null}
FRAG_PARSING_DATA: 'hlsFragParsingData',
// fired when fragment parsing is completed - data: { id : demuxer id, frag: fragment object }
FRAG_PARSED: 'hlsFragParsed',
// fired when fragment remuxed MP4 boxes have all been appended into SourceBuffer - data: { id : demuxer id, frag : fragment object, stats : { trequest, tfirst, tload, tparsed, tbuffered, length, bwEstimate } }
FRAG_BUFFERED: 'hlsFragBuffered',
// fired when fragment matching with current media position is changing - data : { id : demuxer id, frag : fragment object }
FRAG_CHANGED: 'hlsFragChanged',
// Identifier for a FPS drop event - data: { currentDropped, currentDecoded, totalDroppedFrames }
FPS_DROP: 'hlsFpsDrop',
// triggered when FPS drop triggers auto level capping - data: { level, droppedlevel }
FPS_DROP_LEVEL_CAPPING: 'hlsFpsDropLevelCapping',
// Identifier for an error event - data: { type : error type, details : error details, fatal : if true, hls.js cannot/will not try to recover, if false, hls.js will try to recover, other error-specific data }
ERROR: 'hlsError',
// fired when hls.js instance starts destroying. Different from MEDIA_DETACHED as one could want to detach and reattach a media to the instance of hls.js to handle mid-rolls for example - data: { }
DESTROYING: 'hlsDestroying',
// fired when a decrypt key loading starts - data: { frag : fragment object }
KEY_LOADING: 'hlsKeyLoading',
// fired when a decrypt key loading is completed - data: { frag : fragment object, payload : key payload, stats : { trequest, tfirst, tload, length } }
KEY_LOADED: 'hlsKeyLoaded',
// fired upon stream controller state transitions - data: { previousState, nextState }
STREAM_STATE_TRANSITION: 'hlsStreamStateTransition'
FRAG_LOADED: 'hlsFragLoaded'
};
export default HlsEvents;


@ -128,6 +128,7 @@ func handleFlags() config.Config {
httpAddressPtr = flag.String("HttpAddress", "", "Destination address of http posts")
verticalFlipPtr = flag.Bool("VerticalFlip", false, "Flip video vertically: Yes, No")
horizontalFlipPtr = flag.Bool("HorizontalFlip", false, "Flip video horizontally: Yes, No")
loopPtr = flag.Bool("Loop", false, "Loop input source on completion (true/false)")
bitratePtr = flag.Uint("Bitrate", 0, "Bitrate of recorded video")
heightPtr = flag.Uint("Height", 0, "Height in pixels")
widthPtr = flag.Uint("Width", 0, "Width in pixels")
@ -138,6 +139,7 @@ func handleFlags() config.Config {
saturationPtr = flag.Int("Saturation", 0, "Set Saturation. (-100 - 100)")
exposurePtr = flag.String("Exposure", "auto", "Set exposure mode. ("+strings.Join(raspivid.ExposureModes[:], ",")+")")
autoWhiteBalancePtr = flag.String("Awb", "auto", "Set automatic white balance mode. ("+strings.Join(raspivid.AutoWhiteBalanceModes[:], ",")+")")
fileFPSPtr = flag.Int("FileFPS", 0, "File source frame processing FPS")
// Audio specific flags.
sampleRatePtr = flag.Int("SampleRate", 48000, "Sample rate of recorded audio")
@ -254,6 +256,8 @@ func handleFlags() config.Config {
netsender.ConfigFile = *configFilePtr
}
cfg.FileFPS = *fileFPSPtr
cfg.Loop = *loopPtr
cfg.CameraIP = *cameraIPPtr
cfg.Rotation = *rotationPtr
cfg.HorizontalFlip = *horizontalFlipPtr

filter/difference.go (new file, 129 lines added)

@ -0,0 +1,129 @@
// +build !circleci
/*
DESCRIPTION
A filter that detects motion and discards frames without motion. The
algorithm calculates the absolute difference for each pixel between
two frames, then finds the mean. If the mean is above a given threshold,
then it is considered motion.
AUTHORS
Scott Barnard <scott@ausocean.org>
LICENSE
difference.go is Copyright (C) 2020 the Australian Ocean Lab (AusOcean)
It is free software: you can redistribute it and/or modify them
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
It is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
in gpl.txt. If not, see http://www.gnu.org/licenses.
*/
package filter
import (
"fmt"
"image"
"image/color"
"io"
"gocv.io/x/gocv"
)
// Difference is a filter that provides basic motion detection. Difference calculates
// the absolute difference for each pixel between two frames, then finds the mean. If
// the mean is above a given threshold, then it is considered motion.
type Difference struct {
dst io.WriteCloser
thresh float64
prev gocv.Mat
debug bool
windows []*gocv.Window
}
// NewDifference returns a pointer to a new Difference struct.
func NewDifference(dst io.WriteCloser, debug bool, threshold float64) *Difference {
var windows []*gocv.Window
if debug {
windows = []*gocv.Window{gocv.NewWindow("Diff: Bounding boxes"), gocv.NewWindow("Diff: Motion")}
}
return &Difference{dst, threshold, gocv.NewMat(), debug, windows}
}
// Implements io.Closer.
// Close frees the resources used by gocv; this must be done manually because
// gocv wraps C code via cgo, so its memory is not managed by Go's garbage collector.
func (d *Difference) Close() error {
d.prev.Close()
for _, window := range d.windows {
window.Close()
}
return nil
}
// Implements io.Writer.
// Write applies the motion filter to the video stream: only frames containing
// motion are written to the destination encoder; frames without motion are discarded.
func (d *Difference) Write(f []byte) (int, error) {
if d.prev.Empty() {
var err error
d.prev, err = gocv.IMDecode(f, gocv.IMReadColor)
if err != nil {
return 0, err
}
return len(f), nil
}
img, err := gocv.IMDecode(f, gocv.IMReadColor)
defer img.Close()
if err != nil {
return 0, err
}
imgDelta := gocv.NewMat()
defer imgDelta.Close()
// Separate foreground and background.
gocv.AbsDiff(img, d.prev, &imgDelta)
gocv.CvtColor(imgDelta, &imgDelta, gocv.ColorBGRToGray)
mean := imgDelta.Mean().Val1
// Update History.
d.prev = img.Clone()
// Draw debug information.
if d.debug {
if mean >= d.thresh {
gocv.PutText(
&img,
fmt.Sprintf("motion - mean:%f", mean),
image.Pt(32, 32),
gocv.FontHersheyPlain,
2.0,
color.RGBA{255, 0, 0, 0},
2,
)
}
d.windows[0].IMShow(img)
d.windows[1].IMShow(imgDelta)
d.windows[0].WaitKey(1)
}
// Don't write to destination if there is no motion.
if mean < d.thresh {
return len(f), nil
}
// Write to destination.
return d.dst.Write(f)
}
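
The filter slots in anywhere an io.WriteCloser is accepted, which is how revid wires it into its pipeline later in this diff. A minimal usage sketch, assuming only the NewDifference, Write and Close signatures shown above; the output path, frame file and threshold of 10 are illustrative, and gocv must be installed:

package main

import (
    "io/ioutil"
    "os"

    "bitbucket.org/ausocean/av/filter"
)

func main() {
    // Hypothetical destination standing in for the encoder that would
    // normally receive the frames containing motion.
    dst, err := os.Create("motion.mjpeg")
    if err != nil {
        panic(err)
    }
    defer dst.Close()

    // The threshold is the mean absolute pixel difference treated as motion;
    // 10 is an illustrative value (DiffThreshold in the revid config).
    diff := filter.NewDifference(dst, false, 10)
    defer diff.Close()

    // Each Write receives one encoded frame. The first frame only primes the
    // detector; later frames are forwarded to dst only if they differ enough
    // from the previous frame.
    frame, err := ioutil.ReadFile("frame.jpg")
    if err != nil {
        panic(err)
    }
    if _, err := diff.Write(frame); err != nil {
        panic(err)
    }
}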


@ -36,6 +36,12 @@ func NewMOGFilter(dst io.WriteCloser, area, threshold float64, history int, debu
return &NoOp{dst: dst}
}
// NewKNNFilter returns a pointer to a new NoOp struct for testing purposes only.
func NewKNNFilter(dst io.WriteCloser, area, threshold float64, history, kernelSize int, debug bool, hf int) *NoOp {
return &NoOp{dst: dst}
}
// NewDifference returns a pointer to a new NoOp struct for testing purposes only.
func NewDifference(dst io.WriteCloser, debug bool, threshold float64) *NoOp {
return &NoOp{dst: dst}
}

go.mod (2 changes)

@ -4,7 +4,7 @@ go 1.13
require (
bitbucket.org/ausocean/iot v1.2.13
bitbucket.org/ausocean/utils v1.2.12
bitbucket.org/ausocean/utils v1.2.13
github.com/Comcast/gots v0.0.0-20190305015453-8d56e473f0f7
github.com/go-audio/audio v0.0.0-20181013203223-7b2a6ca21480
github.com/go-audio/wav v0.0.0-20181013172942-de841e69b884

go.sum (2 changes)

@ -12,6 +12,8 @@ bitbucket.org/ausocean/utils v1.2.11 h1:zA0FOaPjN960ryp8PKCkV5y50uWBYrIxCVnXjwbv
bitbucket.org/ausocean/utils v1.2.11/go.mod h1:uXzX9z3PLemyURTMWRhVI8uLhPX4uuvaaO85v2hcob8=
bitbucket.org/ausocean/utils v1.2.12 h1:VnskjWTDM475TnQRhBQE0cNp9D6Y6OELrd4UkD2VVIQ=
bitbucket.org/ausocean/utils v1.2.12/go.mod h1:uXzX9z3PLemyURTMWRhVI8uLhPX4uuvaaO85v2hcob8=
bitbucket.org/ausocean/utils v1.2.13 h1:tUaIywtoMc1+zl1GCVQokX4mL5X7LNHX5O51AgAPrWA=
bitbucket.org/ausocean/utils v1.2.13/go.mod h1:uXzX9z3PLemyURTMWRhVI8uLhPX4uuvaaO85v2hcob8=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Comcast/gots v0.0.0-20190305015453-8d56e473f0f7 h1:LdOc9B9Bj6LEsKiXShkLA3/kpxXb6LJpH+ekU2krbzw=


@ -86,6 +86,7 @@ const (
defaultAudioInputCodec = codecutil.ADPCM
defaultPSITime = 2
defaultMotionInterval = 5
defaultFileFPS = 0
// Ring buffer defaults.
defaultRBMaxElements = 10000
@ -126,6 +127,7 @@ const (
FilterMOG
FilterVariableFPS
FilterKNN
FilterDifference
)
// OS names
@ -297,6 +299,14 @@ type Config struct {
MOGMinArea float64 // Used to ignore small areas of motion detection.
MOGThreshold float64 // Intensity value from the KNN motion detection algorithm that is considered motion.
MOGHistory uint // Length of MOG filter's history
// If true will restart reading of input after an io.EOF.
Loop bool
// Defines the rate at which frames from a file source are processed.
FileFPS int
// Difference filter parameters.
DiffThreshold float64 // Intensity value from the Difference motion detection algorithm that is considered motion.
}
// TypeData contains information about all of the variables that
@ -310,8 +320,10 @@ var TypeData = map[string]string{
"CameraIP": "string",
"CBR": "bool",
"ClipDuration": "uint",
"DiffThreshold": "float",
"Exposure": "enum:auto,night,nightpreview,backlight,spotlight,sports,snow,beach,verylong,fixedfps,antishake,fireworks",
"Filters": "enums:NoOp,MOG,VariableFPS,KNN",
"FileFPS": "int",
"Filters": "enums:NoOp,MOG,VariableFPS,KNN,Difference",
"FrameRate": "uint",
"Height": "uint",
"HorizontalFlip": "bool",
@ -324,20 +336,21 @@ var TypeData = map[string]string{
"KNNMinArea": "float",
"KNNThreshold": "float",
"logging": "enum:Debug,Info,Warning,Error,Fatal",
"Loop": "bool",
"MinFPS": "float",
"MinFrames": "uint",
"mode": "enum:Normal,Paused,Burst",
"mode": "enum:Normal,Paused,Burst,Loop",
"MOGHistory": "uint",
"MOGMinArea": "float",
"MOGThreshold": "float",
"MotionInterval": "int",
"RBCapacity": "uint",
"RBMaxElements": "uint",
"RBWriteTimeout": "uint",
"Output": "enum:File,Http,Rtmp,Rtp",
"OutputPath": "string",
"Outputs": "enums:File,Http,Rtmp,Rtp",
"Quantization": "uint",
"RBCapacity": "uint",
"RBMaxElements": "uint",
"RBWriteTimeout": "uint",
"Rotation": "uint",
"RTMPURL": "string",
"RTPAddress": "string",
@ -521,6 +534,11 @@ func (c *Config) Validate() error {
}
}
if c.FileFPS <= 0 || (c.FileFPS > 0 && c.Input != InputFile) {
c.Logger.Log(logger.Info, pkg+"FileFPS bad or unset, defaulting", "FileFPS", defaultFileFPS)
c.FileFPS = defaultFileFPS
}
return nil
}


@ -7,6 +7,7 @@ AUTHORS
Alan Noble <alan@ausocean.org>
Dan Kortschak <dan@ausocean.org>
Trek Hopton <trek@ausocean.org>
Scott Barnard <scott@ausocean.org>
LICENSE
revid is Copyright (C) 2017-2020 the Australian Ocean Lab (AusOcean)
@ -52,6 +53,7 @@ import (
"bitbucket.org/ausocean/av/filter"
"bitbucket.org/ausocean/av/revid/config"
"bitbucket.org/ausocean/iot/pi/netsender"
"bitbucket.org/ausocean/utils/bitrate"
"bitbucket.org/ausocean/utils/ioext"
"bitbucket.org/ausocean/utils/logger"
"bitbucket.org/ausocean/utils/vring"
@ -115,6 +117,9 @@ type Revid struct {
// err will channel errors from revid routines to the handle errors routine.
err chan error
// bitrate is used for bitrate calculations.
bitrate bitrate.Calculator
}
// New returns a pointer to a new Revid with the desired configuration, and/or
@ -148,10 +153,8 @@ func (r *Revid) handleErrors() {
}
// Bitrate returns the result of the most recent bitrate check.
//
// TODO: get this working again.
func (r *Revid) Bitrate() int {
return -1
return r.bitrate.Bitrate()
}
// reset swaps the current config of a Revid with the passed
@ -266,14 +269,14 @@ func (r *Revid) setupPipeline(mtsEnc func(dst io.WriteCloser, rate float64) (io.
return fmt.Errorf("could not initialise MTS ring buffer: %w", err)
}
w = newMtsSender(
newHttpSender(r.ns, r.cfg.Logger.Log),
newHttpSender(r.ns, r.cfg.Logger.Log, r.bitrate.Report),
r.cfg.Logger.Log,
rb,
r.cfg.ClipDuration,
)
mtsSenders = append(mtsSenders, w)
case config.OutputRTP:
w, err := newRtpSender(r.cfg.RTPAddress, r.cfg.Logger.Log, r.cfg.FrameRate)
w, err := newRtpSender(r.cfg.RTPAddress, r.cfg.Logger.Log, r.cfg.FrameRate, r.bitrate.Report)
if err != nil {
r.cfg.Logger.Log(logger.Warning, pkg+"rtp connect error", "error", err.Error())
}
@ -295,6 +298,7 @@ func (r *Revid) setupPipeline(mtsEnc func(dst io.WriteCloser, rate float64) (io.
rtmpConnectionMaxTries,
rb,
r.cfg.Logger.Log,
r.bitrate.Report,
)
if err != nil {
r.cfg.Logger.Log(logger.Warning, pkg+"rtmp connect error", "error", err.Error())
@ -342,6 +346,8 @@ func (r *Revid) setupPipeline(mtsEnc func(dst io.WriteCloser, rate float64) (io.
r.filters[i] = filter.NewVariableFPSFilter(dst, r.cfg.MinFPS, filter.NewMOGFilter(dst, r.cfg.MOGMinArea, r.cfg.MOGThreshold, int(r.cfg.MOGHistory), r.cfg.ShowWindows, r.cfg.MotionInterval))
case config.FilterKNN:
r.filters[i] = filter.NewKNNFilter(dst, r.cfg.KNNMinArea, r.cfg.KNNThreshold, int(r.cfg.KNNHistory), int(r.cfg.KNNKernel), r.cfg.ShowWindows, r.cfg.MotionInterval)
case config.FilterDifference:
r.filters[i] = filter.NewDifference(dst, r.cfg.ShowWindows, r.cfg.DiffThreshold)
default:
panic("Undefined Filter")
}
@ -428,13 +434,14 @@ func (r *Revid) Start() error {
return err
}
err = r.input.Start()
if err != nil {
return fmt.Errorf("could not start input device: %w", err)
// Calculate delay between frames based on FileFPS.
d := time.Duration(0)
if r.cfg.FileFPS != 0 {
d = time.Duration(1000/r.cfg.FileFPS) * time.Millisecond
}
r.wg.Add(1)
go r.processFrom(r.input, 0)
go r.processFrom(r.input, d)
r.running = true
return nil
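
The delay derived from FileFPS above is plain integer arithmetic in milliseconds. A standalone sketch of the same mapping, using only the standard library, with 25 FPS as an illustrative value:

package main

import (
    "fmt"
    "time"
)

func main() {
    fileFPS := 25 // Illustrative value; 0 leaves the delay at zero, i.e. no pacing.
    d := time.Duration(0)
    if fileFPS != 0 {
        d = time.Duration(1000/fileFPS) * time.Millisecond
    }
    fmt.Println(d) // 40ms between frames for a 25 FPS file source.
}

Note that the division is integer, so rates that do not divide 1000 evenly are rounded down; 30 FPS, for example, yields a 33ms delay.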
@ -662,7 +669,7 @@ func (r *Revid) Update(vars map[string]string) error {
}
case "Filters":
filters := strings.Split(value, ",")
m := map[string]int{"NoOp": config.FilterNoOp, "MOG": config.FilterMOG, "VariableFPS": config.FilterVariableFPS, "KNN": config.FilterKNN}
m := map[string]int{"NoOp": config.FilterNoOp, "MOG": config.FilterMOG, "VariableFPS": config.FilterVariableFPS, "KNN": config.FilterKNN, "Difference": config.FilterDifference}
r.cfg.Filters = make([]int, len(filters))
for i, filter := range filters {
v, ok := m[filter]
@ -810,6 +817,13 @@ func (r *Revid) Update(vars map[string]string) error {
break
}
r.cfg.KNNThreshold = v
case "DiffThreshold":
v, err := strconv.ParseFloat(value, 64)
if err != nil {
r.cfg.Logger.Log(logger.Warning, pkg+"invalid DiffThreshold var", "value", value)
break
}
r.cfg.DiffThreshold = v
case "KNNKernel":
v, err := strconv.Atoi(value)
if err != nil {
@ -845,6 +859,18 @@ func (r *Revid) Update(vars map[string]string) error {
break
}
r.cfg.MOGHistory = uint(v)
case "FileFPS":
v, err := strconv.Atoi(value)
if err != nil {
r.cfg.Logger.Log(logger.Warning, pkg+"invalid FileFPS var", "value", value)
break
}
r.cfg.FileFPS = v
case "mode":
r.cfg.Loop = false
if value == "Loop" {
r.cfg.Loop = true
}
}
}
r.cfg.Logger.Log(logger.Info, pkg+"revid config changed", "config", fmt.Sprintf("%+v", r.cfg))
@ -853,14 +879,34 @@ func (r *Revid) Update(vars map[string]string) error {
// processFrom is run as a routine to read from a input data source, lex and
// then send individual access units to revid's encoders.
func (r *Revid) processFrom(read io.Reader, delay time.Duration) {
err := r.lexTo(r.filters[0], read, delay)
r.cfg.Logger.Log(logger.Debug, pkg+"finished lexing")
switch err {
case nil: // Do nothing.
case io.EOF: // TODO: handle this depending on loop mode.
default:
r.err <- err
func (r *Revid) processFrom(in device.AVDevice, delay time.Duration) {
defer r.wg.Done()
for l := true; l; l = r.cfg.Loop {
err := in.Start()
if err != nil {
r.err <- fmt.Errorf("could not start input device: %w", err)
return
}
// Lex data from input device, in, until finished or an error is encountered.
// For a continuous source e.g. a camera or microphone, we should remain
// in this call indefinitely unless in.Stop() is called and an io.EOF is forced.
r.cfg.Logger.Log(logger.Info, pkg+"lexing")
err = r.lexTo(r.filters[0], in, delay)
switch err {
case nil, io.EOF:
case io.ErrUnexpectedEOF:
r.cfg.Logger.Log(logger.Info, pkg+"unexpected EOF from input")
default:
r.err <- err
}
err = in.Stop()
if err != nil {
r.err <- fmt.Errorf("could not stop input source: %w", err)
}
}
r.wg.Done()
r.cfg.Logger.Log(logger.Info, pkg+"finished lexing")
}
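
The loop above leans on the contract spelled out in its comment: Stop on the input device must force an io.EOF so the lexing call returns. The device.AVDevice interface itself is not shown in this diff, so the sketch below assumes only the Start, Read and Stop calls used above, with an in-memory pipe standing in for a camera or file source:

package main

import (
    "fmt"
    "io"
    "io/ioutil"
    "time"
)

// pipeDevice is a hypothetical stand-in for an input device: Start begins
// producing data and Stop forces io.EOF on the read side, which is what lets
// the consumer's read loop finish cleanly.
type pipeDevice struct {
    r *io.PipeReader
    w *io.PipeWriter
}

func newPipeDevice() *pipeDevice {
    r, w := io.Pipe()
    return &pipeDevice{r: r, w: w}
}

func (d *pipeDevice) Start() error {
    go func() {
        for i := 0; ; i++ {
            if _, err := fmt.Fprintf(d.w, "frame %d\n", i); err != nil {
                return // Pipe closed by Stop; stop producing.
            }
            time.Sleep(10 * time.Millisecond)
        }
    }()
    return nil
}

func (d *pipeDevice) Read(p []byte) (int, error) { return d.r.Read(p) }

// Stop closes the write side, so pending reads drain and then return io.EOF.
func (d *pipeDevice) Stop() error { return d.w.Close() }

func main() {
    dev := newPipeDevice()
    if err := dev.Start(); err != nil {
        panic(err)
    }
    go func() {
        time.Sleep(50 * time.Millisecond)
        dev.Stop()
    }()
    // io.Copy stands in for lexTo: it returns once the device reports io.EOF.
    n, err := io.Copy(ioutil.Discard, dev)
    fmt.Println(n, err) // err is nil, since io.Copy treats io.EOF as a clean finish.
}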


@ -61,19 +61,25 @@ const (
type httpSender struct {
client *netsender.Sender
log func(lvl int8, msg string, args ...interface{})
report func(sent int)
}
// newHttpSender returns a pointer to a new httpSender.
func newHttpSender(ns *netsender.Sender, log func(lvl int8, msg string, args ...interface{})) *httpSender {
func newHttpSender(ns *netsender.Sender, log func(lvl int8, msg string, args ...interface{}), report func(sent int)) *httpSender {
return &httpSender{
client: ns,
log: log,
report: report,
}
}
// Write implements io.Writer.
func (s *httpSender) Write(d []byte) (int, error) {
return len(d), httpSend(d, s.client, s.log)
err := httpSend(d, s.client, s.log)
if err == nil {
s.report(len(d))
}
return len(d), err
}
func (s *httpSender) Close() error { return nil }
@ -276,9 +282,10 @@ type rtmpSender struct {
ring *vring.Buffer
done chan struct{}
wg sync.WaitGroup
report func(sent int)
}
func newRtmpSender(url string, timeout uint, retries int, rb *vring.Buffer, log func(lvl int8, msg string, args ...interface{})) (*rtmpSender, error) {
func newRtmpSender(url string, timeout uint, retries int, rb *vring.Buffer, log func(lvl int8, msg string, args ...interface{}), report func(sent int)) (*rtmpSender, error) {
var conn *rtmp.Conn
var err error
for n := 0; n < retries; n++ {
@ -299,6 +306,7 @@ func newRtmpSender(url string, timeout uint, retries int, rb *vring.Buffer, log
log: log,
ring: rb,
done: make(chan struct{}),
report: report,
}
s.wg.Add(1)
go s.output()
@ -364,6 +372,7 @@ func (s *rtmpSender) Write(d []byte) (int, error) {
if err != nil {
s.log(logger.Warning, pkg+"rtmpSender: ring buffer write error", "error", err.Error())
}
s.report(len(d))
return len(d), nil
}
@ -404,9 +413,10 @@ type rtpSender struct {
log func(lvl int8, msg string, args ...interface{})
encoder *rtp.Encoder
data []byte
report func(sent int)
}
func newRtpSender(addr string, log func(lvl int8, msg string, args ...interface{}), fps uint) (*rtpSender, error) {
func newRtpSender(addr string, log func(lvl int8, msg string, args ...interface{}), fps uint, report func(sent int)) (*rtpSender, error) {
conn, err := net.Dial("udp", addr)
if err != nil {
return nil, err
@ -414,6 +424,7 @@ func newRtpSender(addr string, log func(lvl int8, msg string, args ...interface{
s := &rtpSender{
log: log,
encoder: rtp.NewEncoder(conn, int(fps)),
report: report,
}
return s, nil
}
@ -426,6 +437,7 @@ func (s *rtpSender) Write(d []byte) (int, error) {
if err != nil {
s.log(logger.Warning, pkg+"rtpSender: write error", err.Error())
}
s.report(len(d))
return len(d), nil
}
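
Each sender is now handed a report callback and invokes it with the byte count of every successful write; Revid wires in r.bitrate.Report and surfaces the result through Bitrate(). A minimal sketch of that pattern, assuming only the Report(int) and Bitrate() methods this diff actually calls on bitrate.Calculator, and assuming (as the Revid struct above suggests) that the zero value is ready to use:

package main

import (
    "fmt"

    "bitbucket.org/ausocean/utils/bitrate"
)

func main() {
    // Zero value used directly, matching how Revid declares the field
    // without explicit initialisation.
    var c bitrate.Calculator

    // A sender reports the size of each successful write, exactly as
    // httpSender, rtmpSender and rtpSender do above.
    report := c.Report
    report(2048)
    report(4096)

    // Revid.Bitrate simply returns the most recent calculation.
    fmt.Println(c.Bitrate())
}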