Commit 01b8230d authored by d.basulto

reconnect functionality added

parent c9ad4f92
@@ -7,136 +7,158 @@
 * @note streamRecorder http://streamingmovil.radioformula.com:8000/m1033 3000
 */
////////////////////////////////////////////////////////////////////////////////
#include "StreamRecorder.h"
#include <fstream>
#include <iostream>
#include <ctime>
#include <cmath>
#include <sstream>
#include <gst/gst.h>
#include <glib.h>
#include <unistd.h>
#include <cstring>
#include "FLAC/metadata.h"
#include "FLAC/stream_encoder.h"
using namespace std;
//------------------------------------------------------------------------------
/**
 * Constructor
 * @param source streaming url
 * @param time split recording in segments of time seconds.
 */
StreamRecorder::StreamRecorder(const char* source, int time)
{
    int nFrames = ceil(time*STREAMRECORDER_SAMPLERATE/READSIZE);
    recordTime = nFrames*READSIZE/STREAMRECORDER_SAMPLERATE;
    cout << "record time: " << recordTime << endl;

    audioFileDuration = time;
    strcpy(pluginUri, source);

    bufferSize = nFrames*READSIZE*STREAMRECORDER_BYTESPERSAMPLE;
    audioBuffer = new unsigned char[bufferSize];   // New buffer
    memset(audioBuffer, 0, bufferSize);
    audioBufferPosition = audioBuffer;
    nBytes = 0;
    isConnectionLost = false;

    createMainPipeline();
    connect();
}
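// Sizing note (illustrative, using the defaults from StreamRecorder.h and the
// 3000 s segment length mentioned in the @note above): time*SAMPLERATE/READSIZE
// is integer division, so ceil() has no effect and nFrames = 3000*44100/1152 = 114843;
// recordTime = 114843*1152/44100 = 2999 s and bufferSize = 114843*1152*2
// = 264,598,272 bytes (~252 MiB) of mono 16-bit PCM held in memory per segment.
// pluginUri is a fixed DST_URI_SIZE buffer, so the source URI is assumed to fit.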
//------------------------------------------------------------------------------
/**
 * Connect to the stream stored in pluginUri
 * @return unimplemented
 */
int StreamRecorder::connect()
{
    disconnect();

    cout << "connecting to " << pluginUri << endl;

    gst_element_set_state (mainPipeline, GST_STATE_NULL);
    gst_element_set_state (mainPipeline, GST_STATE_READY);
    g_object_set (G_OBJECT (streamSrc), "uri", pluginUri, NULL);
    gst_element_link (streamSrc, audioConvert);
    gst_element_set_state (mainPipeline, GST_STATE_PLAYING);
    return 0;
}
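// connect() is also what the reconnect path calls: cycling the pipeline through
// NULL -> READY -> PLAYING discards the previous connection, re-applies the
// "uri" property and makes uridecodebin probe the stream again, so
// srcNewPad_callback relinks the decoded audio pad to audioConvert.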
//------------------------------------------------------------------------------
/**
 * Disconnect from the stream
 * @return unimplemented
 */
int StreamRecorder::disconnect()
{
    gst_element_unlink (streamSrc, audioConvert);
    gst_element_set_state (mainPipeline, GST_STATE_NULL);
    return 0;
}
//------------------------------------------------------------------------------
/**
 * Create the main pipeline
 * @return 0 on success, nonzero on error
 */
int StreamRecorder::createMainPipeline()
{
    mainPipeline = gst_pipeline_new("stream-recorder");

    GstBus* bus;
    bus = gst_pipeline_get_bus(GST_PIPELINE (mainPipeline));
    gst_bus_add_watch(bus, bus_callback, this);
    gst_object_unref(GST_OBJECT (bus));

    streamSrc = gst_element_factory_make("uridecodebin", "stream_source");
    audioConvert = gst_element_factory_make ("audioconvert", "audio_convert");
    //audioResample = gst_element_factory_make ("audioresample", "audio_resample");
    filterCaps = gst_element_factory_make("capsfilter", "filter_cap");
    GstCaps *fc = gst_caps_new_full(gst_structure_new ("audio/x-raw",
                                                       "channels", G_TYPE_INT, 1,
                                                       "rate", G_TYPE_INT, STREAMRECORDER_SAMPLERATE,
                                                       "format", G_TYPE_STRING, "S16LE",
                                                       "signed", G_TYPE_BOOLEAN, 1, //MUST BE SIGNED
                                                       "endianness", G_TYPE_INT, 1234,
                                                       NULL),
                                    NULL);
    g_object_set(G_OBJECT (filterCaps), "caps", fc, NULL);
    queue0 = gst_element_factory_make("queue", "queue0");
    filter = gst_element_factory_make("identity", "audio_filter");
    g_signal_connect(filter, "handoff", G_CALLBACK (filter_handoff_callback), this);
    queue1 = gst_element_factory_make("queue", "queue1");
    fakeSink = gst_element_factory_make("fakesink", "fake_sink");
    //audioSink = gst_element_factory_make("autoaudiosink", "speaker");

    gst_bin_add_many (GST_BIN (mainPipeline), streamSrc, audioConvert, filterCaps, queue0, filter, queue1, fakeSink, NULL);
    if(!gst_element_link_many(audioConvert, filterCaps, queue0, filter, queue1, fakeSink, NULL))
    {
        //gst_bin_add_many (GST_BIN (mainPipeline), streamSrc, audioConvert, filterCaps, queue0, filter, queue1, audioSink, NULL);
        //if(!gst_element_link_many(audioConvert, filterCaps, queue0, filter, queue1, audioSink, NULL)){
        cerr << "mainPipeline: Failed to link elements in the pipeline" << endl;
        exit(0);
    }

    g_signal_connect(streamSrc, "pad-added", G_CALLBACK(srcNewPad_callback), this);
    gst_element_set_state (mainPipeline, GST_STATE_NULL);

    return 0;
}
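// Pipeline sketch: roughly the same graph expressed as a gst-launch-1.0 line,
// useful for debugging only (the caps string below is an assumption distilled
// from the code above, not part of this commit):
//
//   gst-launch-1.0 uridecodebin uri=<stream-uri> ! audioconvert ! \
//       capsfilter caps="audio/x-raw,format=S16LE,channels=1,rate=44100" ! \
//       queue ! identity ! queue ! fakesink
//
// The identity element's handoff signal is where the raw samples are copied out
// (filter_handoff_callback); fakesink only discards the data afterwards.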
//------------------------------------------------------------------------------
/**
 * GLib timeout callback that tries to reconnect to the stream
 * @param instance class instance
 * @return FALSE so the timeout source runs only once
 */
gboolean StreamRecorder::reconnectURIStream(void* instance)
{
    cout << "\nTrying to reconnect with the stream..." << endl << endl;
    ((StreamRecorder*)instance)->connect();

    return FALSE;
}
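// Returning FALSE from a GSourceFunc removes the timeout source, so each call
// to g_timeout_add(..., reconnectURIStream, ...) schedules exactly one retry;
// if that retry fails, the next attempt is scheduled again from bus_callback.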
//------------------------------------------------------------------------------
/**
 * Handle messages posted on the pipeline bus
 * @param bus the GstBus that sent the message
@@ -146,52 +168,149 @@ gboolean StreamRecorder::reconnectURIStream(void *instance)
 */
int StreamRecorder::bus_callback (GstBus *bus, GstMessage *message, void *user_data)
{
    //printf("StreamRecorder got %s message\n", GST_MESSAGE_TYPE_NAME (message));
    if(GST_MESSAGE_TYPE (message) == GST_MESSAGE_EOS)
    {
        ((StreamRecorder*)user_data)->isConnectionLost = true;
    }

    switch (GST_MESSAGE_TYPE (message))
    {
        case GST_MESSAGE_EOS:
            cout << "** End of stream **\n" << endl;

            if(((StreamRecorder*)user_data)->isConnectionLost) //Enter only if the connection is lost
            {
                long int actualRecordTime;
                long int currentTime = time(NULL);

                //cout << "Audio start time (timestamp): " << ((StreamRecorder*)user_data)->timestamp << endl;
                //cout << "Actual time (timestamp): " << currentTime << endl;
                actualRecordTime = currentTime - ((StreamRecorder*)user_data)->timestamp;
                cout << "Record time: " << actualRecordTime << endl;

                /** The record time already reached the required duration */
                if(actualRecordTime >= ((StreamRecorder*)user_data)->audioFileDuration)
                {
                    cout << "Bytes read: " << ((StreamRecorder*)user_data)->nBytes << endl;
                    saveBuffer(user_data);
                }
                else
                {
                    /** Move the pointer to the position corresponding to the difference of the timestamps */
                    long int bytesToAdd = actualRecordTime*READSIZE*STREAMRECORDER_BYTESPERSAMPLE*10;
                    cout << "-------------------------------------" << endl;
                    cout << "Actual bytes number: " << "(" << ((StreamRecorder*)user_data)->nBytes << ") + ";
                    cout << "Bytes to add: " << "(" << bytesToAdd << ")" << endl;
                    /** Update the pointer and the byte count */
                    ((StreamRecorder*)user_data)->nBytes += bytesToAdd;
                    ((StreamRecorder*)user_data)->audioBufferPosition += bytesToAdd;
                    cout << "Result : " << ((StreamRecorder*)user_data)->nBytes << endl;
                    cout << "Buffer size: " << ((StreamRecorder*)user_data)->bufferSize << endl;
                    cout << "-------------------------------------" << endl;
                }

                ((StreamRecorder*)user_data)->isConnectionLost = false;

                /** Try to reconnect to the radio stream */
                g_timeout_add(5*1000, reconnectURIStream, user_data);
            }
            break;

        case GST_MESSAGE_ERROR:
            gchar *debug;
            GError *error;
            gst_message_parse_error (message, &error, &debug);
            g_free (debug);

            /** Copy at most ERROR_MSG_SIZE-1 characters and force null-termination */
            strncpy(((StreamRecorder*)user_data)->errorMessage, error->message, ERROR_MSG_SIZE-1);
            ((StreamRecorder*)user_data)->errorMessage[ERROR_MSG_SIZE-1] = '\0';

            cerr << "Error: " << ((StreamRecorder*)user_data)->errorMessage << endl << endl;
            g_error_free (error);

            if(strcmp(((StreamRecorder*)user_data)->errorMessage, "Stream doesn't contain enough data.") == 0)
            {
                /** No audio has been received yet */
                if(((StreamRecorder*)user_data)->timestamp == 0)
                {
                    g_timeout_add(5*1000, reconnectURIStream, user_data);
                }
                else
                {
                    long int actualRecordTime;
                    long int currentTime = time(NULL);

                    //cout << "Audio start time (timestamp): " << ((StreamRecorder*)user_data)->timestamp << endl;
                    //cout << "Actual time (timestamp): " << currentTime << endl;
                    actualRecordTime = currentTime - ((StreamRecorder*)user_data)->timestamp;
                    cout << "Record time: " << actualRecordTime << endl;

                    /** The record time already reached the required duration */
                    if (actualRecordTime >= ((StreamRecorder*)user_data)->audioFileDuration)
                    {
                        cout << "Bytes read: " << ((StreamRecorder*)user_data)->nBytes << endl;
                        saveBuffer(user_data);
                    }
                    else
                    {
                        /** Move the pointer to the position corresponding to the difference of the timestamps */
                        long int bytesToAdd = actualRecordTime * READSIZE * STREAMRECORDER_BYTESPERSAMPLE * 10;
                        cout << "-------------------------------------" << endl;
                        cout << "Actual bytes number: " << "(" << ((StreamRecorder*)user_data)->nBytes << ") + ";
                        cout << "Bytes to add: " << "(" << bytesToAdd << ")" << endl;
                        /** Update the pointer and the byte count */
                        ((StreamRecorder*)user_data)->nBytes += bytesToAdd;
                        ((StreamRecorder*)user_data)->audioBufferPosition += bytesToAdd;
                        cout << "Result : " << ((StreamRecorder*)user_data)->nBytes << endl;
                        cout << "Buffer size: " << ((StreamRecorder*)user_data)->bufferSize << endl;
                        cout << "-------------------------------------" << endl;
                    }

                    ((StreamRecorder*)user_data)->isConnectionLost = false;

                    /** Try to reconnect to the radio stream */
                    g_timeout_add(5*1000, reconnectURIStream, user_data);
                }
            }
            break;

        default:
            break;
    }

    return TRUE;
}
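// Recovery flow in the handler above: an EOS, or a "Stream doesn't contain
// enough data." error once some audio has already been received, means the
// connection dropped. If the segment already reached audioFileDuration the
// buffer is flushed to a FLAC file via saveBuffer(); otherwise the write
// pointer is advanced by bytesToAdd, leaving a zeroed (silent) gap that stands
// in for the missing time, and a reconnect is scheduled 5 s later. Note that
// the advance is not bounds-checked against bufferSize, and error messages
// other than the one compared above do not trigger a reconnect.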
//------------------------------------------------------------------------------
void StreamRecorder::saveBuffer(void *user_data)
{
    int missingBytes = ((StreamRecorder*)user_data)->bufferSize - ((StreamRecorder*)user_data)->nBytes;
    //((StreamRecorder*)user_data)->audioBufferPosition += missingBytes;

    ((StreamRecorder*)user_data)->compressBuffer();
    ((StreamRecorder*)user_data)->audioBufferPosition = ((StreamRecorder*)user_data)->audioBuffer;
    memset (((StreamRecorder*)user_data)->audioBuffer, 0, ((StreamRecorder*)user_data)->bufferSize);
    ((StreamRecorder*)user_data)->nBytes = 0;
}
//------------------------------------------------------------------------------
/**
@@ -203,149 +322,57 @@ void StreamRecorder::saveBuffer(void *user_data)
void StreamRecorder::srcNewPad_callback(GstElement *element, GstPad *pad, void *data)
{
    cout << gst_element_get_name(element) << " adding pad.." << gst_pad_get_name (pad) << endl;
    cout << "Pad Name: " << gst_pad_get_name (pad) << endl;
    GstPad *sinkpad;

    //gst_pad_get_caps is for gst v0.1
    //GstCaps *new_pad_caps = gst_pad_get_caps (pad);
    GstCaps *new_pad_caps = gst_pad_query_caps(pad, NULL);
    GstStructure *new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
    const gchar *new_pad_type = gst_structure_get_name (new_pad_struct);

    if (g_str_has_prefix (new_pad_type, "audio/x-raw"))
    {
        cout << "linking " << new_pad_type << endl;
        GstElement *nextElement = ((StreamRecorder*)data)->audioConvert;
        sinkpad = gst_element_get_static_pad (nextElement, "sink");

        if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad)))
        {
            cerr << "Type is " << new_pad_type << " but link failed." << endl;
            exit(0);
        }
        else
            cout << "Link succeeded " << new_pad_type << endl;
    }
}
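// uridecodebin only creates its source pads after the stream has been probed,
// which is why the link to audioConvert is made here, at "pad-added" time;
// pads whose caps do not start with "audio/x-raw" are simply ignored.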
//------------------------------------------------------------------------------
/**
 * Callback for the handoff signal of the identity filter
 * @param filter Identity filter
 * @param buffer The buffer that has just been received
 * @param user_data this
 * @return unimplemented
 */
int StreamRecorder::filter_handoff_callback(GstElement* filter, GstBuffer* buffer, void* user_data)
{
    GstMapInfo info;
    if(!gst_buffer_map (buffer, &info, GST_MAP_READ))
        cout << "ERROR: MAPPING IS NOT VALID" << endl;

    //GST_BUFFER_DATA is for gst v0.1
    //((StreamRecorder*)user_data)->addToBuffer((unsigned char*)GST_BUFFER_DATA (buffer));

    // user_data is the class instance
    ((StreamRecorder*)user_data)->addToBuffer((unsigned char*)info.data, info.size);
    gst_buffer_unmap (buffer, &info);
    return 0;
}
//------------------------------------------------------------------------------
/**
 * Add audio data to audioBuffer
@@ -355,47 +382,171 @@ int StreamRecorder::compressBuffer()
 */
int StreamRecorder::addToBuffer(unsigned char* data, int length)
{
    int bytesRead = length; //READSIZE*STREAMRECORDER_BYTESPERSAMPLE;
    long int currentTime;
    long int actualRecordTime;
    bool isNewAudioFile;

    /** Used to derive the output filename */
    isNewAudioFile = nBytes == 0;

    cout << "Data size: " << bytesRead << endl;

    /** Copy the data and update the write pointer */
    memcpy((char*)audioBufferPosition, (char*)data, bytesRead);
    nBytes += bytesRead;
    audioBufferPosition += bytesRead;

    if(isNewAudioFile)
    {
        cout << "New audio stream" << endl;
        /** filename */
        timestamp = time(NULL);
        cout << "Audio filename (timestamp): " << timestamp << endl;
    }
    else
    {
        currentTime = time(NULL);
        actualRecordTime = currentTime - timestamp;
        cout << "Record time: " << actualRecordTime << endl;
        cout << "Bytes read " << nBytes << endl;
        cout << "Buffer size " << bufferSize << endl;

        /** If the buffer is full, save it to a FLAC file */
        if(nBytes >= bufferSize)
        {
            compressBuffer();
            audioBufferPosition = audioBuffer;
            memset (audioBuffer, 0, bufferSize);
            nBytes = 0;
        }
    }

    return nBytes;
}
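// Throughput note (illustrative): with the caps forced to mono S16LE at
// 44100 Hz, one second of audio is 44100*2 = 88,200 bytes, so nBytes/88200
// approximates the seconds buffered and the buffer fills after roughly
// recordTime seconds of uninterrupted streaming.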
//------------------------------------------------------------------------------
/**
 * Save the audio data (audioBuffer) in FLAC format
 * @return unimplemented
 */
int StreamRecorder::compressBuffer()
{
    //long int currentTime = time(NULL);
    stringstream ss;
    string fileNameStr, currentTimeStr;
    ss << timestamp << endl;
    getline(ss, currentTimeStr);

    ss << currentTimeStr << ".flac" << endl;
    getline(ss, fileNameStr);

    // Reset the timestamp
    timestamp = 0;

    int readsize = READSIZE;
    FLAC__bool ok = true;
    FLAC__StreamEncoder *encoder = 0;
    FLAC__StreamMetadata *metadata[2];
    FLAC__StreamMetadata_VorbisComment_Entry entry;
    FLAC__StreamEncoderInitStatus init_status;

    /* allocate the encoder */
    if((encoder = FLAC__stream_encoder_new()) == NULL)
    {
        fprintf(stderr, "ERROR: allocating encoder\n");
        return 1;
    }

    ok &= FLAC__stream_encoder_set_verify(encoder, true);
    ok &= FLAC__stream_encoder_set_compression_level(encoder, 5);
    ok &= FLAC__stream_encoder_set_channels(encoder, 1);
    ok &= FLAC__stream_encoder_set_bits_per_sample(encoder, STREAMRECORDER_BYTESPERSAMPLE*8);
    ok &= FLAC__stream_encoder_set_sample_rate(encoder, STREAMRECORDER_SAMPLERATE);
    ok &= FLAC__stream_encoder_set_total_samples_estimate(encoder, nBytes/STREAMRECORDER_BYTESPERSAMPLE);

    /* now add some metadata; we'll add some tags and a padding block */
    if(ok)
    {
        if(
            (metadata[0] = FLAC__metadata_object_new(FLAC__METADATA_TYPE_VORBIS_COMMENT)) == NULL ||
            (metadata[1] = FLAC__metadata_object_new(FLAC__METADATA_TYPE_PADDING)) == NULL ||
            /* there are many tag (vorbiscomment) functions but these are convenient for this particular use: */
            !FLAC__metadata_object_vorbiscomment_entry_from_name_value_pair(&entry, "ARTIST", "artist") ||
            !FLAC__metadata_object_vorbiscomment_append_comment(metadata[0], entry, /*copy=*/false) || /* copy=false: let metadata object take control of entry's allocated string */
            !FLAC__metadata_object_vorbiscomment_entry_from_name_value_pair(&entry, "YEAR", "year") ||
            !FLAC__metadata_object_vorbiscomment_append_comment(metadata[0], entry, /*copy=*/false)
        )
        {
            fprintf(stderr, "ERROR: out of memory or tag error\n");
            ok = false;
        }
        metadata[1]->length = 1234; /* set the padding length */
        ok = FLAC__stream_encoder_set_metadata(encoder, metadata, 2);
    }

    /* initialize encoder */
    if(ok)
    {
        init_status = FLAC__stream_encoder_init_file(encoder, fileNameStr.c_str(), NULL, /*client_data=*/NULL);
        if(init_status != FLAC__STREAM_ENCODER_INIT_STATUS_OK)
        {
            fprintf(stderr, "ERROR: initializing encoder: %s\n", FLAC__StreamEncoderInitStatusString[init_status]);
            ok = false;
        }
    }

    int channels = 1;
    // int bps = 16;
    unsigned char* audioBufferTmp = audioBuffer;
    unsigned char* buffer = new unsigned char[readsize*STREAMRECORDER_BYTESPERSAMPLE*channels];
    int* pcm = new int[readsize*channels];

    /* read blocks of samples from the audio buffer and feed them to the encoder */
    if(ok)
    {
        int left = nBytes;
        while(left && ok)
        {
            int need = (left > readsize ? readsize : left);
            memcpy(buffer, audioBufferTmp, need);
            audioBufferTmp += need;
            /* convert the packed little-endian 16-bit PCM samples into an interleaved FLAC__int32 buffer for libFLAC */
            for(int i=0; i < need*channels; i++)
            {
                /* inefficient but simple and works on big- or little-endian machines */
                pcm[i] = (FLAC__int32)(((FLAC__int16)(FLAC__int8)buffer[2*i+1] << 8) | (FLAC__int16)buffer[2*i]);
            }
            /* feed samples to encoder */
            ok = FLAC__stream_encoder_process_interleaved(encoder, pcm, need/STREAMRECORDER_BYTESPERSAMPLE);
            left -= need;
        }
    }
    else
        cout << "-ERROR-" << endl;

    ok &= FLAC__stream_encoder_finish(encoder);
    fprintf(stderr, "encoding: %s\n", ok ? "succeeded" : "FAILED");
    fprintf(stderr, "   state: %s\n", FLAC__StreamEncoderStateString[FLAC__stream_encoder_get_state(encoder)]);

    /* now that encoding is finished, the metadata can be freed */
    FLAC__metadata_object_delete(metadata[0]);
    FLAC__metadata_object_delete(metadata[1]);
    FLAC__stream_encoder_delete(encoder);
    delete[] buffer;
    delete[] pcm;
    return 0;
}
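// The packing loop above rebuilds each 16-bit sample from two little-endian
// bytes, pcm[i] = (int16)((buffer[2*i+1] << 8) | buffer[2*i]), sign-extended
// to FLAC__int32, i.e. the inverse of how the S16LE capsfilter output is laid
// out in audioBuffer.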
//------------------------------------------------------------------------------
@@ -11,62 +11,83 @@
#ifndef STREAMRECORDER_H
#define STREAMRECORDER_H
//------------------------------------------------------------------------------
/** Your public header include */
#include <gst/gst.h>
#include <jmorecfg.h>
//------------------------------------------------------------------------------
#define STREAMRECORDER_SAMPLERATE 44100
#define READSIZE 1152 //For MPEG1, frame_size = 1152 samples/frame
#define STREAMRECORDER_BYTESPERSAMPLE 2

#define ERROR_MSG_SIZE 50
#define DST_URI_SIZE 80
//------------------------------------------------------------------------------
/** Class declaration */
class StreamRecorder
{
private:
    unsigned char* audioBuffer;
    unsigned char* audioBufferPosition;

    char errorMessage[ERROR_MSG_SIZE];
    char pluginUri[DST_URI_SIZE];

    unsigned int nBytes;
    unsigned int bufferSize;
    int recordTime;
    int audioFileDuration;

    /** Audio filename (segment start timestamp) */
    long int timestamp = 0;

    bool isConnectionLost;

    //char* sourceName;
    //GstElement* audioResample;
    //GstElement* tempBin;
    //GstElement* audioSink;

    GstElement* streamSrc;
    GstElement* audioConvert;
    GstElement* filterCaps;
    GstElement* queue0;
    GstElement* queue1;
    GstElement* filter;
    GstElement* fakeSink;
    GstElement* mainPipeline;

    int createMainPipeline();
    int connect();
    int disconnect();

    /** Add data to the buffer */
    int addToBuffer(unsigned char* data, int length);
    int compressBuffer();

    /** Plugin callbacks */
    static void srcNewPad_callback(GstElement *element, GstPad *pad, void *data);
    static int bus_callback(GstBus *bus, GstMessage *message, void *data);
    static int filter_handoff_callback(GstElement *filter, GstBuffer* buffer, void* user_data);

    /** Save audio */
    static void saveBuffer(void *user_data);

    /** Restart the pipeline */
    static gboolean reconnectURIStream(void* data);

public:
    StreamRecorder(const char* source, int time);
};
//------------------------------------------------------------------------------
#endif
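// Minimal usage sketch (illustrative, not part of this commit): the bus watch
// and the reconnect timer only fire while a GLib main loop is running, and
// gst_init() must be called before the pipeline is built. The URI below is a
// placeholder; 3000 s matches the segment length mentioned in the @note above.
#include <gst/gst.h>
#include "StreamRecorder.h"

int main(int argc, char* argv[])
{
    gst_init(&argc, &argv);                                      // must precede any element creation
    StreamRecorder recorder("http://example.com/stream", 3000);  // hypothetical stream URI, 3000 s segments
    GMainLoop* loop = g_main_loop_new(NULL, FALSE);              // drives bus_callback and g_timeout_add()
    g_main_loop_run(loop);
    g_main_loop_unref(loop);
    return 0;
}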