Creating a gstreamer pipeline with the intent of modifying parts - android

I'm working on an audio streamer, and I want to be able to modify the file I'm streaming, and also the target I'm streaming to. To do this I would modify the location for my filesrc, or I would modify the host/port of my udpsink.
I am having trouble understanding everything I need to know to get this pipeline linked together and playing. Previously I hard-coded everything and used gst_parse_launch with this pipeline:
filesrc location=/storage/sdcard0/Music/RunToTheHills.ogg ! oggdemux ! vorbisdec ! audioresample ! audioconvert ! audio/x-raw-int,channels=2,depth=16,width=16,rate=44100 ! rtpL16pay ! udpsink host=192.168.100.126 port=9001
Now I want to change the filesrc location, and udp host/port as mentioned above.
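For reference, this is roughly the kind of runtime change I have in mind once I hold real element pointers (just a sketch; the new path, host and port are placeholders, and filesrc only accepts a new location while it is in the NULL or READY state):
/* Stop the pipeline so filesrc will accept a new location. */
gst_element_set_state(data->pipeline, GST_STATE_READY);
g_object_set(G_OBJECT(data->filesrc), "location", "/storage/sdcard0/Music/SomeOtherTrack.ogg", NULL);
g_object_set(G_OBJECT(data->udp), "host", "192.168.100.200", "port", 9002, NULL);
gst_element_set_state(data->pipeline, GST_STATE_PLAYING);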
My application is an Android app using the NDK. However, this should not affect the code needed to set up a pipeline.
Here's what I've got so far, which results in a segfault.
My data structure:
/**
* Structure to hold all the various variables we need.
* This is handed to callbacks
*/
typedef struct _CustomData {
jobject app; /* The Java app */
GstElement *pipeline; /* gStreamer pipeline */
GstElement *filesrc; /* Input file */
GstPad *fileblock; /* Used to block filesrc */
GstElement *ogg; /* Ogg demultiplexer */
GstElement *vorbis; /* Vorbis decoder */
GstElement *resample;
GstElement *convert;
GstCaps *caps;
GstElement *rtp; /* RTP packer */
GstElement *udp; /* UDP sender */
GMainContext *context; /* GLib Context */
GMainLoop *main_loop; /* GLib main loop */
gboolean initialised; /* True after initialisation */
GstState state; /* Pipeline state */
GstState target_state; /* What state we want to put the pipeline into */
gint64 duration; /* Clip length */
gint64 desired_position; /* Where we want to track to within the clip */
GstClockTime last_seek_time; /* Used to throttle seeking */
gboolean is_live; /* Live streams don't need buffering */
} CustomData;
And here's my creation of the pipeline:
data->pipeline = gst_pipeline_new("pipeline");
data->filesrc = gst_element_factory_make("filesrc", NULL);
if (!data->filesrc) {
GST_ERROR("Failed to create filesrc.");
return NULL;
}
g_object_set(G_OBJECT(data->filesrc), "location", "/storage/sdcard0/Music/RunToTheHills.ogg", NULL);
data->fileblock = gst_element_get_static_pad(data->filesrc, "src");
data->ogg = gst_element_factory_make("oggdemux", NULL);
if (!data->ogg) {
GST_ERROR("Failed to create oggdemux.");
return NULL;
}
data->vorbis = gst_element_factory_make("vorbisdec", NULL);
if (!data->vorbis) {
GST_ERROR("Failed to create vorbisdec.");
return NULL;
}
data->resample = gst_element_factory_make("audioresample", NULL);
if (!data->resample) {
GST_ERROR("Failed to create audioresample.");
return NULL;
}
data->convert = gst_element_factory_make("audioconvert", NULL);
if (!data->convert) {
GST_ERROR("Failed to create audioconvert.");
return NULL;
}
data->caps = gst_caps_new_simple("audio/x-raw-int",
"channels", G_TYPE_INT, 2,
"depth", G_TYPE_INT, 16,
"width", G_TYPE_INT, 16,
"rate", G_TYPE_INT, 44100);
if (!data->caps) {
GST_ERROR("Failed to create caps");
return NULL;
}
data->rtp = gst_element_factory_make("rtpL16pay", NULL);
if (!data->rtp) {
GST_ERROR("Failed to create rtpL16pay.");
return NULL;
}
data->udp = gst_element_factory_make("udpsink", NULL);
if (!data->udp) {
GST_ERROR("Failed to create udpsink.");
return NULL;
}
g_object_set(G_OBJECT(data->udp), "host", "192.168.100.126", NULL);
g_object_set(G_OBJECT(data->udp), "port", 9001, NULL);
if (!data->ogg || !data->vorbis || !data->resample || !data->convert || !data->caps || !data->rtp || !data->udp) {
GST_ERROR("Unable to create all elements!");
return NULL;
}
gst_bin_add_many(GST_BIN(data->pipeline), data->filesrc, data->ogg, data->vorbis,
data->resample, data->convert, data->caps, data->rtp, data->udp);
/* Link all the elements together */
gst_element_link(data->filesrc, data->ogg);
gst_element_link(data->ogg, data->vorbis);
gst_element_link(data->vorbis, data->resample);
gst_element_link(data->resample, data->convert);
gst_element_link_filtered(data->convert, data->rtp, data->caps);
gst_element_link(data->rtp, data->udp);
Can someone give me some hints as to where I went wrong?
For interest, here's my previously working pipeline:
data->pipeline = gst_parse_launch("filesrc location=/storage/sdcard0/Music/RunToTheHills.ogg ! oggdemux ! vorbisdec ! audioresample ! audioconvert ! audio/x-raw-int,channels=2,depth=16,width=16,rate=44100 ! rtpL16pay ! udpsink host=192.168.100.126 port=9001", &error);
if (error) {
gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
g_clear_error (&error);
set_ui_message(message, data);
g_free (message);
return NULL;
}

You cannot simply link oggdemux to vorbisdec statically, because the demuxer's source pads are "sometimes" pads that only appear once the stream has been parsed.
You need to add a handler function for the demuxer's 'pad-added' signal and perform the link there.
/* Connect to the pad-added signal */
g_signal_connect (data->ogg, "pad-added", G_CALLBACK (on_pad_added), data);
And the handler:
void on_pad_added (GstElement *src, GstPad *new_pad, CustomData *data)
{
GstPad *sink_pad = gst_element_get_static_pad (data->vorbis, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_get_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
g_print (" It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
Also, since you're getting a segmentation fault, I believe the problem is in how the bin is assembled rather than in CustomData itself: gst_bin_add_many() is a NULL-terminated varargs call, so its argument list must end with NULL (the same goes for gst_caps_new_simple()), and data->caps is a GstCaps, not a GstElement, so it should not be added to the bin at all.
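For reference, a sketch of how the bin assembly and static linking might look with those points addressed (the oggdemux link is left to the pad-added handler shown above):
/* Only GstElements go in the bin, and the varargs list must end with NULL. */
gst_bin_add_many(GST_BIN(data->pipeline), data->filesrc, data->ogg, data->vorbis,
                 data->resample, data->convert, data->rtp, data->udp, NULL);
/* Static links; oggdemux -> vorbisdec is made later, inside on_pad_added(). */
gst_element_link(data->filesrc, data->ogg);
gst_element_link_many(data->vorbis, data->resample, data->convert, NULL);
gst_element_link_filtered(data->convert, data->rtp, data->caps);
gst_element_link(data->rtp, data->udp);
/* Connect to the pad-added signal of the demuxer */
g_signal_connect(data->ogg, "pad-added", G_CALLBACK(on_pad_added), data);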

Related

How to send requests to server by gstreamer rtsp client?

I have been searching about the GStreamer RTSP client for a long time, but no luck.
Now I can display the live stream or the recorded stream with GStreamer (gstreamer-1.0-android-armv7-1.6.0) from the server on an Android device. Next I want to send PLAY/PAUSE requests to change the server state when playing a recorded stream.
My question: is there a simple way to obtain and access the pipeline when working with gst-rtsp-stream? Could someone please provide an example?
Nov 10 Update:
GstBus *bus;
CustomData *data = (CustomData *)userdata;
GSource *timeout_source;
GSource *bus_source;
GError *error = NULL;
guint flags;
/* Create our own GLib Main Context and make it the default one */
data->context = g_main_context_new ();
g_main_context_push_thread_default(data->context);
/* Build pipeline */
data->pipeline = gst_parse_launch("playbin", &error);
if (error) {
gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
g_clear_error (&error);
set_ui_message(message, data);
g_free (message);
return NULL;
}
/* Set latency to 0 ns */
gst_pipeline_set_latency(GST_PIPELINE(data->pipeline), 0);
/* Disable subtitles */
g_object_get (data->pipeline, "flags", &flags, NULL);
flags &= ~GST_PLAY_FLAG_TEXT;
g_object_set (data->pipeline, "flags", flags, NULL);
/* Set the pipeline to READY, so it can already accept a window handle, if we have one */
data->target_state = GST_STATE_READY;
gst_element_set_state(data->pipeline, GST_STATE_READY);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data->pipeline);
bus_source = gst_bus_create_watch (bus);
g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
g_source_attach (bus_source, data->context);
g_source_unref (bus_source);
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
gst_object_unref (bus);
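If the recorded stream is played through playbin with an rtsp:// URI, rtspsrc inside playbin manages the RTSP session, so changing the pipeline state should be enough to make it send PAUSE/PLAY requests to the server. A minimal sketch on top of the code above (the URI is only a placeholder):
/* Assumed: the recording is exposed by the server as an rtsp:// URI. */
g_object_set(data->pipeline, "uri", "rtsp://192.168.1.10:8554/recorded", NULL);
gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
/* Later: rtspsrc translates these state changes into RTSP PAUSE/PLAY requests. */
gst_element_set_state(data->pipeline, GST_STATE_PAUSED);
gst_element_set_state(data->pipeline, GST_STATE_PLAYING);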

CCSprite from URL - Android

I'm trying to load a CCSprite from a Facebook server, but when it loads it appears as a black image, and I don't know why. I think the CURL buffer is 0. My code is below; I don't know if there's an easier way to do it.
Edit: I already tried on the main thread, and it's also black.
Note: I'm running it on a pthread.
//called at the end of init
pthread_t tid1;
pthread_create(&tid1, NULL, &loadSync, this);
struct MemoryStruct {
char *memory;
size_t size;
};
static size_t
WriteMemoryCallback(void *contents, size_t size, size_t nmemb, void *userp) {
size_t realsize = size * nmemb;
struct MemoryStruct *mem = (struct MemoryStruct *)userp;
mem->memory = (char *)realloc(mem->memory, mem->size + realsize + 1);
if (mem->memory == NULL) {
/* out of memory! */
printf("not enough memory (realloc returned NULL)\n");
exit(EXIT_FAILURE);
}
memcpy(&(mem->memory[mem->size]), contents, realsize);
mem->size += realsize;
mem->memory[mem->size] = 0;
return realsize;
}
//This class it's called helpBlock
static void *loadSync(void *args) {
helpBlock *thiz = (helpBlock*)args;
CURL *curl_handle;
struct MemoryStruct chunk;
chunk.memory = (char*)malloc(1); /* will be grown as needed by the realloc above */
chunk.size = 0; /* no data at this point */
curl_global_init(CURL_GLOBAL_ALL);
/* init the curl session */
curl_handle = curl_easy_init();
/* specify URL to get */
curl_easy_setopt(curl_handle, CURLOPT_URL, "http://graph.facebook.com/4/picture?width=60&height=60");
/* send all data to this function */
curl_easy_setopt(curl_handle, CURLOPT_WRITEFUNCTION, WriteMemoryCallback);
/* we pass our 'chunk' struct to the callback function */
curl_easy_setopt(curl_handle, CURLOPT_WRITEDATA, (void *)&chunk);
/* some servers don't like requests that are made without a user-agent
field, so we provide one */
curl_easy_setopt(curl_handle, CURLOPT_USERAGENT, "libcurl-agent/1.0");
/* get it! */
curl_easy_perform(curl_handle);
/* cleanup curl stuff */
curl_easy_cleanup(curl_handle);
//mySprite->setTexture(CCTextureCache::sharedTextureCache()->addImage("newImage.png"));
CCLog("%s - %d", chunk.memory, chunk.size);
CCImage* img = new CCImage;
img->initWithImageData((void*)chunk.memory, (long)chunk.size, CCImage::kFmtPng);
cocos2d::CCTexture2D* texture = new cocos2d::CCTexture2D();
texture->initWithImage(img);
thiz->firstFriend->frontSprite->setTexture(texture);
if(chunk.memory)
free(chunk.memory);
/* we're done with libcurl, so clean it up */
curl_global_cleanup();
return NULL;
}
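One thing worth checking (an assumption on my part, not something shown above): graph.facebook.com/&lt;id&gt;/picture normally answers with an HTTP redirect to the actual image, so without following redirects the chunk buffer may end up holding the redirect response rather than image bytes. Something like this before curl_easy_perform() in loadSync():
/* Assumption: the Graph API picture endpoint redirects to a CDN URL,
   so ask libcurl to follow redirects. */
curl_easy_setopt(curl_handle, CURLOPT_FOLLOWLOCATION, 1L);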

Enabling MediaPlayerBuffering event to be fired in android-vlc

I would like to subscribe for the MediaPlayer buffering event in the android vlc app.
I edited the EventHandler class and uncommented the event constant.
public static final int MediaPlayerBuffering = 0x103; // ** uncommented this**
public static final int MediaPlayerPlaying = 0x104;
I then added the event type in libvlcjni.c
libvlc_event_manager_t *ev = libvlc_media_player_event_manager(mp);
static const libvlc_event_type_t mp_events[] = {
libvlc_MediaPlayerPlaying,
libvlc_MediaPlayerPaused,
libvlc_MediaPlayerEndReached,
libvlc_MediaPlayerStopped,
libvlc_MediaPlayerVout,
libvlc_MediaPlayerPositionChanged,
libvlc_MediaPlayerEncounteredError,
libvlc_MediaPlayerBuffering // **added this here**
};
I recompiled the JNI to get the .so file and then built the VLC app, but the event never seems to fire.
Where else do I have to link to get the event fired when there is a buffering event due to lack of bandwidth?
I can see in logcat that it prints "1001 ms buffered in 6ms", but that is coming from the lower layer and not the Java layer.
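For reference, adding an event type to mp_events only helps if each entry is also attached to the native callback with libvlc_event_attach; a minimal sketch, assuming libvlcjni.c follows the usual libvlc pattern (the callback and userdata names here are illustrative, not the file's actual ones):
/* Attach every event type listed in mp_events to the JNI callback. */
for (unsigned i = 0; i < sizeof(mp_events) / sizeof(*mp_events); i++)
    libvlc_event_attach(ev, mp_events[i], vlc_event_callback, userdata);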
had to add this in the libvlcjni.c file
else if(ev->type == libvlc_MediaPlayerBuffering) {
/* For determining the vout/ES track change */
jstring sData = (*env)->NewStringUTF(env, "data");
(*env)->CallVoidMethod(env, bundle, putFloat, sData, ev->u.media_player_buffer.new_cache);
(*env)->DeleteLocalRef(env, sData);
}
hope this helps someone
else if(ev->type == libvlc_MediaPlayerBuffering) {
/* For determining the vout/ES track change */
jstring sData = (*env)->NewStringUTF(env, "data");
(*env)->CallVoidMethod(env, bundle, putFloat, sData, ev->u.media_player_buffering.new_cache);
(*env)->DeleteLocalRef(env, sData);
}
The answer is ev->u.media_player_buffering.new_cache
In the VLC file mediaPlayer.c
I found this code:
else if( newval.i_int == INPUT_EVENT_CACHE )
{
event.type = libvlc_MediaPlayerBuffering;
event.u.media_player_buffering.new_cache = (int)(100 * var_GetFloat( p_input, "cache" ));
libvlc_event_send( p_mi->p_event_manager, &event );
}
and in libvlc_events.h
/* media instance */
struct
{
float new_cache;
} media_player_buffering;
Then, I compiled it and it worked. Special thanks to my boy Tracy on the coast!

Android VideoView GStreamer Streaming(MediaController doesn't work )

I have a small project to stream video to an Android device. Streaming works, but I have a problem controlling the video: the MediaController doesn't work, and when I push pause there is no effect. VideoView.pause() also doesn't work. The streaming server is based on GStreamer (the server was written by my friend), and I'm using Android 2.2 CyanogenMod.
This is server code :
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
int
main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMediaMapping *mapping;
GstRTSPMediaFactory *factory;
gchar *str;
gst_init (&argc, &argv);
if (argc < 2) {
g_message ("usage: %s <filename>", argv[0]);
return -1;
}
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mapping for this server, every server has a default mapper object
* that be used to map uri mount points to media factories */
mapping = gst_rtsp_server_get_media_mapping (server);
str = g_strdup_printf ("( "
"filesrc location=\"%s\" ! decodebin2 name=d "
"d. ! queue ! videoscale ! video/x-raw-yuv, width=500, height=300 "
"! ffenc_mpeg4 ! rtpmp4vpay name=pay0 "
"d. ! queue ! audioconvert ! faac ! rtpmp4apay name=pay1"
" )", argv[1]);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory, str);
g_free (str);
/* attach the test factory to the /test url */
gst_rtsp_media_mapping_add_factory (mapping, "/test", factory);
/* don't need the ref to the mapper anymore */
g_object_unref (mapping);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_main_loop_run (loop);
return 0;
}
From what I have gathered, the VideoView on Android only accepts H.264 feeds, so you need to be encoding in H.264.
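If re-encoding on the server is acceptable, a sketch of how the factory launch string above might look with H.264 video instead of MPEG-4 part 2 (this assumes x264enc is available in the server's GStreamer installation; the bitrate is just an example):
str = g_strdup_printf ("( "
    "filesrc location=\"%s\" ! decodebin2 name=d "
    "d. ! queue ! videoscale ! video/x-raw-yuv, width=500, height=300 "
    "! x264enc bitrate=512 ! rtph264pay name=pay0 "
    "d. ! queue ! audioconvert ! faac ! rtpmp4apay name=pay1"
    " )", argv[1]);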

CreateProcess for android adb start-server?

When I use CreateProcess to launch adb.exe, it blocks in ReadFile.
void KillAdbProcess()
{
DWORD aProcesses[1024], cbNeeded, cProcesses;
unsigned int i;
if ( !EnumProcesses( aProcesses, sizeof(aProcesses), &cbNeeded ) )
return;
cProcesses = cbNeeded / sizeof(DWORD);
for ( i = 0; i < cProcesses; i++ )
if( aProcesses[i] != 0 ){
bool shouldKill =false;
wchar_t szProcessName[MAX_PATH] = L"<unknown>";
//Get a handle to the process.
HANDLE hProcess = OpenProcess( PROCESS_QUERY_INFORMATION |
PROCESS_VM_READ | PROCESS_TERMINATE,
FALSE, aProcesses[i] );
if (NULL != hProcess )
{
HMODULE hMod;
DWORD cbNeeded;
if ( EnumProcessModules( hProcess, &hMod, sizeof(hMod),
&cbNeeded) )
{
GetModuleFileNameExW( hProcess, hMod, szProcessName,
sizeof(szProcessName)/sizeof(TCHAR));
int len = wcslen(szProcessName);
if(!wcscmp(L"\\adb.exe",szProcessName+len-8)){
shouldKill = true;
}
}
}
if(shouldKill) TerminateProcess(hProcess,0);
CloseHandle( hProcess );
}
}
int testadb(){
KillAdbProcess();
char buff[4096] = {0};
int len = sizeof(buff);
DWORD exitCode = 0;
SECURITY_ATTRIBUTES sa;
ZeroMemory(&sa, sizeof(sa));
sa.bInheritHandle = TRUE;
sa.lpSecurityDescriptor = NULL;
sa.nLength = sizeof(sa);
HANDLE hOutputReadTmp,hOutputRead,hOutputWrite;
// Create the child output pipe.
if (!CreatePipe(&hOutputReadTmp,&hOutputWrite,&sa,0))
return false;
// Create new output read handle and the input write handles. Set
// the Properties to FALSE. Otherwise, the child inherits the
// properties and, as a result, non-closeable handles to the pipes
// are created.
if (!DuplicateHandle(GetCurrentProcess(),hOutputReadTmp,
GetCurrentProcess(),
&hOutputRead, // Address of new handle.
0,FALSE, // Make it uninheritable.
DUPLICATE_SAME_ACCESS))
return false;
// Close inheritable copies of the handles you do not want to be
// inherited.
if (!CloseHandle(hOutputReadTmp)) return false;
PROCESS_INFORMATION pi;
ZeroMemory(&pi, sizeof(pi));
STARTUPINFOW si;
GetStartupInfoW(&si);
si.cb = sizeof(STARTUPINFO);
si.dwFlags = STARTF_USESTDHANDLES;
si.wShowWindow = SW_HIDE;
si.hStdInput = NULL;
if(buff) {
si.hStdOutput = hOutputWrite;
si.hStdError = hOutputWrite;
} else {
si.hStdOutput = NULL;
si.hStdError = NULL;
}
wchar_t cmdBuf[512] = L"adb.exe start-server";
if( !::CreateProcessW(NULL, cmdBuf, NULL, NULL, TRUE, DETACHED_PROCESS, NULL, NULL, &si, &pi) )
{
exitCode = -1;
goto exit;
}
::CloseHandle(hOutputWrite);
hOutputWrite = NULL;
len--; //keep it for string end char.
DWORD dwBytes = 0;
DWORD dwHasRead = 0;
while(::ReadFile(hOutputRead, buff+dwHasRead, len-dwHasRead, &dwBytes, NULL))
{
printf("read byte=%d\n",dwBytes);
if(0 == dwBytes) break;
dwHasRead += dwBytes;
//GetExitCodeProcess(pi.hProcess, &exitCode);
//if(STILL_ACTIVE != exitCode) break;
if(dwHasRead >= len) break;
}
buff[dwHasRead] = 0;
::GetExitCodeProcess(pi.hProcess, &exitCode);
exit:
if(hOutputRead) ::CloseHandle(hOutputRead);
if(hOutputWrite) ::CloseHandle(hOutputWrite);
::CloseHandle(pi.hProcess);
::CloseHandle(pi.hThread);
return 0;
}
If I change code to
while(::ReadFile(hOutputRead, buff+dwHasRead, len-dwHasRead, &dwBytes, NULL))
{
printf("read byte=%d\n",dwBytes);
if(0 == dwBytes) break;
dwHasRead += dwBytes;
GetExitCodeProcess(pi.hProcess, &exitCode);
if(STILL_ACTIVE != exitCode) break;
if(dwHasRead >= len) break;
}
it works, but when I delete the printf call, it blocks again.
while(::ReadFile(hOutputRead, buff+dwHasRead, len-dwHasRead, &dwBytes, NULL))
{
if(0 == dwBytes) break;
dwHasRead += dwBytes;
GetExitCodeProcess(pi.hProcess, &exitCode);
if(STILL_ACTIVE != exitCode) break;
if(dwHasRead >= len) break;
}
In the source code of adb.exe, I see some code like the following:
#if ADB_HOST
int launch_server()
{
#ifdef HAVE_WIN32_PROC
/* we need to start the server in the background */
/* we create a PIPE that will be used to wait for the server's "OK" */
/* message since the pipe handles must be inheritable, we use a */
/* security attribute */
HANDLE pipe_read, pipe_write;
SECURITY_ATTRIBUTES sa;
STARTUPINFO startup;
PROCESS_INFORMATION pinfo;
char program_path[ MAX_PATH ];
int ret;
sa.nLength = sizeof(sa);
sa.lpSecurityDescriptor = NULL;
sa.bInheritHandle = TRUE;
/* create pipe, and ensure its read handle isn't inheritable */
ret = CreatePipe( &pipe_read, &pipe_write, &sa, 0 );
if (!ret) {
fprintf(stderr, "CreatePipe() failure, error %ld\n", GetLastError() );
return -1;
}
SetHandleInformation( pipe_read, HANDLE_FLAG_INHERIT, 0 );
ZeroMemory( &startup, sizeof(startup) );
startup.cb = sizeof(startup);
startup.hStdInput = GetStdHandle( STD_INPUT_HANDLE );
startup.hStdOutput = pipe_write;
startup.hStdError = GetStdHandle( STD_ERROR_HANDLE );
startup.dwFlags = STARTF_USESTDHANDLES;
ZeroMemory( &pinfo, sizeof(pinfo) );
/* get path of current program */
GetModuleFileName( NULL, program_path, sizeof(program_path) );
ret = CreateProcess(
program_path, /* program path */
"adb fork-server server",
/* the fork-server argument will set the
debug = 2 in the child */
NULL, /* process handle is not inheritable */
NULL, /* thread handle is not inheritable */
TRUE, /* yes, inherit some handles */
DETACHED_PROCESS, /* the new process doesn't have a console */
NULL, /* use parent's environment block */
NULL, /* use parent's starting directory */
&startup, /* startup info, i.e. std handles */
&pinfo );
CloseHandle( pipe_write );
if (!ret) {
fprintf(stderr, "CreateProcess failure, error %ld\n", GetLastError() );
CloseHandle( pipe_read );
return -1;
}
CloseHandle( pinfo.hProcess );
CloseHandle( pinfo.hThread );
/* wait for the "OK\n" message */
{
char temp[3];
DWORD count;
ret = ReadFile( pipe_read, temp, 3, &count, NULL );
CloseHandle( pipe_read );
if ( !ret ) {
fprintf(stderr, "could not read ok from ADB Server, error = %ld\n", GetLastError() );
return -1;
}
if (count != 3 || temp[0] != 'O' || temp[1] != 'K' || temp[2] != '\n') {
fprintf(stderr, "ADB server didn't ACK\n" );
return -1;
}
}
#elif defined(HAVE_FORKEXEC)
char path[PATH_MAX];
int fd[2];
// set up a pipe so the child can tell us when it is ready.
// fd[0] will be parent's end, and fd[1] will get mapped to stderr in the child.
if (pipe(fd)) {
fprintf(stderr, "pipe failed in launch_server, errno: %d\n", errno);
return -1;
}
get_my_path(path);
pid_t pid = fork();
if(pid < 0) return -1;
if (pid == 0) {
// child side of the fork
// redirect stderr to the pipe
// we use stderr instead of stdout due to stdout's buffering behavior.
adb_close(fd[0]);
dup2(fd[1], STDERR_FILENO);
adb_close(fd[1]);
// child process
int result = execl(path, "adb", "fork-server", "server", NULL);
// this should not return
fprintf(stderr, "OOPS! execl returned %d, errno: %d\n", result, errno);
} else {
// parent side of the fork
char temp[3];
temp[0] = 'A'; temp[1] = 'B'; temp[2] = 'C';
// wait for the "OK\n" message
adb_close(fd[1]);
int ret = adb_read(fd[0], temp, 3);
adb_close(fd[0]);
if (ret < 0) {
fprintf(stderr, "could not read ok from ADB Server, errno = %d\n", errno);
return -1;
}
if (ret != 3 || temp[0] != 'O' || temp[1] != 'K' || temp[2] != '\n') {
fprintf(stderr, "ADB server didn't ACK\n" );
return -1;
}
setsid();
}
#else
#error "cannot implement background server start on this platform"
#endif
return 0;
}
#endif
I think the child process spawned by adb.exe inherits adb.exe's handles; if that child process doesn't exit, ReadFile will block forever. But when I run "adb.exe start-server" from the command prompt, everything is OK. So how does the Windows command prompt call CreateProcess and ReadFile?
I have found the answer: Redirecting an arbitrary Console's Input/Output - CodeProject.
The technique of redirecting the input/output of a console process is very simple: the CreateProcess() API, through the STARTUPINFO structure, enables us to redirect the standard handles of a child console-based process. So we can set these handles to a pipe handle, file handle, or any handle that we can read and write. The details of this technique are described clearly in MSDN: HOWTO: Spawn Console Processes with Redirected Standard Handles.
However, MSDN's sample code has two big problems. First, it assumes the child process will send output first, then wait for input, then flush the output buffer and exit. If the child process doesn't behave like that, the parent process hangs. The reason is that the ReadFile() function remains blocked until the child process sends some output or exits.
Second, it has problems redirecting a 16-bit console (including console-based MS-DOS applications). On Windows 9x, ReadFile remains blocked even after the child process has terminated; on Windows NT/XP, ReadFile always returns FALSE with the error code set to ERROR_BROKEN_PIPE if the child process is a DOS application.
Solving the blocking problem of ReadFile
To prevent the parent process from being blocked by ReadFile, we can simply pass a file handle as stdout to the child process, then monitor this file. A simpler way is to call the PeekNamedPipe() function before calling ReadFile(). PeekNamedPipe checks for data in the pipe and returns immediately; if there is no data available in the pipe, don't call ReadFile.
By calling PeekNamedPipe before ReadFile, we also solve the blocking problem of redirecting a 16-bit console on Windows 9x.
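A minimal sketch of that approach, reusing the handle and buffer names from testadb() above (the poll interval is arbitrary):
/* Poll the pipe with PeekNamedPipe so ReadFile is only called when data is ready. */
for (;;) {
    DWORD dwAvail = 0;
    if (!PeekNamedPipe(hOutputRead, NULL, 0, NULL, &dwAvail, NULL))
        break;                          /* pipe closed or broken: we are done */
    if (dwAvail == 0) {
        DWORD code = 0;
        if (GetExitCodeProcess(pi.hProcess, &code) && code != STILL_ACTIVE)
            break;                      /* child exited and pipe is drained */
        Sleep(50);                      /* arbitrary poll interval */
        continue;
    }
    DWORD dwBytes = 0;
    if (!ReadFile(hOutputRead, buff + dwHasRead, len - dwHasRead, &dwBytes, NULL) || dwBytes == 0)
        break;
    dwHasRead += dwBytes;
    if (dwHasRead >= len)
        break;
}
buff[dwHasRead] = 0;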
