android_media_MediaRecorder.cpp（JNI层） -> MediaRecorder.cpp：class MediaRecorder : public BnMediaRecorderClient, public virtual IMediaDeathNotifier
接口文件（Binder）：IMediaRecorder.cpp
MediaRecorderClient.cpp：class MediaRecorderClient : public BnMediaRecorder
mRecorder = AVMediaServiceFactory::get()->createStagefrightRecorder(opPackageName); // 即 new StagefrightRecorder(opPackageName);
StagefrightRecorder.cpp 以MPEG4为例子: sp<MediaWriter> mWriter: MPEG4Writer::MPEG4Writer(int fd), 其中fd是输出文件。 writer->addSource(encoder); writer->addSource(audioEncoder);
源的创建: setupMediaSource(&mediaSource); => setupCameraSource(&cameraSource); =>
if (mCaptureFpsEnable && mCaptureFps != mFrameRate ) { if (!(mCaptureFps > 0.)) { ALOGE("Invalid mCaptureFps value: %lf", mCaptureFps); return BAD_VALUE; } mCameraSourceTimeLapse = AVFactory::get()->CreateCameraSourceTimeLapseFromCamera( mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid, videoSize, mFrameRate, mPreviewSurface, std::llround(1e6 / mCaptureFps)); *cameraSource = mCameraSourceTimeLapse; } else { *cameraSource = AVFactory::get()->CreateCameraSourceFromCamera( mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, mClientPid, videoSize, mFrameRate, mPreviewSurface); }
=> return CameraSource::CreateFromCamera(camera, proxy, cameraId, clientName, clientUid, clientPid, videoSize, frameRate, surface, storeMetaDataInVideoBuffers);
=> CameraSource *source = new CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid, videoSize, frameRate, surface, storeMetaDataInVideoBuffers);
sp<MediaCodecSource> encoder = MediaCodecSource::Create( mLooper, format, cameraSource, mPersistentSurface, flags); mGraphicBufferProducer = encoder->getGraphicBufferProducer(); mVideoEncoderSource = encoder;
camera模块回调到mediarecorder模块: void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs, native_handle_t* handle) { mFramesReceived.push_back(data); //数据保存到mFramesReceived mFrameAvailableCondition.signal(); } status_t 
CameraSource::read( MediaBuffer **buffer, const ReadOptions *options) { mFramesBeingEncoded.push_back(frame); *buffer = new MediaBuffer(frame->pointer(), frame->size()); }
read函数的调用: 首先MPEG4Writer启动后，走到MediaCodecSource.cpp的 status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) { sp<AMessage> msg = new AMessage(kWhatStart, this); }
然后: case kWhatStart: { mSource->start(static_cast<MetaData *>(obj.get())); schedulePull(); }
void MediaCodecSource::Puller::schedulePull() { (new AMessage(kWhatPull, this))->post(); }
然后处理 case kWhatPull: { mSource->read(&mbuf); //这个就是CameraSource的read queue->pushBuffer(mbuf); mNotify->post(); //mPuller->start(meta.get(), notify); 触发 kWhatPullerNotify msg->post(); //循环跑到case kWhatPull: }
然后在MediaCodecSource处理 case kWhatPullerNotify: { feedEncoderInputBuffers(); }
status_t MediaCodecSource::feedEncoderInputBuffers() { mPuller->readBuffer(&mbuf); status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf); memcpy(inbuf->data(), mbuf->data(), size); inbuf->setMediaBufferBase(mbuf); status_t err = mEncoder->queueInputBuffer( bufferIndex, 0, size, timeUs, flags); }
status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) { std::vector<BufferInfo> &buffers = mPortBuffers[kPortIndexInput]; const BufferInfo &info = buffers[index]; *buffer = info.mData; }
status_t MediaCodec::queueInputBuffer( size_t index, size_t offset, size_t size, int64_t presentationTimeUs, uint32_t flags, AString *errorDetailMsg) { }
case kWhatQueueInputBuffer: { status_t err = onQueueInputBuffer(msg); }
status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) { err = mBufferChannel->queueInputBuffer(buffer); }
来自: 新用户8389DdzY > 《高通》