I want to build a simple video stream. The applications I am using are an Android app and a simple media player written in C#. Most of the sample code I have reviewed does a POST to an IP server. My application also runs over a GSM network, so it has to use packet data to transfer the media.
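For context, the samples I reviewed upload the recorded file with a plain HTTP POST, roughly like the sketch below. This is only an illustration of that pattern, not code I am running: the URL and the helper name postFile are placeholders, it assumes java.net.HttpURLConnection plus the usual java.io imports, and it would also have to run off the UI thread.
private void postFile(String path) throws IOException {
    // placeholder URL; the real server address is not decided yet
    URL url = new URL("http://example.com/upload");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setDoOutput(true);
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "video/mp4");
    OutputStream out = conn.getOutputStream();
    FileInputStream in = new FileInputStream(path);
    byte[] buf = new byte[4096];
    int n;
    while ((n = in.read(buf)) != -1) {
        out.write(buf, 0, n); // copy the file body into the request
    }
    out.flush();
    in.close();
    Log.v(TAG, "POST finished, HTTP " + conn.getResponseCode());
    conn.disconnect();
}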
My first step was to build an Android app that can record video. That step is done. Here is my code:
public class Videotest1Activity extends Activity implements
SurfaceHolder.Callback, OnInfoListener, OnErrorListener{
Camera camera;
VideoView videoView;
SurfaceHolder holder;
TextView msg;
Button initBtn, startBtn, stopBtn, playBtn, stprevBtn;
MediaRecorder recorder;
String outputFileName;
static final String TAG = "RecordVideo";
int maxDuration = 7000;//7sec
int frameRate = 1;//15
String serverIP = "172.19.117.12";
int serverPort = 2000;
Socket socket;
int mCount;
//TimerThread mTimer;
Chronometer chronometer;
LocalSocket receiver,sender;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_videotest1);
videoView = (VideoView) findViewById(R.id.videoView1);
initBtn = (Button) findViewById(R.id.initialize);
startBtn = (Button) findViewById(R.id.button1);
stopBtn = (Button) findViewById(R.id.button3);
msg = (TextView) findViewById(R.id.textView1);
playBtn = (Button) findViewById(R.id.reviewBtn);
stprevBtn = (Button) findViewById(R.id.stprevBtn);
chronometer = (Chronometer) findViewById(R.id.chronometer1);
/*
mTimer= new TimerThread();
mTimer.setOnAlarmListener(mSTimer_OnAlarm);
mTimer.setPeriod(100);
*/
}
public void buttonTapped(View view){
switch(view.getId()){
case R.id.initialize:
initRecorder();
break;
case R.id.button1:
beginRecording();
break;
case R.id.button3:
stopRecording();
break;
case R.id.reviewBtn:
playRecording();
break;
case R.id.stprevBtn:
stopPlayback();
break;
}
}
@Override
public void onError(MediaRecorder mr, int what, int extra) {
Log.e(TAG, "Record error");
stopRecording();
Toast.makeText(this, "Recording error", Toast.LENGTH_SHORT).show();
}
@Override
public void onInfo(MediaRecorder mr, int what, int extra) {
Log.i(TAG, "recording event");
if(what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED){
Log.i(TAG, "...max duration reached");
stopRecording();
Toast.makeText(this, "Recording limit reached", Toast.LENGTH_SHORT).show();
}
}
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
}
@Override
public void surfaceCreated(SurfaceHolder arg0) {
Log.v(TAG, "in surfaceCreated");
try{
camera.setPreviewDisplay(holder);
camera.startPreview();
}catch(IOException e){
Log.v(TAG, "Could not start the preview");
e.printStackTrace();
}
initBtn.setEnabled(true);
startBtn.setEnabled(true);
stopBtn.setEnabled(true);
}
@Override
public void surfaceDestroyed(SurfaceHolder arg0) {
}
protected void onResume(){
Log.v(TAG, "in onResume");
super.onResume();
initBtn.setEnabled(false);
startBtn.setEnabled(false);
stopBtn.setEnabled(false);
playBtn.setEnabled(false);
stprevBtn.setEnabled(false);
if(!initCamera())
finish();
}
public boolean initCamera(){
try{
camera = Camera.open();
Camera.Parameters camParam = camera.getParameters();
camera.lock();
holder = videoView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
//Thread thread = new Thread(new hantarThread());
//thread.start();
}catch(RuntimeException re){
Log.v(TAG, "Could not initialize the camera");
re.printStackTrace();
return false;
}
return true;
}
public void initRecorder(){
if(recorder != null)return;
outputFileName = Environment.getExternalStorageDirectory() + "/videooutput.mp4";
File outputFile = new File(outputFileName);
if(outputFile.exists())
outputFile.delete(); // remove any previous recording with the same name
try{
Toast.makeText(this, "InitRecord", Toast.LENGTH_SHORT).show();
camera.stopPreview();
camera.unlock();
recorder = new MediaRecorder();
recorder.setCamera(camera);
recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setVideoSize(176, 144);
recorder.setVideoFrameRate(15);//15
recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);//mpeg_4
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setMaxDuration(60000);
recorder.setPreviewDisplay(holder.getSurface());
recorder.setOutputFile(outputFileName);
recorder.prepare();
Log.v(TAG, "MediaRecorder initialized");
initBtn.setEnabled(false);
startBtn.setEnabled(true);
}catch(Exception e){
Log.v(TAG, "MediaRecorder failed");
e.printStackTrace();
}
}
public void beginRecording(){
//mCount = 0;
//mTimer.start();
try{
Log.v(TAG, "start Recording begin");
int stoppedMilliseconds = 0;
String chronoText = chronometer.getText().toString();
String array[] = chronoText.split(":");
if (array.length == 2) {
stoppedMilliseconds = Integer.parseInt(array[0]) * 60 * 1000
+ Integer.parseInt(array[1]) * 1000;
} else if (array.length == 3) {
stoppedMilliseconds = Integer.parseInt(array[0]) * 60 * 60 * 1000
+ Integer.parseInt(array[1]) * 60 * 1000
+ Integer.parseInt(array[2]) * 1000;
}
chronometer.setBase(SystemClock.elapsedRealtime() - stoppedMilliseconds);
//chronometer.setBase(SystemClock.elapsedRealtime());
chronometer.start();
Log.v(TAG, "timer start");
long dTime = chronometer.getDrawingTime();
long autoLink = chronometer.getAutoLinkMask();
Log.v(TAG, "getDrawingTime: " + dTime);
Log.v(TAG, "AutoLink: " + autoLink);
recorder.setOnInfoListener(this);
recorder.setOnErrorListener(this);
recorder.start();
msg.setText("Recording");
startBtn.setEnabled(false);
stopBtn.setEnabled(true);
}catch(Exception e){
Log.v(TAG, "start Recording failed");
e.printStackTrace();
}
}
public void stopRecording(){
if(recorder != null){
recorder.setOnErrorListener(null);
recorder.setOnInfoListener(null);
try{
recorder.stop();
Log.v(TAG, "stop Record Begin");
chronometer.stop();
Log.v(TAG, "Timer stop");
//mTimer.stop();
}catch(IllegalStateException e){
Log.e(TAG, "stop is ILLEGAL");
}
releaseRecorder();
msg.setText("");
releaseCamera();
startBtn.setEnabled(false);
stopBtn.setEnabled(false);
playBtn.setEnabled(true);
}
else{
Log.v(TAG, "video cannot stop. Video null");
long autoLink = chronometer.getAutoLinkMask();
Log.v(TAG, "stop aLink: " + autoLink);
long elapsedMillis = SystemClock.elapsedRealtime() - chronometer.getBase();
Log.v(TAG, "elapsedMillis: " + elapsedMillis);
}
}
private void releaseCamera(){
if(camera != null){
try{
camera.reconnect();
}catch(IOException e){
e.printStackTrace();
}
camera.release();
camera = null;
}
}
private void releaseRecorder(){
if(recorder != null){
recorder.release();
recorder = null;
}
}
private void playRecording(){
MediaController mc = new MediaController(this);
videoView.setMediaController(mc);
videoView.setVideoPath(outputFileName);
videoView.start();
stprevBtn.setEnabled(true);
}
private void stopPlayback(){
videoView.stopPlayback();
}
}
The next step was to make the media player play videooutput.mp4. To play it, the player first has to load the video and then start playback. Well, that is the second step, and it also works fine. Here is my code:
private void button6_Click_1(object sender, EventArgs e)
{
if (openFileDialog1.ShowDialog() == DialogResult.OK)
{
// create video source
FileVideoSource fileSource = new FileVideoSource(openFileDialog1.FileName);
// open it
OpenVideoSource(fileSource);
}
}
// Open video source
private void OpenVideoSource(IVideoSource source)
{
// set busy cursor
this.Cursor = Cursors.WaitCursor;
// stop current video source
CloseCurrentVideoSource();
// start new video source
videoSourcePlayer.VideoSource = source;
videoSourcePlayer.Start();
// reset stop watch
stopWatch = null;
// start timer
videoTimer.Start();
this.Cursor = Cursors.Default;
}
// New frame received by the player
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
DateTime now = DateTime.Now;
Graphics g = Graphics.FromImage(image);
// paint current time
SolidBrush brush = new SolidBrush(Color.Red);
g.DrawString(now.ToString(), this.Font, brush, new PointF(5, 5));
brush.Dispose();
g.Dispose();
}
// Close video source if it is running
private void CloseCurrentVideoSource()
{
if (videoSourcePlayer.VideoSource != null)
{
videoSourcePlayer.SignalToStop();
// wait ~ 3 seconds
for (int i = 0; i < 30; i++)
{
if (!videoSourcePlayer.IsRunning)
break;
System.Threading.Thread.Sleep(100);
}
if (videoSourcePlayer.IsRunning)
{
videoSourcePlayer.Stop();
}
videoSourcePlayer.VideoSource = null;
}
}
I am using AForge as the media player in C#. All I did was add references to AForge.Controls, AForge.Video and AForge.Video.DirectShow.
Now what I need to do is send the media data byte by byte to the media player (C#), and I do not know how to do this. I also found sample code for an Android IP camera that sends video to a server, but I still do not know how to do that in my own application.
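My rough idea so far is to reuse the serverIP, serverPort and outputFileName fields that are already in the Activity and push the finished file over a plain TCP socket once recording stops, something like the sketch below. sendRecordedFile() is just my own helper name and I have not tested this over GSM; it needs java.net.Socket plus the usual java.io imports, and it runs on a background thread so the UI is not blocked.
private void sendRecordedFile() {
    new Thread(new Runnable() {
        @Override
        public void run() {
            Socket s = null;
            FileInputStream in = null;
            try {
                s = new Socket(serverIP, serverPort);
                OutputStream out = s.getOutputStream();
                in = new FileInputStream(outputFileName);
                byte[] buffer = new byte[4096];
                int read;
                // push the recording byte-for-byte onto the socket
                while ((read = in.read(buffer)) != -1) {
                    out.write(buffer, 0, read);
                }
                out.flush();
            } catch (IOException e) {
                Log.e(TAG, "sending recorded file failed", e);
            } finally {
                try {
                    if (in != null) in.close();
                    if (s != null) s.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }).start();
}
On the C# side I imagine something like a System.Net.Sockets.TcpListener that writes the incoming bytes to a temporary .mp4 file and then hands that path to FileVideoSource. But this only transfers the whole file after recording stops; it does not give me a live stream, which is the part I am unsure about.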
Note that the GSM link will need the phone number of the SIM card being used, as well as the port number of the GSM modem.
Can anyone help me with this, or give me some ideas on how to modify the code? Any help is appreciated. Thanks in advance.