package com.cagneymoreau.sensors;

import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.ParcelFileDescriptor;
import android.support.annotation.NonNull;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;

import com.cagneymoreau.commchannel.R;
import com.cagneymoreau.network.RobotPoint;
import com.cagneymoreau.utility.AutoFitTextureView;
import com.cagneymoreau.utility.Debug;
import com.cagneymoreau.utility.MySharedPref;
import com.cagneymoreau.videoencoding.SDPMaker;
import com.cagneymoreau.videoencoding.TransH264;
import com.cagneymoreau.videoencoding.TransferH264;

import java.io.File;
import java.io.FileDescriptor;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

public class MyVideo {

    Activity mActivity;
    View mView;
    RobotPoint mRobotPoint;
    MySharedPref pref;

    boolean running = false;
    boolean collectSDP = false;

    // SharedPreferences keys
    private static final String SDKEY = "sessiondecription";
    private static final String SDBITKEY = "sessionbitrate";
    private static final String SDFRAME = "csessionframerate";
    private static final String SPSKEY = "spssharedprefkey";
    private static final String PPSKEY = "ppssharedprefkey";
    private static final String PREFKEY = "prefixsharedprefkey";

    private String sessionDescription = "";
    int bitrate = 10000000;
    int framerate = 10;
    String tempFilePath;
    byte[][] spspps;

    TransferH264 transferH264;
    TransH264 transH264;
    private int ssrc;

    public MyVideo(Activity act, View v, RobotPoint robotPoint) {
        mActivity = act;
        mView = v;
        mRobotPoint = robotPoint;

        ssrc = new Random().nextInt();
        robotPoint.getRtspClientServer().setSsrc(ssrc);
        robotPoint.getRtspClientServer().setVideoControl(this);

        mTextureView = (AutoFitTextureView) v.findViewById(R.id.texture);

        pref = MySharedPref.getInsance(mActivity);
        getSetupValues();
        resume();
    }
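    /*
     * Hypothetical usage sketch (not part of this class): an Activity whose
     * inflated layout contains the AutoFitTextureView with id "texture" might
     * wire this class up roughly as follows; RobotPoint construction is elided.
     *
     *     View root = findViewById(android.R.id.content);
     *     MyVideo video = new MyVideo(this, root, robotPoint);
     *     video.toggle();   // start streaming
     *     video.toggle();   // stop streaming
     *     video.pause();    // release the camera, e.g. from onPause()
     */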
    //region------------------------------------------------ boilerplate camera set up fields and callbacks

    private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
    private static final int SENSOR_ORIENTATION_INVERSE_DEGREES = 270;
    private static final SparseIntArray DEFAULT_ORIENTATIONS = new SparseIntArray();
    private static final SparseIntArray INVERSE_ORIENTATIONS = new SparseIntArray();

    private static final String TAG = "Camera2VideoFragment";
    private static final int REQUEST_VIDEO_PERMISSIONS = 1;
    private static final String FRAGMENT_DIALOG = "dialog";

    private static final String[] VIDEO_PERMISSIONS = {
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO,
    };

    static {
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_0, 90);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_90, 0);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_180, 270);
        DEFAULT_ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    static {
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_0, 270);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_90, 180);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_180, 90);
        INVERSE_ORIENTATIONS.append(Surface.ROTATION_270, 0);
    }

    /**
     * An {@link AutoFitTextureView} for camera preview.
     */
    private AutoFitTextureView mTextureView;

    /**
     * Button to record video (currently unused).
     */
    private Button mButtonVideo;

    /**
     * A reference to the opened {@link android.hardware.camera2.CameraDevice}.
     */
    private CameraDevice mCameraDevice;

    /**
     * A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for preview.
     */
    private CameraCaptureSession mPreviewSession;

    /**
     * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
     * {@link TextureView}.
     */
    private TextureView.SurfaceTextureListener mSurfaceTextureListener
            = new TextureView.SurfaceTextureListener() {

        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
            openCamera(width, height);
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
            configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
        }
    };

    /**
     * The {@link android.util.Size} of camera preview.
     */
    private Size mPreviewSize;

    /**
     * The {@link android.util.Size} of video recording.
     */
    private Size mVideoSize;

    /**
     * MediaRecorder
     */
    private MediaRecorder mMediaRecorder;

    /**
     * Whether the app is recording video now
     */
    private boolean mIsRecordingVideo;

    /**
     * An additional thread for running tasks that shouldn't block the UI.
     */
    private HandlerThread mBackgroundThread;

    /**
     * A {@link Handler} for running tasks in the background.
     */
    private Handler mBackgroundHandler;

    /**
     * A {@link Semaphore} to prevent the app from exiting before closing the camera.
     */
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);
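    /*
     * For reference: AutoFitTextureView (from the Google Camera2 samples) is a
     * TextureView that letterboxes itself to the ratio passed to
     * setAspectRatio(int, int). The usual implementation stores the ratio and
     * overrides onMeasure() roughly like this sketch (this project's copy may
     * differ):
     *
     *     if (width < height * mRatioWidth / mRatioHeight) {
     *         setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
     *     } else {
     *         setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
     *     }
     */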
    /**
     * {@link CameraDevice.StateCallback} is called when the {@link CameraDevice} changes its state.
     */
    private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            mCameraDevice = cameraDevice;
            startPreview();
            mCameraOpenCloseLock.release();
            if (null != mTextureView) {
                configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
            }
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
            Activity activity = mActivity;
            if (null != activity) {
                activity.finish();
            }
        }
    };

    private Integer mSensorOrientation;
    private String mNextVideoAbsolutePath;
    private CaptureRequest.Builder mPreviewBuilder;

    //endregion

    /**
     * getSetupValues() and setSetupValues() configure the stream; toggle() turns it on and off.
     */
    //region--------------------------------------------------------control methods

    // TODO: 9/14/2018 delete the cheap hacks inside this class

    //get previous values
    private void getSetupValues() {
        spspps = new byte[3][];
        spspps[0] = pref.getBytes(SPSKEY);
        spspps[1] = pref.getBytes(PPSKEY);
        spspps[2] = pref.getBytes(PREFKEY);
        debugPackets("setup sps", spspps[0]);
        debugPackets("setup pps", spspps[1]);
        debugPackets("setup prefix ", spspps[2]);

        sessionDescription = pref.getString(SDKEY);
        bitrate = pref.getInt(SDBITKEY);
        framerate = pref.getInt(SDFRAME);

        // TODO: 9/11/2018 temp code here - these hard-coded values override whatever was stored above
        bitrate = 10000000;
        framerate = 30;
        sessionDescription = "m=video 5006 RTP/AVP 96\n" +
                "b=RR:0\n" +
                "a=rtpmap:96 H264/90000\n" +
                "a=fmtp:96 packetization-mode=1;profile-level-id=428028;sprop-parameter-sets=Z0KAKJWgKA9E,aM48gA==;\n" +
                "a=control:trackID=0\n" +
                "m=audio 5004 RTP/AVP 96\n" +
                "b=AS:128\n" +
                "b=RR:0\n" +
                "a=rtpmap:96 AMR/8000\n" +
                "a=fmtp:96 octet-align=1;\n" +
                "a=control:trackID=1\n";
    }

    // Logs the given byte array as a binary string for inspection.
    private void debugPackets(String call, byte[] arr) {
        if (arr == null) {
            Log.d(TAG, call + " -> null (nothing stored yet)");
            return;
        }
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < arr.length; i++) {
            sb.append(String.format("%8s", Integer.toBinaryString(arr[i] & 0xFF)).replace(' ', '0'))
                    /*.append(" ->").append(String.valueOf(i)).append(" -> ")*/;
        }
        Log.d(TAG, call + sb.toString());
    }

    // TODO: 9/17/2018 check whether the new values match the old ones; if so just start streaming, otherwise call getSDP()
    //set new values from home base
    public void setSetupValues() {
        getSDP();
    }

    public byte[][] getSpspps() {
        return spspps;
    }

    public String getSessionDescription() {
        return sessionDescription;
    }
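    /*
     * The hard-coded SDP above carries the SPS and PPS as Base64 in the
     * "sprop-parameter-sets" attribute (RFC 6184), e.g. "Z0KAKJWgKA9E,aM48gA==".
     * A minimal sketch of recovering the raw NAL unit bytes from that attribute;
     * decodeSpropParameterSets is a hypothetical helper, not part of SDPMaker.
     */
    private static byte[][] decodeSpropParameterSets(String sprop) {
        String[] parts = sprop.split(",");   // typically { SPS, PPS }, no start codes
        byte[][] sets = new byte[parts.length][];
        for (int i = 0; i < parts.length; i++) {
            sets[i] = android.util.Base64.decode(parts[i], android.util.Base64.DEFAULT);
        }
        return sets;
    }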
    public void getSDP() {
        collectSDP = true; //set media recorder to record to device
        /*
        mActivity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                resume(); //set this class to be ready to record
            }
        });
        */
        new AsyncTask<Void, Void, Void>() {
            @Override
            protected Void doInBackground(Void... voids) {
                try {
                    Thread.sleep(3000); //terrible code, but we need to delay until resume() has finished
                } catch (InterruptedException e) {
                    Log.e(TAG, "run: ", e);
                }
                mActivity.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        toggle(); // start recording
                    }
                });
                try {
                    Thread.sleep(3000); //delay so we capture ~1 second of video
                } catch (InterruptedException e) {
                    Log.e(TAG, "run: ", e);
                }
                mActivity.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        toggle(); // stop recording
                    }
                });
                //stop recording and set back to streaming mode with boolean flag
                collectSDP = false;
                try {
                    Thread.sleep(3000); //delay so the file is fully written before parsing
                } catch (InterruptedException e) {
                    Log.e(TAG, "run: ", e);
                }
                Log.d(TAG, "run: " + tempFilePath);
                File videofile = new File(tempFilePath);
                try {
                    spspps = SDPMaker.retreiveSPSPPS(videofile); //get the data we need from the video file
                    pref.setBytes(SPSKEY, spspps[0]);
                    pref.setBytes(PPSKEY, spspps[1]);
                    pref.setBytes(PREFKEY, spspps[2]);
                    Debug.debugHex("myvideosave sps ", spspps[0], spspps[0].length);
                    Debug.debugHex("myvideosave pps ", spspps[1], spspps[1].length);
                    Debug.debugHex("myvideosave prefix", spspps[2], spspps[2].length);
                } catch (IOException ioe) {
                    Log.e(TAG, "getSDP: ", ioe);
                }
                //boolean deleted = videofile.delete(); // TODO: 9/12/2018 add this back in
                //Log.d(TAG, "getSDP: video deleted? " + deleted); //delete the video file as we are done with it now
                return null;
            }
        }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    }

    public void resume() {
        Log.d(TAG, "resume: ");
        startBackgroundThread();
        if (mTextureView.isAvailable()) {
            openCamera(mTextureView.getWidth(), mTextureView.getHeight());
        } else {
            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        }
        running = true;
    }

    public void pause() {
        closeCamera();
        stopBackgroundThread();
        running = false;
    }

    public void toggle() {
        if (!running) {
            resume();
        }
        if (mIsRecordingVideo) {
            Log.d(TAG, "toggle: playing now stop");
            stopRecordingVideo();
            //pause();
        } else {
            Log.d(TAG, "toggle: stopped now playing");
            new AsyncTask<Void, Void, Void>() {
                @Override
                protected Void doInBackground(Void... voids) {
                    startRecordingVideo();
                    return null;
                }
            }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
        }
    }

    private String getVideoFilePath(Context context) {
        final File dir = context.getExternalFilesDir(null);
        tempFilePath = (dir == null ? "" : (dir.getAbsolutePath() + "/"))
                + System.currentTimeMillis() + ".mp4";
        return tempFilePath;
    }

    // Creates a pipe, hands the read end to the packetizer thread, and returns the
    // write end for MediaRecorder to record into.
    private FileDescriptor getStreamFd() {
        ParcelFileDescriptor[] pipe = null;
        try {
            pipe = ParcelFileDescriptor.createPipe();
            /*
            new TransferThread(new ParcelFileDescriptor.AutoCloseInputStream(pipe[0]),
                    new Socket(), mRobotPoint).start();
            */
            transferH264 = new TransferH264(new ParcelFileDescriptor.AutoCloseInputStream(pipe[0]),
                    new Socket(), mRobotPoint, this, ssrc);
            transferH264.start();
            /*
            transH264 = new TransH264(new ParcelFileDescriptor.AutoCloseInputStream(pipe[0]),
                    new Socket(), mRobotPoint, this, ssrc);
            transH264.start();
            */
            /*
            new TransferH264(new ParcelFileDescriptor.AutoCloseInputStream(pipe[0]),
                    new Socket(), mRobotPoint, this).start();
            */
        } catch (IOException e) {
            Log.e(getClass().getSimpleName(), "Exception opening pipe", e);
        }
        if (pipe == null) {
            return null; // pipe creation failed; the recorder will fail to prepare
        }
        return pipe[1].getFileDescriptor();
    }
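    /*
     * Sketch of one way to remove the fixed Thread.sleep() hacks in getSDP():
     * let MediaRecorder stop itself after a fixed clip length and react to the
     * callback. Untested here and currently unused; it assumes it would be
     * called from setUpMediaRecorder() between setOutputFormat() and prepare().
     * setMaxDuration() and OnInfoListener are standard framework APIs.
     */
    private void armAutoStop(final MediaRecorder recorder) {
        recorder.setMaxDuration(1000); // ~1 second of video is enough to extract SPS/PPS
        recorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
            @Override
            public void onInfo(MediaRecorder mr, int what, int extra) {
                if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
                    mActivity.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            toggle(); // stop recording once the clip is long enough
                        }
                    });
                }
            }
        });
    }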
    //endregion

    //region------------------------------------------------------------ camera methods

    /**
     * In this sample, we choose a video size with a 4:3 aspect ratio. Also, we don't use sizes
     * wider than 1080 pixels, since MediaRecorder cannot handle such high-resolution video.
     *
     * @param choices The list of available sizes
     * @return The video size
     */
    private static Size chooseVideoSize(Size[] choices) {
        for (Size size : choices) {
            if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
                return size;
            }
        }
        Log.e(TAG, "Couldn't find any suitable video size");
        return choices[choices.length - 1];
    }

    /**
     * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
     * width and height are at least as large as the respective requested values, and whose aspect
     * ratio matches the specified value. For example, with choices of 640x480, 1920x1080 and
     * 1280x960, a 4:3 aspect ratio, and a 720x480 minimum, this returns 1280x960.
     *
     * @param choices     The list of sizes that the camera supports for the intended output class
     * @param width       The minimum desired width
     * @param height      The minimum desired height
     * @param aspectRatio The aspect ratio
     * @return The optimal {@code Size}, or an arbitrary one if none were big enough
     */
    private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
        // Collect the supported resolutions that are at least as big as the preview Surface
        List<Size> bigEnough = new ArrayList<>();
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            if (option.getHeight() == option.getWidth() * h / w
                    && option.getWidth() >= width && option.getHeight() >= height) {
                bigEnough.add(option);
            }
        }
        // Pick the smallest of those, assuming we found any
        if (bigEnough.size() > 0) {
            Size s = Collections.min(bigEnough, new CompareSizesByArea());
            Log.d(TAG, "chooseOptimalSize: " + s.toString());
            return s;
        } else {
            Log.e(TAG, "Couldn't find any suitable preview size");
            return choices[0];
        }
    }

    /**
     * Starts a background thread and its {@link Handler}.
     */
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /**
     * Stops the background thread and its {@link Handler}.
     */
    private void stopBackgroundThread() {
        if (mBackgroundThread == null) {
            return; // already stopped (e.g. pause() called twice)
        }
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    /**
     * Tries to open a {@link CameraDevice}; the result is delivered to {@code mStateCallback}.
     */
    @SuppressWarnings("MissingPermission")
    private void openCamera(int width, int height) {
        final Activity activity = mActivity;
        if (null == activity || activity.isFinishing()) {
            return;
        }
        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
        try {
            Log.d(TAG, "tryAcquire");
            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                throw new RuntimeException("Time out waiting to lock camera opening.");
            }
            String cameraId = manager.getCameraIdList()[0];

            // Choose the sizes for camera preview and video recording
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            if (map == null) {
                throw new RuntimeException("Cannot get available preview/video sizes");
            }
            mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    width, height, mVideoSize);

            int orientation = activity.getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            configureTransform(width, height);
            mMediaRecorder = new MediaRecorder();
            manager.openCamera(cameraId, mStateCallback, null);
        } catch (CameraAccessException e) {
            Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
            activity.finish();
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2 API is used but not supported on the
            // device this code runs on.
            ErrorDialog.newInstance("camera error")
                    .show(mActivity.getFragmentManager(), FRAGMENT_DIALOG);
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera opening.");
        }
    }

    private void closeCamera() {
        try {
            mCameraOpenCloseLock.acquire();
            closePreviewSession();
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
            if (null != mMediaRecorder) {
                mMediaRecorder.release();
                mMediaRecorder = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera closing.");
        } finally {
            mCameraOpenCloseLock.release();
        }
    }

    /**
     * Start the camera preview.
     */
    private void startPreview() {
        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
            return;
        }
        try {
            closePreviewSession();
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

            Surface previewSurface = new Surface(texture);
            mPreviewBuilder.addTarget(previewSurface);

            mCameraDevice.createCaptureSession(Collections.singletonList(previewSurface),
                    new CameraCaptureSession.StateCallback() {

                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            mPreviewSession = session;
                            updatePreview();
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                            Activity activity = mActivity;
                            if (null != activity) {
                                Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
                            }
                        }
                    }, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
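    /*
     * The permission-request flow from the original Camera2Video sample was
     * stripped out, which leaves VIDEO_PERMISSIONS and REQUEST_VIDEO_PERMISSIONS
     * unused. A minimal check that could guard openCamera(), assuming API 23+;
     * this helper is a sketch and is not called anywhere yet.
     */
    private boolean hasVideoPermissions() {
        for (String permission : VIDEO_PERMISSIONS) {
            if (mActivity.checkSelfPermission(permission)
                    != android.content.pm.PackageManager.PERMISSION_GRANTED) {
                return false;
            }
        }
        return true;
    }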
    /**
     * Update the camera preview. {@link #startPreview()} needs to be called in advance.
     */
    private void updatePreview() {
        if (null == mCameraDevice) {
            return;
        }
        try {
            setUpCaptureRequestBuilder(mPreviewBuilder);
            mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
    }

    /**
     * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
     * This method should not be called until the camera preview size is determined in
     * openCamera, or until the size of `mTextureView` is fixed.
     *
     * @param viewWidth  The width of `mTextureView`
     * @param viewHeight The height of `mTextureView`
     */
    private void configureTransform(int viewWidth, int viewHeight) {
        Activity activity = mActivity;
        if (null == mTextureView || null == mPreviewSize || null == activity) {
            return;
        }
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }

    private void setUpMediaRecorder() throws IOException {
        Log.d(TAG, "setUpMediaRecorder: ");
        final Activity activity = mActivity;
        if (null == activity) {
            Log.d(TAG, "setUpMediaRecorder: returning");
            return;
        }
        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT);
        if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
            mNextVideoAbsolutePath = getVideoFilePath(mActivity);
        }
        if (collectSDP) {
            // Record to a real file so SDPMaker can parse SPS/PPS out of it afterwards
            Log.d(TAG, "setUpMediaRecorder: collect");
            mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
        } else {
            // Stream into the pipe feeding the RTP packetizer
            Log.d(TAG, "setUpMediaRecorder: dontcollect");
            mMediaRecorder.setOutputFile(getStreamFd());
        }
        //mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
        mMediaRecorder.setVideoEncodingBitRate(bitrate);
        mMediaRecorder.setVideoFrameRate(framerate);
        mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
        //mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        switch (mSensorOrientation) {
            case SENSOR_ORIENTATION_DEFAULT_DEGREES:
                mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
                break;
            case SENSOR_ORIENTATION_INVERSE_DEGREES:
                mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
                break;
        }
        mMediaRecorder.prepare();
    }
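    /*
     * Why recording into a pipe works at all: MP4-style containers need a
     * seekable output (the muxer rewrites the moov box when recording stops),
     * so what MediaRecorder writes into the pipe is never a finalized MP4.
     * TransferH264 presumably ignores the container and scans the byte stream
     * for raw H.264 NAL units instead, the trick used by libstreaming-style
     * projects; that is also why getSDP() has to capture SPS/PPS separately
     * from a real, seekable file.
     */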
    private void startRecordingVideo() {
        if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) {
            return;
        }
        try {
            closePreviewSession();
            setUpMediaRecorder();
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            List<Surface> surfaces = new ArrayList<>();

            // Set up Surface for the camera preview
            Surface previewSurface = new Surface(texture);
            surfaces.add(previewSurface);
            mPreviewBuilder.addTarget(previewSurface);

            // Set up Surface for the MediaRecorder
            Surface recorderSurface = mMediaRecorder.getSurface();
            surfaces.add(recorderSurface);
            mPreviewBuilder.addTarget(recorderSurface);

            // Start a capture session.
            // Once the session starts, we can update the UI and start recording.
            mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {

                @Override
                public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                    mPreviewSession = cameraCaptureSession;
                    updatePreview();
                    mActivity.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            // UI
                            mIsRecordingVideo = true;
                            // Start recording
                            mMediaRecorder.start();
                        }
                    });
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                    Activity activity = mActivity;
                    if (null != activity) {
                        Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
                    }
                }
            }, mBackgroundHandler);
        } catch (CameraAccessException | IOException e) {
            e.printStackTrace();
        }
    }

    private void closePreviewSession() {
        if (mPreviewSession != null) {
            mPreviewSession.close();
            mPreviewSession = null;
        }
    }

    private void stopRecordingVideo() {
        Log.d(TAG, "stopRecordingVideo: 1");
        if (transferH264 != null) {
            transferH264.setNotEOF(false);
        }
        if (transH264 != null) {
            transH264.setNotEOF(false);
        }
        // UI
        mIsRecordingVideo = false;
        Log.d(TAG, "stopRecordingVideo: 2");
        // Stop recording
        try {
            mPreviewSession.stopRepeating();
            mPreviewSession.abortCaptures();
        } catch (CameraAccessException cae) {
            Log.e(TAG, "stopRecordingVideo: ", cae);
        }
        Log.d(TAG, "stopRecordingVideo: 3");
        // stop() throws a RuntimeException when no valid data reached the muxer,
        // which is the normal case while streaming through the pipe.
        // TODO: 9/18/2018 ...trying the not eof to close the stream
        try {
            mMediaRecorder.stop();
        } catch (RuntimeException rte) {
            Log.e(TAG, "stopRecordingVideo: stop() failed (expected while streaming)", rte);
        }
        Log.d(TAG, "stopRecordingVideo: 4");
        mMediaRecorder.reset();
        Log.d(TAG, "stopRecordingVideo: 5");
        Activity activity = mActivity;
        if (null != activity) {
            //Toast.makeText(activity, "Video saved: " + mNextVideoAbsolutePath, Toast.LENGTH_SHORT).show();
            Log.d(TAG, "Video saved: " + mNextVideoAbsolutePath);
        }
        Log.d(TAG, "stopRecordingVideo: 6");
        mNextVideoAbsolutePath = null;
        Log.d(TAG, "stopRecordingVideo: completed");
    }

    /**
     * Compares two {@code Size}s based on their areas.
     */
    static class CompareSizesByArea implements Comparator<Size> {

        @Override
        public int compare(Size lhs, Size rhs) {
            // We cast here to ensure the multiplications won't overflow
            return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                    - (long) rhs.getWidth() * rhs.getHeight());
        }
    }

    public static class ErrorDialog extends DialogFragment {

        private static final String ARG_MESSAGE = "message";

        public static ErrorDialog newInstance(String message) {
            ErrorDialog dialog = new ErrorDialog();
            Bundle args = new Bundle();
            args.putString(ARG_MESSAGE, message);
            dialog.setArguments(args);
            return dialog;
        }

        // Minimal dialog so the message passed to newInstance() is actually shown.
        @Override
        public Dialog onCreateDialog(Bundle savedInstanceState) {
            return new AlertDialog.Builder(getActivity())
                    .setMessage(getArguments().getString(ARG_MESSAGE))
                    .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialogInterface, int i) {
                            dismiss();
                        }
                    })
                    .create();
        }
    }
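    /*
     * For reference: TransferH264 (not shown in this file) packetizes the H.264
     * stream into RTP using the ssrc generated in the constructor. A minimal
     * sketch of the fixed 12-byte RTP header it would need per RFC 3550; the
     * actual packetization (FU-A fragmentation etc.) lives in TransferH264,
     * and this helper is illustrative only.
     */
    private static byte[] buildRtpHeader(int seq, long timestamp, int ssrc, boolean marker) {
        byte[] h = new byte[12];
        h[0] = (byte) 0x80;                           // V=2, P=0, X=0, CC=0
        h[1] = (byte) ((marker ? 0x80 : 0x00) | 96);  // M bit + dynamic payload type 96
        h[2] = (byte) (seq >> 8);                     // sequence number, big-endian
        h[3] = (byte) seq;
        h[4] = (byte) (timestamp >> 24);              // 90 kHz timestamp for video
        h[5] = (byte) (timestamp >> 16);
        h[6] = (byte) (timestamp >> 8);
        h[7] = (byte) timestamp;
        h[8] = (byte) (ssrc >> 24);                   // synchronization source id
        h[9] = (byte) (ssrc >> 16);
        h[10] = (byte) (ssrc >> 8);
        h[11] = (byte) ssrc;
        return h;
    }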
"" : (dir.getAbsolutePath() + "/")) + System.currentTimeMillis() + ".mp4"); //return (new File(Environment.getExternalStorageDirectory().getPath().toString() + "/YourDirectory/filename")); } /** * This was my original transfer thread for experimenting */ static class TransferThread extends Thread { InputStream in; //FileOutputStream out; Socket out; OutputStream stream; RobotPoint robotPoint; TransferThread(InputStream in, Socket out, RobotPoint robotPoint) { this.in = in; this.out = out; this.robotPoint = robotPoint; //out = robotPoint.getMediaSockert(); try{ stream = out.getOutputStream(); }catch (IOException ioe){ Log.e(TAG, "TransferThread: ", ioe); } } @Override public void run() { byte[] buf = new byte[8192]; int len; StringBuilder sb; short[] shorts; try { while ((len = in.read(buf)) > 0) { /* sb = new StringBuilder(); shorts = new short[buf.length/2]; ByteBuffer.wrap(buf).order(ByteOrder.BIG_ENDIAN).asShortBuffer().get(shorts); for (short s : shorts) { sb.append(String.valueOf(s)); } Log.d(TAG, "run: " + String.valueOf(len)); Log.d(TAG, sb.toString()); */ sb = new StringBuilder(); for (byte b : buf) { sb.append(String.format("%02X", b)); sb.append(" "); } Log.d(TAG, sb.toString()); stream.write(buf, 0, len); //out.write(buf, 0, len); } in.close(); stream.flush(); stream.close(); //out.flush(); //out.getFD().sync(); out.close(); } catch (IOException e) { Log.e(getClass().getSimpleName(), "Exception transferring file", e); } } } //endregion }