julioz / AudioCaptureSample

Sample project to test the AudioCapture API introduced in Android 10 (Q)

can't create file after recording

Playmaker0210 opened this issue

My Android emulator doesn't show the .pcm files of the recorded audio after I press the stop button. When I restart the emulator and check the directory, the files appear, but I can't open them. Here is my code that records the screen with MediaProjection and captures audio.

```java

public class RecordingService extends Service {
private final IBinder mIBinder = new RecordingBinder();

private static final String TAG = "hung";
//    private static final int PERMISSION_RECORD_DISPLAY = 1;
private static final List<Resolution> RESOLUTIONS = new ArrayList<Resolution>() {{
    add(new Resolution(640, 360));
    add(new Resolution(960, 540));
    add(new Resolution(1366, 768));
    add(new Resolution(1600, 900));
}};
private static final int NOTIFICATION_ID = 123;
private int mScreenDensity;
private MediaProjectionManager mProjectionManager;
private int mDisplayWidth;
private int mDisplayHeight;
private boolean mScreenSharing;
private MediaProjection mMediaProjection;
private VirtualDisplay mVirtualDisplay;
private MediaProjectionCallback mMediaProjectionCallback;
private MediaRecorder mMediaRecorder;
WindowManager mWindowManager;
private boolean mIsRecording = false;

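// Maps Surface rotation constants to orientation-hint degrees for the MediaRecorder.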
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

static {
    ORIENTATIONS.append(Surface.ROTATION_0, 90);
    ORIENTATIONS.append(Surface.ROTATION_90, 0);
    ORIENTATIONS.append(Surface.ROTATION_180, 270);
    ORIENTATIONS.append(Surface.ROTATION_270, 180);
}


private Resolution mResolution;
private Intent mScreenCaptureIntent;
private int mScreenCaptureResultCode;
private AudioRecord audioRecord;
private Thread audioCaptureThread;
private static final int NUM_SAMPLES_PER_READ = 1024;
private static final int BYTES_PER_SAMPLE = 2; // 2 bytes since we hardcoded the PCM 16-bit format
private static final int BUFFER_SIZE_IN_BYTES = NUM_SAMPLES_PER_READ * BYTES_PER_SAMPLE;


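// Prepares the MediaRecorder and obtains the MediaProjection before capture starts.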
public void prepareToRecording() {
    Log.d(TAG, "RecordingService: prepareToRecording()");
    initRecorder();
    shareScreen();
}

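// Promotes the service to the foreground, sets up the recorder and projection, and starts both video and audio capture.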
public void startRecording() {
    mIsRecording = true;
    startForeground(NOTIFICATION_ID, buildNotification());
    prepareToRecording();
    mVirtualDisplay = createVirtualDisplay();
    mMediaRecorder.start();
    startAudioCapture();
}

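// Stops the audio capture thread and AudioRecord first, then the MediaRecorder, the virtual display and the foreground notification.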
public void stopRecording() {
    audioCaptureThread.interrupt();
    try {
        audioCaptureThread.join();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    if(audioRecord != null) {
        audioRecord.stop();
        audioRecord.release();
        audioRecord = null;
    }
    mMediaRecorder.stop();
    mMediaRecorder.reset();
    Log.d(TAG, "Stopping Recording");
    stopScreenSharing();
    stopForeground(true);
}


public class RecordingBinder extends Binder {
    public RecordingService getService() {
        return RecordingService.this;
    }

}

public RecordingService() {
}

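// The binding activity passes in the screen-capture permission intent; the result code is read from its extras.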
@Override
public IBinder onBind(Intent intent) {
    Log.d(TAG, "RecordingService: onBind()");
    mScreenCaptureIntent = intent.getParcelableExtra(Intent.EXTRA_INTENT);
    mScreenCaptureResultCode = mScreenCaptureIntent.getIntExtra(UiUtils.SCREEN_CAPTURE_INTENT_RESULT_CODE, UiUtils.RESULT_CODE_FAILED);
    Log.d(TAG, "onBind: " + mScreenCaptureIntent);
    return mIBinder;
}

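// Caches the display density, creates the MediaRecorder and MediaProjectionManager, and picks a default capture resolution.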
@Override
public void onCreate() {
    super.onCreate();
    Log.d(TAG, "RecordingService: onCreate()");
    mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);

    DisplayMetrics metrics = new DisplayMetrics();
    mWindowManager.getDefaultDisplay().getMetrics(metrics);

    mMediaRecorder = new MediaRecorder();

    mScreenDensity = metrics.densityDpi;

    mProjectionManager =
            (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);

    // TODO: choose resolution and orientation
    mResolution = RESOLUTIONS.get(3);
    mDisplayWidth = mResolution.y;
    mDisplayHeight = mResolution.x;
}


private void stopScreenSharing() {
    Log.d(TAG, "RecordingService: stopScreenSharing()");
    mScreenSharing = false;
    if (mVirtualDisplay == null) {
        return;
    }
    mVirtualDisplay.release();
    destroyMediaProjection();
}

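// Configures the MediaRecorder to encode H.264 video from the projection surface into a timestamped file in the public Downloads directory.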
private void initRecorder() {
    Log.d(TAG, "RecordingService: initRecorder()");
    String timeStamp = new SimpleDateFormat("yyyy-MM-dd-HH-mm").format(new Date());
    try {
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mMediaRecorder.setOutputFile(Environment
                .getExternalStoragePublicDirectory(Environment
                        .DIRECTORY_DOWNLOADS) + "/SCREC-" + timeStamp + ".mp4");
        mMediaRecorder.setVideoSize(mDisplayWidth, mDisplayHeight);
        mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
        mMediaRecorder.setVideoEncodingBitRate(512 * 1000);
        mMediaRecorder.setVideoFrameRate(30);
        int rotation = mWindowManager.getDefaultDisplay().getRotation();
        int orientation = ORIENTATIONS.get(rotation); // look up the orientation hint for the current rotation
        mMediaRecorder.setOrientationHint(orientation);
        mMediaRecorder.prepare();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

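// Obtains the MediaProjection from the stored permission result and registers a callback for when it stops.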
private void shareScreen() {
    Log.d(TAG, "RecordingService: initRecorder()");
    mScreenSharing = true;
    if (mMediaProjection == null) {
        mMediaProjectionCallback = new MediaProjectionCallback();
        mMediaProjection = mProjectionManager.getMediaProjection(mScreenCaptureResultCode, mScreenCaptureIntent);
        mMediaProjection.registerCallback(mMediaProjectionCallback, null);
    }

}


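// Mirrors the display into a virtual display whose output surface is the MediaRecorder's input surface.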
private VirtualDisplay createVirtualDisplay() {
    Log.d(TAG, "RecordingService: createVirtualDisplay()");
    return mMediaProjection.createVirtualDisplay("ScreenSharingDemo",
            mDisplayWidth, mDisplayHeight, mScreenDensity,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
            mMediaRecorder.getSurface(), null /*Callbacks*/, null /*Handler*/);
}

private void destroyMediaProjection() {
    Log.d(TAG, "RecordingService: destroyMediaProjection()");
    if (mMediaProjection != null) {
        mMediaProjection.unregisterCallback(mMediaProjectionCallback);
        mMediaProjection.stop();
        mMediaProjection = null;
    }
    Log.i(TAG, "MediaProjection Stopped");
}

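// Called when the projection is stopped by the system or the user; tears down the recording as well.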
private class MediaProjectionCallback extends MediaProjection.Callback {
    @Override
    public void onStop() {
        if (mIsRecording) {
            mIsRecording = false;
            mMediaRecorder.stop();
            mMediaRecorder.reset();
            Log.v(TAG, "Recording Stopped");
        }
        mMediaProjection = null;
        stopRecording();
    }
}

private static class Resolution {
    int x;
    int y;

    public Resolution(int x, int y) {
        this.x = x;
        this.y = y;
    }

    @Override
    public String toString() {
        return x + "x" + y;
    }
}

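// Builds the foreground-service notification, creating a notification channel on Android O and above.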
private Notification buildNotification() {
    NotificationCompat.Builder builder;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
        String channelId = "channel_01";
        CharSequence channelName = "Channel Name";
        int importance = android.app.NotificationManager.IMPORTANCE_DEFAULT;
        NotificationChannel notificationChannel = new NotificationChannel(channelId, channelName, importance);
        notificationChannel.setLightColor(Color.GREEN);
        notificationChannel.setLockscreenVisibility(Notification.VISIBILITY_PRIVATE);
        android.app.NotificationManager notificationManager = (android.app.NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
        assert notificationManager != null;
        notificationManager.createNotificationChannel(notificationChannel);
        builder = new NotificationCompat.Builder(this, channelId);
    } else {
        builder = new NotificationCompat.Builder(this);
    }

    Intent notificationIntent = new Intent(this, RecordingService.class);
    PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, notificationIntent, PendingIntent.FLAG_IMMUTABLE);

    builder.setContentTitle("Recording in progress")
            .setContentText("Tap to open")
            .setSmallIcon(R.drawable.ic_rec)
            .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_rec))
            .setContentIntent(pendingIntent);

    return builder.build();
}

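// Uses the Android 10+ AudioPlaybackCapture API to capture other apps' USAGE_MEDIA / USAGE_UNKNOWN playback through the MediaProjection, writing the raw PCM on a background thread.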
private void startAudioCapture() {
    AudioPlaybackCaptureConfiguration config = null;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
        config = new AudioPlaybackCaptureConfiguration.Builder(mMediaProjection)
                .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                .addMatchingUsage(AudioAttributes.USAGE_UNKNOWN) // TODO provide UI options for inclusion/exclusion
                .build();
    }

    AudioFormat audioFormat = new AudioFormat.Builder()
            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
            .setSampleRate(8000)
            .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
            .build();

    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
        return;
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
        audioRecord = new AudioRecord.Builder()
                .setAudioFormat(audioFormat)
                .setBufferSizeInBytes(BUFFER_SIZE_IN_BYTES)
                .setAudioPlaybackCaptureConfig(config)
                .build();
    }

    audioCaptureThread = new Thread(() -> {
        audioRecord.startRecording();
        File outputFile = createAudioFile();
        Log.d(TAG, "Created file for capture target: " + outputFile.getAbsolutePath());
        writeAudioToFile(outputFile);
    });
    audioCaptureThread.start();
}

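// Creates Downloads/AudioCaptures/Capture-<timestamp>.pcm as the output file for the raw capture.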
private File createAudioFile() {
    File audioCapturesDirectory = new File(Environment
            .getExternalStoragePublicDirectory(Environment
                    .DIRECTORY_DOWNLOADS), "/AudioCaptures");
    if (!audioCapturesDirectory.exists()) {
        audioCapturesDirectory.mkdirs();
    }
    String timestamp = new SimpleDateFormat("dd-MM-yyyy-hh-mm-ss", Locale.US).format(new Date());
    String fileName = "Capture-" + timestamp + ".pcm";
    File res = new File(audioCapturesDirectory.getAbsolutePath() + "/" + fileName);
    Log.v(TAG, res.getAbsolutePath());
    return res;
}

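// Reads 16-bit samples until the capture thread is interrupted and appends them to the file as raw little-endian PCM (no WAV header).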
private void writeAudioToFile(File outputFile) {
    try (FileOutputStream fileOutputStream = new FileOutputStream(outputFile)) {
        short[] capturedAudioSamples = new short[NUM_SAMPLES_PER_READ];

        while (!audioCaptureThread.isInterrupted()) {
            audioRecord.read(capturedAudioSamples, 0, NUM_SAMPLES_PER_READ);

            // This loop should be as fast as possible to avoid artifacts in the captured audio.
            // You can uncomment the following lines to inspect the captured samples, but
            // that will incur a performance hit due to the logging I/O.
            // Log.d(TAG, "Audio samples captured: " + Arrays.toString(capturedAudioSamples));

            byte[] bytes = toByteArray(capturedAudioSamples);
            // Log.v(TAG, "Bytes captured: " + Arrays.toString(bytes));
            fileOutputStream.write(bytes, 0, bytes.length);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    Log.d(TAG, "Audio capture finished for " + outputFile.getAbsolutePath() + ". File size is " + outputFile.length() + " bytes.");
}

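// Converts 16-bit samples to little-endian byte pairs (low byte first).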
private byte[] toByteArray(short[] shorts) {
    byte[] bytes = new byte[shorts.length * 2];
    for (int i = 0; i < shorts.length; i++) {
        bytes[i * 2] = (byte) (shorts[i] & 0x00FF);
        bytes[i * 2 + 1] = (byte) ((shorts[i] & 0xFF00) >> 8);
    }
    return bytes;
}

}

```