Android实现屏幕录制功能

本文实例为大家分享了Android实现屏幕录制功能的具体代码,供大家参考,具体内容如下

1.效果图:

2.添加依赖

dependencies {
 implementation fileTree(dir: 'libs', include: ['*.jar'])
 implementation"org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
 implementation 'androidx.appcompat:appcompat:1.1.0'
 implementation 'androidx.core:core-ktx:1.0.2'
 implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
 testImplementation 'junit:junit:4.12'
 androidTestImplementation 'androidx.test.ext:junit:1.1.1'
 androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
 api 'com.blankj:utilcode:1.24.4'
}
repositories {
 mavenCentral()
}

3.注册权限:

<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />

4.主界面

test.aac是录屏的时候配的音乐,可以随便找另外一个放到assets文件夹里面进行替换

package com.ufi.pdioms.ztkotlin

import android.content.Intent
import android.content.res.AssetFileDescriptor
import android.media.MediaPlayer
import android.os.Build
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.util.Log
import android.widget.Toast
import com.blankj.utilcode.util.PathUtils
import kotlinx.android.synthetic.main.activity_main.*

class MainActivity : AppCompatActivity() {
 // https://github.com/fanqilongmoli/AndroidScreenRecord
 private var screenRecordHelper: ScreenRecordHelper? = null

 // Background music muxed over the recording; opened lazily from assets.
 private val afdd: AssetFileDescriptor by lazy { assets.openFd("test.aac") }

 // Plays test.aac while the screen is being recorded.
 private var mediaPlayer: MediaPlayer? = null

 override fun onCreate(savedInstanceState: Bundle?) {
  super.onCreate(savedInstanceState)
  setContentView(R.layout.activity_main)

  btnStart.setOnClickListener {
   if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
    if (screenRecordHelper == null) {
     screenRecordHelper = ScreenRecordHelper(this, object : ScreenRecordHelper.OnVideoRecordListener {
      override fun onBeforeRecord() {
      }

      override fun onStartRecord() {
       play()
      }

      override fun onCancelRecord() {
       releasePlayer()
      }

      override fun onEndRecord() {
       releasePlayer()
      }

     }, PathUtils.getExternalStoragePath() + "/fanqilong")
    }
    screenRecordHelper?.apply {
     if (!isRecording) {
      // To capture microphone audio (ambient sound is unavoidable), enable the
      // flag below and call the no-argument stopRecord() when stopping.
//      recordAudio = true
      startRecord()
     }
    }
   } else {
    // MediaProjection requires API 21+.
    Toast.makeText(this@MainActivity.applicationContext, "sorry,your phone does not support recording screen", Toast.LENGTH_LONG).show()
   }
  }

  btnStop.setOnClickListener {
   if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
    screenRecordHelper?.apply {
     if (isRecording) {
      // Capture into a local so the null check smart-casts (the property is
      // mutable, so `mediaPlayer!!` was needed before and could race).
      val player = mediaPlayer
      if (player != null) {
       // The parameterized stop muxes test.aac over the video; note that it
       // discards any microphone audio recorded via recordAudio.
       stopRecord(player.duration.toLong(), 15 * 1000, afdd)
      } else {
       stopRecord()
      }
     }
    }
   }
  }
 }

 /** Starts looping playback of the bundled test.aac asset. */
 private fun play() {
  mediaPlayer = MediaPlayer()
  try {
   mediaPlayer?.apply {
    this.reset()
    this.setDataSource(afdd.fileDescriptor, afdd.startOffset, afdd.length)
    this.isLooping = true
    this.prepare()
    this.start()
   }
  } catch (e: Exception) {
   // Best-effort: recording continues even if the music fails to play.
   Log.d("fanqilong", "播放音乐失败")
  }
 }

 /**
  * Stops and releases the music player.
  * stop() throws IllegalStateException when the player never reached the
  * Started state (e.g. prepare() failed in play()), so it is guarded;
  * release() is always safe to call.
  */
 private fun releasePlayer() {
  mediaPlayer?.apply {
   try {
    stop()
   } catch (e: Exception) {
    Log.d("fanqilong", "MediaPlayer stop failed: ${e.message}")
   }
   release()
  }
  mediaPlayer = null
 }

 override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
  super.onActivityResult(requestCode, resultCode, data)
  // Forward the screen-capture consent result to the helper.
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && data != null) {
   screenRecordHelper?.onActivityResult(requestCode, resultCode, data)
  }
 }

 override fun onDestroy() {
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
   screenRecordHelper?.clearAll()
  }
  afdd.close()
  super.onDestroy()
 }
}

5.录屏代码

package com.ufi.pdioms.ztkotlin

import android.app.Activity
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.content.res.AssetFileDescriptor
import android.hardware.display.DisplayManager
import android.hardware.display.VirtualDisplay
import android.media.*
import android.media.projection.MediaProjection
import android.media.projection.MediaProjectionManager
import android.net.Uri
import android.os.Build
import android.os.Environment
import android.os.Handler
import android.util.DisplayMetrics
import android.util.Log
import android.widget.Toast
import androidx.annotation.RequiresApi
import com.blankj.utilcode.constant.PermissionConstants
import com.blankj.utilcode.util.PermissionUtils
import java.io.File
import java.lang.Exception
import java.nio.ByteBuffer

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
class ScreenRecordHelper @JvmOverloads constructor(
 private var activity: Activity,
 private val listener: OnVideoRecordListener?,
 private var savePath: String = Environment.getExternalStorageDirectory().absolutePath + File.separator
   + "DCIM" + File.separator + "Camera",
 private val saveName: String = "record_${System.currentTimeMillis()}"
) {

 private val mediaProjectionManager by lazy { activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE) as? MediaProjectionManager }
 private var mediaRecorder: MediaRecorder? = null
 private var mediaProjection: MediaProjection? = null
 private var virtualDisplay: VirtualDisplay? = null
 private val displayMetrics by lazy { DisplayMetrics() }
 // The in-progress ".tmp" recording; renamed to ".mp4" (or muxed) on stop.
 private var saveFile: File? = null

 /** True while a recording session is running. */
 var isRecording = false

 /** Set to true before [startRecord] to also capture microphone audio. */
 var recordAudio = false

 init {
  activity.windowManager.defaultDisplay.getMetrics(displayMetrics)
 }

 /**
  * Requests storage/microphone permissions, then launches the system
  * screen-capture consent dialog. The result must be forwarded to
  * [onActivityResult] by the host activity.
  */
 fun startRecord() {
  if (mediaProjectionManager == null) {
   Log.d(TAG, "mediaProjectionManager == null,当前手机暂不支持录屏")
   showToast(R.string.phone_not_support_screen_record)
   return
  }

  PermissionUtils.permission(PermissionConstants.STORAGE, PermissionConstants.MICROPHONE)
   .callback(object : PermissionUtils.SimpleCallback {
    override fun onGranted() {
     mediaProjectionManager?.apply {
      listener?.onBeforeRecord()
      val intent = this.createScreenCaptureIntent()
      // Make sure some activity can actually handle the capture intent.
      if (activity.packageManager.resolveActivity(
        intent,
        PackageManager.MATCH_DEFAULT_ONLY
       ) != null
      ) {
       activity.startActivityForResult(intent, REQUEST_CODE)
      } else {
       showToast(R.string.phone_not_support_screen_record)
      }
     }
    }

    override fun onDenied() {
     showToast(R.string.permission_denied)
    }

   }).request()
 }

 /** Resumes a paused recording (API 24+). */
 @RequiresApi(Build.VERSION_CODES.N)
 fun resume() {
  mediaRecorder?.resume()
 }

 /** Pauses the current recording (API 24+). */
 @RequiresApi(Build.VERSION_CODES.N)
 fun pause() {
  mediaRecorder?.pause()
 }

 /**
  * Handles the screen-capture consent result: builds the MediaProjection
  * and starts the recorder once the user has granted permission.
  */
 fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent) {
  if (requestCode == REQUEST_CODE) {
   if (resultCode == Activity.RESULT_OK) {
    mediaProjection = mediaProjectionManager!!.getMediaProjection(resultCode, data)

    // Some devices show a confirmation dialog at this point; start slightly
    // delayed so the dialog is not captured at the beginning of the video.
    Handler().postDelayed({
     if (initRecorder()) {
      isRecording = true
      mediaRecorder?.start()
      listener?.onStartRecord()
     } else {
      showToast(R.string.phone_not_support_screen_record)
     }
    }, 150)
   } else {
    showToast(R.string.phone_not_support_screen_record)
   }
  }
 }

 /** Aborts the current recording and deletes the partial file. */
 fun cancelRecord() {
  stopRecord()
  saveFile?.delete()
  saveFile = null
  listener?.onCancelRecord()
 }

 /**
  * Stops recording. When [audioDuration] and [afdd] are supplied, the audio
  * asset is muxed over the captured video (which drops any microphone audio
  * recorded via [recordAudio]); otherwise the file is simply renamed to .mp4.
  */
 fun stopRecord(videoDuration: Long = 0, audioDuration: Long = 0, afdd: AssetFileDescriptor? = null) {
  stop()
  if (audioDuration != 0L && afdd != null) {
   syntheticAudio(videoDuration, audioDuration, afdd)
  } else {
   // No background audio requested: rename the .tmp recording to .mp4.
   if (saveFile != null) {
    val newFile = File(savePath, "$saveName.mp4")
    saveFile!!.renameTo(newFile)
    refreshVideo(newFile)
   }
   saveFile = null
  }
 }

 /** Validates the finished file and asks the media scanner to index it. */
 private fun refreshVideo(newFile: File) {
  Log.d(TAG, "screen record end,file length:${newFile.length()}.")
  if (newFile.length() > 5000) {
   val intent = Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE)
   intent.data = Uri.fromFile(newFile)
   activity.sendBroadcast(intent)
   Log.e("TAG","refreshVideo: "+savePath)
   showToast(R.string.save_to_album_success)
  } else {
   // A file this small is almost certainly a failed recording; discard it.
   newFile.delete()
   showToast(R.string.phone_not_support_screen_record)
   Log.d(TAG, activity.getString(R.string.record_faild))
  }
 }

 /**
  * Stops the MediaRecorder and releases the display/projection.
  * Always notifies [OnVideoRecordListener.onEndRecord], even when stop() throws.
  */
 private fun stop() {
  if (isRecording) {
   isRecording = false
   try {
    mediaRecorder?.apply {
     setOnErrorListener(null)
     setOnInfoListener(null)
     setPreviewDisplay(null)
     stop()
     Log.d(TAG, "stop success")
    }
   } catch (e: Exception) {
    Log.e(TAG, "stopRecorder() error!${e.message}")
   } finally {
    mediaRecorder?.reset()
    virtualDisplay?.release()
    mediaProjection?.stop()
    listener?.onEndRecord()
   }

  }
 }

 /**
  * Configures the MediaRecorder and the VirtualDisplay feeding it.
  * @return false when prepare() fails (configuration unsupported by the device).
  */
 private fun initRecorder(): Boolean {
  var result = true
  val f = File(savePath)
  if (!f.exists()) {
   // mkdirs(), not mkdir(): the default savePath (".../DCIM/Camera") may be
   // several levels deep and mkdir() only creates the last segment.
   f.mkdirs()
  }
  saveFile = File(savePath, "$saveName.tmp")
  saveFile?.apply {
   if (exists()) {
    delete()
   }
  }
  mediaRecorder = MediaRecorder()
  // Cap the resolution at 1080x1920 to keep encoder load and bitrate sane.
  val width = Math.min(displayMetrics.widthPixels, 1080)
  val height = Math.min(displayMetrics.heightPixels, 1920)
  mediaRecorder?.apply {
   if (recordAudio) {
    setAudioSource(MediaRecorder.AudioSource.MIC)
   }
   setVideoSource(MediaRecorder.VideoSource.SURFACE)
   setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
   setVideoEncoder(MediaRecorder.VideoEncoder.H264)
   if (recordAudio) {
    setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB)
   }
   setOutputFile(saveFile!!.absolutePath)
   setVideoSize(width, height)
   setVideoEncodingBitRate(8388608)
   setVideoFrameRate(VIDEO_FRAME_RATE)

   try {

    prepare()
    virtualDisplay = mediaProjection?.createVirtualDisplay(
     "MainScreen", width, height, displayMetrics.densityDpi,
     DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, surface, null, null
    )
    Log.d(TAG, "initRecorder 成功")
   } catch (e: Exception) {
    Log.e(TAG, "IllegalStateException preparing MediaRecorder: ${e.message}")
    e.printStackTrace()
    result = false
   }
  }

  return result
 }

 private fun showToast(resId: Int) {
  Toast.makeText(activity.applicationContext, activity.applicationContext.getString(resId), Toast.LENGTH_SHORT)
   .show()
 }

 /** Releases every held resource; call from the host activity's onDestroy. */
 fun clearAll() {
  mediaRecorder?.release()
  mediaRecorder = null
  virtualDisplay?.release()
  virtualDisplay = null
  mediaProjection?.stop()
  mediaProjection = null
 }

 /**
  * Muxes the audio asset [afdd] over the recorded (silent) video file.
  *
  * FIX: the original declared the parameters as (audioDuration, videoDuration)
  * while [stopRecord] passed them as (videoDuration, audioDuration), silently
  * swapping the two and inverting the trim ratio. The declaration now matches
  * the call site, so only the slice of audio needed to cover the video
  * (length * videoDuration / audioDuration) is read from the asset.
  *
  * https://stackoverflow.com/questions/31572067/android-how-to-mux-audio-file-and-video-file
  */
 private fun syntheticAudio(videoDuration: Long, audioDuration: Long, afdd: AssetFileDescriptor) {
  Log.d(TAG, "start syntheticAudio")
  val newFile = File(savePath, "$saveName.mp4")
  if (newFile.exists()) {
   newFile.delete()
  }
  try {
   newFile.createNewFile()
   val videoExtractor = MediaExtractor()
   videoExtractor.setDataSource(saveFile!!.absolutePath)
   val audioExtractor = MediaExtractor()
   afdd.apply {
    // Trim the audio to the portion that covers the video's duration.
    audioExtractor.setDataSource(fileDescriptor, startOffset, length * videoDuration / audioDuration)
   }
   val muxer = MediaMuxer(newFile.absolutePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
   videoExtractor.selectTrack(0)
   val videoFormat = videoExtractor.getTrackFormat(0)
   val videoTrack = muxer.addTrack(videoFormat)

   audioExtractor.selectTrack(0)
   val audioFormat = audioExtractor.getTrackFormat(0)
   val audioTrack = muxer.addTrack(audioFormat)

   var sawEOS = false
   var frameCount = 0
   val offset = 100
   val sampleSize = 1000 * 1024
   val videoBuf = ByteBuffer.allocate(sampleSize)
   val audioBuf = ByteBuffer.allocate(sampleSize)
   val videoBufferInfo = MediaCodec.BufferInfo()
   val audioBufferInfo = MediaCodec.BufferInfo()

   videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC)
   audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC)

   muxer.start()

   // Frames per second; some devices (observed on an OPPO R9em) do not
   // report MediaFormat.KEY_FRAME_RATE, so fall back to a default.
   val frameRate = if (videoFormat.containsKey(MediaFormat.KEY_FRAME_RATE)) {
    videoFormat.getInteger(MediaFormat.KEY_FRAME_RATE)
   } else {
    31
   }
   // Average interval between frames, in microseconds.
   val videoSampleTime = 1000 * 1000 / frameRate
   while (!sawEOS) {
    videoBufferInfo.offset = offset
    videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset)
    if (videoBufferInfo.size < 0) {
     sawEOS = true
     videoBufferInfo.size = 0
    } else {
     // Synthesize evenly spaced timestamps from the average frame interval.
     videoBufferInfo.presentationTimeUs += videoSampleTime
     videoBufferInfo.flags = videoExtractor.sampleFlags
     muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo)
     videoExtractor.advance()
     frameCount++
    }
   }
   var sawEOS2 = false
   var frameCount2 = 0
   while (!sawEOS2) {
    frameCount2++
    audioBufferInfo.offset = offset
    audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset)

    if (audioBufferInfo.size < 0) {
     sawEOS2 = true
     audioBufferInfo.size = 0
    } else {
     audioBufferInfo.presentationTimeUs = audioExtractor.sampleTime
     audioBufferInfo.flags = audioExtractor.sampleFlags
     muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo)
     audioExtractor.advance()
    }
   }
   muxer.stop()
   muxer.release()
   videoExtractor.release()
   audioExtractor.release()

   // Remove the silent source video now that the muxed copy exists.
   saveFile?.delete()
  } catch (e: Exception) {
   Log.e(TAG, "Mixer Error:${e.message}")
   // Muxing failed: keep the silent video rather than losing the recording.
   saveFile?.renameTo(newFile)

  } finally {
   afdd.close()
   Handler().post {
    refreshVideo(newFile)
    saveFile = null
   }
  }
 }

 interface OnVideoRecordListener {

  /**
   * Called before the capture consent dialog appears; hide any UI that
   * should not end up in the recording.
   */
  fun onBeforeRecord()

  /**
   * Recording has actually started.
   */
  fun onStartRecord()

  /**
   * Recording was cancelled and the partial file deleted.
   */
  fun onCancelRecord()

  /**
   * Recording finished.
   */
  fun onEndRecord()
 }

 companion object {
  private const val VIDEO_FRAME_RATE = 30
  private const val REQUEST_CODE = 1024
  private const val TAG = "ScreenRecordHelper"
 }
}

6.布局

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
 xmlns:app="http://schemas.android.com/apk/res-auto"
 xmlns:tools="http://schemas.android.com/tools"
 android:layout_width="match_parent"
 android:layout_height="match_parent"
 android:orientation="vertical"
 tools:context=".MainActivity">

 <Button android:id="@+id/btnStart"
  android:layout_width="match_parent"
  android:layout_height="wrap_content"
  android:textAllCaps="false"
  android:text="start"/>

 <Button android:id="@+id/btnStop"
  android:layout_width="match_parent"
  android:layout_height="wrap_content"
  android:textAllCaps="false"
  android:text="stop"/>

</LinearLayout>

以上就是本文的全部内容,希望对大家的学习有所帮助,也希望大家多多支持我们。

(0)

相关推荐

  • Android5.1 取消录制屏幕跳出的权限对话框问题

    当录制屏幕时总是跳出一个对话框让确定是否录制,总是会引起不必要的麻烦,于是找到启动获取屏幕信息的代码 Intent captureIntent = mMediaProjectionManager.createScreenCaptureIntent(); startActivityForResult(captureIntent, REQUEST_CODE); 找到createScreenCaptureIntent()方法 在源码中找到 frameworks/base/packages/System

  • Android 录制手机屏幕视频生成GIF图片实例详解

    Android 录制手机屏幕视频生成GIF图片实例详解 无图无真相,在我们日常的网络交流中往往需要给交流对象提供直观的显示,而视频是一个很好的方式,但是视频需要播放器,还需要当做文件进行对点传输,并不是很方便.想CSDN这样的博客网站也并不支持在博客里放视频这种方式,除非你贴外链,太烦了不是么.最好是如下图这种gif方式,直观 今天来教大家一个易操作的录制方式.当然,一般只适合Android开发者.因为你需要有AndroidStudio 工具 AndroidStudio(完成手机屏幕的视频录制,

  • Android 5.0+ 屏幕录制实现的示例代码

    前言 Android 从 4.0 开始就提供了手机录屏方法,但是需要 root 权限,比较麻烦不容易实现.但是从 5.0 开始,系统提供给了 app 录制屏幕的一系列方法,不需要 root 权限,只需要用户授权即可录屏,相对来说较为简单.本文是在参考了网络上其他录屏资料后完成的, 感谢 .以下将介绍开发录屏功能的一系列步骤以及实现过程中所遇到的一些需要注意的事项. 实现步骤 1.在清单文件中声明需要的权限 因为录制用到麦克风,所以需要加上 AUDIO 权限, <uses-permission a

  • Android实现屏幕录制功能

    本文实例为大家分享了Android实现屏幕录制功能的具体代码,供大家参考,具体内容如下 1.效果图: 2.添加依赖 dependencies { implementation fileTree(dir: 'libs', include: ['*.jar']) implementation"org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" implementation 'androidx.appcompat:appcompa

  • Python实现屏幕录制功能的代码

    前段时间做视频时需要演示电脑端的操作,因此要用到屏幕录制,下载了个迅捷屏幕录制,但是没有vip录制的视频有水印且只能录制二分钟,于是鄙人想了下能不能通过万能的python来实现呢?经过一晚上的尝试发现这条路是可以走的通的.分享一下自己的想法,整体思路是PIL模块中的ImageGrab不停的获得当前屏幕,利用opencv写入视频流话不多说,直接上代码,有什么更好的建议,欢迎大家交流! """python + opencv 实现屏幕录制_by-_Zjh_""&

  • 只用20行Python代码实现屏幕录制功能

    一.模块安装 首先,我们需要用到两个python的两个模块,win32gui和PyQt5 1.pip install win32gui 2.pip install PyQt5 1.pip install win32gui PS C:\Users\lex\Desktop> pip install win32gui Looking in indexes: http://mirrors.aliyun.com/pypi/simple Requirement already satisfied: win3

  • Android 微信小视频录制功能实现详细介绍

    Android 微信小视频录制功能 开发之前 这几天接触了一下和视频相关的控件, 所以, 继之前的微信摇一摇, 我想到了来实现一下微信小视频录制的功能, 它的功能点比较多, 我每天都抽出点时间来写写, 说实话, 有些东西还是比较费劲, 希望大家认真看看, 说得不对的地方还请大家在评论中指正. 废话不多说, 进入正题. 开发环境 最近刚更新的, 没更新的小伙伴们抓紧了 Android Studio 2.2.2 JDK1.7 API 24 Gradle 2.2.2 相关知识点 视频录制界面 Surf

  • Android手机屏幕敲击解锁功能代码

    1.前言 现在市面上有不少Android手机支持敲击屏幕解锁,敲击屏幕解锁是一项很实用的功能,但一来只支持敲击屏幕,二来只能用于解锁或锁屏,再者我们应用层的开发者切不进去,完全无法玩起来.开发者,开发者,我们既然身为开发者何不搞点大新闻,那么这次我来教教各位如何用代码来实现手机的敲击识别,听起来是不是很有趣,有些跃跃欲试呢.事实上在ios上已经有实现这个功能的应用:Knock,一款敲击来解锁Mac电脑的应用,售价4.99美元,约为33人民币.有时候真想去做ios开发,可以开心的为自己的应用定价,

  • 详解Android应用开发中Scroller类的屏幕滑动功能运用

    今天给大家介绍下Android中滑屏功能的一个基本实现过程以及原理初探,最后给大家重点讲解View视图中scrollTo 与scrollBy这两个函数的区别 .   首先 ,我们必须明白在Android View视图是没有边界的,Canvas是没有边界的,只不过我们通过绘制特定的View时对Canvas对象进行了一定的操作,例如 : translate(平移).clipRect(剪切)等,以便达到我们的对该Canvas对象绘制的要求 ,我们可以将这种无边界的视图称为"视图坐标"----

  • Android利用手势完成屏幕密码锁功能

    本文实例为大家分享了Android画笔屏幕锁小程序,具有一定的参考价值,感兴趣的小伙伴们可以参考一下 1.如果使用GestureOverlayView,在xml配置文件中使用Android.gesture.GestureOverlayView,否则会报ClassNotFoundException 2.关于判断MEDIA_MOUNTED,API的解释:getExternalStorageState() returns MEDIA_MOUNTED if the media ispresent and

  • Android开发实现调节屏幕亮度功能

    本文实例讲述了Android开发实现调节屏幕亮度功能.分享给大家供大家参考,具体如下: 在很多app中进入二维码显示界面时会自动调整屏幕亮度,那么如何实现调节app的屏幕亮度呢?下面我来为大家介绍: 注:调节屏幕亮度的核心思想就是对安卓系统提供的ContentProvider进行操作 1.声明权限 需要允许用户修改系统配置 <uses-permission android:name="android.permission.CHANGE_CONFIGURATION"/> &l

  • Android使用MediaRecorder类实现视频和音频录制功能

    一.前期基础知识储备 Android提供了MediaRecorder这一个类来实现视频和音频的录制. 由官方配图可知,MediaRecorder用于录制视频时需要调用一系列的API来设置和录制相关的配置,而且调用方法的顺序是固定的,必须按照这个顺序进行API调用才能正确利用手机摄像头实现录像功能. 调用MediaRecorder的录像API顺序如下: 1)Open Camera - Use the Camera.open() to get an instance of the camera ob

随机推荐