Saving Unity Screenshots and Screen Recordings to the iOS Photo Album


1. First, create a new script in Unity that sends the screenshot / screen-recording file path to iOS.

using UnityEngine;
using System.Runtime.InteropServices;

public class UnitySendMessageToiOS : MonoBehaviour
{
    // Native entry points implemented in the Xcode project (see step 3)
    [DllImport("__Internal")]
    private static extern void _TakeAPictureBtn(string path);

    [DllImport("__Internal")]
    private static extern void _RecordingBtn(string path);

    // Send the screenshot file path to iOS
    public static void SentPicturePathToiOS(string path)
    {
        if (Application.platform != RuntimePlatform.OSXEditor)
        {
            _TakeAPictureBtn(path);
        }
    }

    // Send the recorded video file path to iOS
    public static void SentVideoPathToiOS(string path)
    {
        if (Application.platform != RuntimePlatform.OSXEditor)
        {
            _RecordingBtn(path);
        }
    }
}
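For reference, a minimal sketch of the same bridge using compile-time guards instead of the runtime platform check; the class name UnitySendMessageToiOSGuarded and the Debug.Log fallbacks are illustrative assumptions, not part of the original project.

using UnityEngine;
using System.Runtime.InteropServices;

public class UnitySendMessageToiOSGuarded : MonoBehaviour
{
#if UNITY_IOS && !UNITY_EDITOR
    // Only declare the native entry points in actual iOS builds
    [DllImport("__Internal")] private static extern void _TakeAPictureBtn(string path);
    [DllImport("__Internal")] private static extern void _RecordingBtn(string path);
#endif

    public static void SentPicturePathToiOS(string path)
    {
#if UNITY_IOS && !UNITY_EDITOR
        _TakeAPictureBtn(path);   // calls into the native plugin in the Xcode project
#else
        Debug.Log("Picture path (editor / non-iOS): " + path);
#endif
    }

    public static void SentVideoPathToiOS(string path)
    {
#if UNITY_IOS && !UNITY_EDITOR
        _RecordingBtn(path);
#else
        Debug.Log("Video path (editor / non-iOS): " + path);
#endif
    }
}

The compile-time guard avoids even referencing the native functions when running in the Editor or on other platforms, which the runtime OSXEditor check alone does not fully cover.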

2. Call the following from the screenshot button and the stop-recording button click events.

Screenshot implementation that sends the file path to iOS (the capture does not include UI):

// CameraTrans (the camera to capture from) and m_OtherCameraPath (the output file path)
// are fields set elsewhere in this script.
public void TakeAPictureBtnClick()
{
    StartCoroutine(CaptureByCamera(CameraTrans, new Rect(0, 0, 1024, 768), m_OtherCameraPath));
}

private IEnumerator CaptureByCamera(Camera mCamera, Rect mRect, string mFileName)
{
    // Wait for the render thread to finish this frame
    yield return new WaitForEndOfFrame();

    // Create the RenderTexture; depth can only be 0, 16 or 24 - change it if the capture is incomplete
    RenderTexture mRender = new RenderTexture((int)mRect.width, (int)mRect.height, 16);

    // Set the camera's render target and render
    mCamera.targetTexture = mRender;
    mCamera.Render();

    // Activate the render texture and read the pixels into a Texture2D
    RenderTexture.active = mRender;
    Texture2D mTexture = new Texture2D((int)mRect.width, (int)mRect.height, TextureFormat.RGB24, false);
    mTexture.ReadPixels(mRect, 0, 0);
    mTexture.Apply();

    // Release the camera target and destroy the render texture
    mCamera.targetTexture = null;
    RenderTexture.active = null;
    GameObject.Destroy(mRender);

    // Encode the texture to PNG bytes and save to disk
    byte[] bytes = mTexture.EncodeToPNG();
    System.IO.File.WriteAllBytes(mFileName, bytes);

    // Send the path to iOS only after the file has been written
    // (sending it right after StartCoroutine would race the file write)
    UnitySendMessageToiOS.SentPicturePathToiOS(mFileName);

    // To display the screenshot, return mTexture here instead
}
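If a full-screen capture that includes the UI is wanted instead, recent Unity versions provide ScreenCapture.CaptureScreenshot (older versions use Application.CaptureScreenshot). The sketch below is an assumed alternative, not the article's code; the class and file names are illustrative, and it reuses the SentPicturePathToiOS bridge from step 1.

using System.Collections;
using System.IO;
using UnityEngine;

public class FullScreenCaptureExample : MonoBehaviour
{
    public void TakeFullScreenshot()
    {
        StartCoroutine(CaptureAndSend("full_screenshot.png"));
    }

    private IEnumerator CaptureAndSend(string fileName)
    {
        // On iOS/Android the screenshot is written relative to Application.persistentDataPath
        ScreenCapture.CaptureScreenshot(fileName);
        string fullPath = Path.Combine(Application.persistentDataPath, fileName);

        // The capture is asynchronous; wait until the file exists (with a simple safety limit)
        int safetyFrames = 300;
        while (!File.Exists(fullPath) && safetyFrames-- > 0)
            yield return null;

        if (File.Exists(fullPath))
            UnitySendMessageToiOS.SentPicturePathToiOS(fullPath);
        else
            Debug.LogWarning("Screenshot was not written: " + fullPath);
    }
}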



Screen-recording implementation. Recording uses ShareREC (import the ShareREC-for-Unity3d package; register an AppKey on the Share website):


using com.mob;
using System;

// These methods live in a MonoBehaviour in the scene (the class declaration was omitted in the original post).

void Start ()
{
    ShareREC.registerApp("1dd5e013484a8");
}

public void RecordingBtnClick()
{
    ShareREC.startRecoring();
}

public void StopRecordingBtnClick()
{
    FinishedRecordEvent evt = new FinishedRecordEvent(recordFinishedHandler);
    ShareREC.stopRecording(evt);
}

void recordFinishedHandler(Exception ex)
{
    if (ex == null)
    {
        UnitySendMessageToiOS.SentVideoPathToiOS (ShareREC.lastRecordingPath());
        //ShareREC.playLastRecording();
    }
}
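As a small defensive variation (an assumption, not from the original post), the finish handler can verify that ShareREC actually produced a file before handing the path to the native side; the class name RecordFinishHandlerExample is illustrative.

using System;
using System.IO;
using UnityEngine;
using com.mob;

public class RecordFinishHandlerExample : MonoBehaviour
{
    // Passed to ShareREC.stopRecording via a FinishedRecordEvent, like the article's
    // recordFinishedHandler, but with extra checks before the hand-off to iOS.
    public void RecordFinishedHandlerSafe(Exception ex)
    {
        if (ex != null)
        {
            Debug.LogWarning("ShareREC recording failed: " + ex.Message);
            return;
        }

        string path = ShareREC.lastRecordingPath();
        if (!string.IsNullOrEmpty(path) && File.Exists(path))
        {
            UnitySendMessageToiOS.SentVideoPathToiOS(path);
        }
        else
        {
            Debug.LogWarning("Recording finished but no file was found at: " + path);
        }
    }
}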

That completes the Unity side.

3. Next, in Xcode, create a new class that receives the paths sent from Unity and saves the files to the iOS photo album.

#import "ReceiveUnitySystemEvent.h"
#import <AssetsLibrary/AssetsLibrary.h>

@implementation ReceiveUnitySystemEvent

- (void)imageSaved:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (error != nil) {
        NSLog(@"Error while saving image");
    } else {
        NSLog(@"Image saved");
    }
}

void _TakeAPictureBtn(char *path)
{
    NSString *strReadAddr = [NSString stringWithUTF8String:path];
    UIImage *img = [UIImage imageWithContentsOfFile:strReadAddr];
    NSLog(@"%@", [NSString stringWithFormat:@"w:%f, h:%f", img.size.width, img.size.height]);

    ReceiveUnitySystemEvent *instance = [[ReceiveUnitySystemEvent alloc] init];
    UIImageWriteToSavedPhotosAlbum(img, instance, @selector(imageSaved:didFinishSavingWithError:contextInfo:), nil);
}

void _RecordingBtn(char *path)
{
    NSLog(@"Saving video from path %s", path);
    if (path == NULL) {
        return;
    }

    NSString *videoPath = [NSString stringWithUTF8String:path];
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:videoPath]
                                completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Save video fail: %@", error);
        } else {
            NSLog(@"Save video succeed.");
        }
    }];
}

@end

4. Import the ShareREC package into Xcode.

When dragging the package in, select both "Copy items" and "Create groups".

The required dependency libraries are:
libicucore.dylib
libz.1.2.5.dylib
libstdc++.dylib

If you get a 'ShareREC/ShareREC+Ext.h' file not found error, delete the package and drag it in again.

If JSONKit reports MRC errors, go to Build Phases - Compile Sources and set the compiler flag for JSONKit.m to -fno-objc-arc.
                                             