位置: 编程技术 - 正文
推荐整理分享IOS为UNITY写插件/通信(unity iphone),希望有所帮助,仅作参考,欢迎阅读内容。
文章相关热门搜索词:ios unity,unity插件编写,unity ui 插件,unity写app,unity 编辑器插件,unity 编辑器插件,unity ui 插件,ios嵌入unity,内容如对您有帮助,希望把文章链接给更多的朋友!
直接将 .h .m 文件拷贝到 Unity > Plugins > iOS 目录下,不要存在子文件夹;录音、百度SDK需要的库文件,在 Unity 导出 iOS 工程后需要再添加
#import <Foundation/Foundation.h>
#import "XHVoiceRecordHelper.h"
#import "BDVRFileRecognizer.h"

// Maximum recording duration in seconds before the recorder auto-stops.
// NOTE(review): the original literal was lost in transcription (".0" == 0.0,
// which would stop recording immediately); 60.0 matches the usual
// XHVoiceRecordHelper sample value — confirm against the original project.
#define kVoiceRecorderTotalTime 60.0

// Baidu voice-recognition service credentials.
#define API_KEY @"1pMskN3kIS6G8ccU4TKXiZ"
#define SECRET_KEY @"fglBlnpKo6Q6VDXBD4efZGwZkW"

// Bridge object that lets Unity start/stop native iOS recording and receive
// Baidu speech-recognition results via MVoiceRecognitionClientDelegate.
@interface VoiceRecordPlug : NSObject <MVoiceRecognitionClientDelegate>

// C entry points exported for Unity's native-plugin interop.
void startVoiceRecord();
void stopVoicRecord(); // NOTE: "Voic" typo kept — external callers bind to this symbol.

- (void)start;
+ (void)autoInit; // fixed: class method was missing its '+' prefix
@end
//
// VoiceRecordPlug.m
// VoiceRecord
//
// Created by bin.li on -3-3.
// Copyright (c) 年 bin.li. All rights reserved.
//
#import "VoiceRecordPlug.h"
// Shared plugin instance; created lazily by +autoInit on first use.
static VoiceRecordPlug *plug =nil;
// Shared recorder helper; created on the first startVoiceRecord() call.
static XHVoiceRecordHelper *voiceRecordHelper;
@implementation VoiceRecordPlug

// Forward declaration: finishRecord() is called by functions defined above it.
void finishRecord(void);

// Builds the fixed recording path: <app sandbox>/Documents/MySound.wav
static NSString *VoiceRecordFilePath(void)
{
    NSString *documentsDir = [NSString stringWithFormat:@"%@/Documents/", NSHomeDirectory()];
    return [documentsDir stringByAppendingString:@"MySound.wav"];
}

// Creates the shared plugin instance. Fixed: this is a class method and was
// missing its '+' prefix in the transcribed source.
+ (void)autoInit
{
    plug = [[VoiceRecordPlug alloc] init];
}

// Instance-level convenience that forwards to the Unity-facing C entry point.
- (void)start
{
    startVoiceRecord();
}

// Unity-facing entry point: start recording to MySound.wav.
void startVoiceRecord()
{
    if (!plug) {
        [VoiceRecordPlug autoInit];
    }
    NSLog(@"start record");

    NSString *recorderPath = VoiceRecordFilePath();

    if (!voiceRecordHelper) {
        NSLog(@"voiceRecordHelper init..");
        voiceRecordHelper = [[XHVoiceRecordHelper alloc] init];
        // Fixed: the limit value was lost in transcription ("maxRecordTime =;").
        voiceRecordHelper.maxRecordTime = kVoiceRecorderTotalTime;
        // When the time limit is hit, stop and hand the file to recognition.
        voiceRecordHelper.maxTimeStopRecorderCompletion = ^{
            [voiceRecordHelper stopRecordingWithStopRecorderCompletion:^{
                NSLog(@"");
                finishRecord();
            }];
        };
        voiceRecordHelper.peakPowerForChannel = ^(float peakPowerForChannel) {
            // Hook for a recording level meter; intentionally unused here.
        };
    }

    // Begin writing audio to recorderPath.
    [voiceRecordHelper startRecordingWithPath:recorderPath StartRecorderCompletion:^{
        NSLog(@"");
    }];
}

// Unity-facing entry point: stop recording and upload the file for recognition.
// NOTE: the "Voic" typo is kept — external callers bind to this exact symbol.
void stopVoicRecord()
{
    NSLog(@"stop record");
    finishRecord();
}

// Stops the recorder, then uploads the finished WAV file to Baidu.
void finishRecord(void)
{
    NSLog(@"cute finishRecord()..");
    NSString *recorderPath = VoiceRecordFilePath();
    [voiceRecordHelper stopRecordingWithStopRecorderCompletion:^{
        NSLog(@"begin connect baidu.");
        [[BDVoiceRecognitionClient sharedInstance] setApiKey:API_KEY withSecretKey:SECRET_KEY];
        // Upload the audio file to Baidu for speech recognition.
        // NOTE(review): the sampleRate literal was lost in transcription; 16000 Hz
        // is the common rate for the Baidu speech SDK — confirm against SDK headers.
        BDVRFileRecognizer *fileRecognizer =
            [[BDVRFileRecognizer alloc] initFileRecognizerWithFilePath:recorderPath
                                                            sampleRate:16000
                                                              property:EVoiceRecognitionPropertyInput
                                                              delegate:plug];
        int status = [fileRecognizer startFileRecognition];
        NSLog(@"begin connect baidu. status = %d", status);
        // if (status != EVoiceRecognitionStartWorking) {
        //     return;
        // }
    }];
}

// Placeholder for playback of the last recording (not implemented).
void playRecord()
{
    // voiceRecordHelper
}

#pragma mark - MVoiceRecognitionClientDelegate (recognition status callbacks)

// Called by the Baidu SDK as recognition progresses; aStatus selects the
// payload format carried in aObj.
- (void)VoiceRecognitionClientWorkStatus:(int)aStatus obj:(id)aObj
{
    NSLog(@"enter.. VoiceRecognitionClientWorkStatus");
    switch (aStatus) {
        case EVoiceRecognitionClientWorkStatusFinish:
        {
            // Final result from the server, delivered as an array in aObj.
            NSLog(@"EVoiceRecognitionClientWorkStatusFinish");
            if ([[BDVoiceRecognitionClient sharedInstance] getRecognitionProperty] != EVoiceRecognitionPropertyInput) {
                // Non-input mode: aObj is a flat list of candidate strings.
                NSMutableArray *resultData = (NSMutableArray *)aObj;
                NSMutableString *tmpString = [[NSMutableString alloc] initWithString:@""];
                // Fixed: loop index was never incremented (infinite loop), and the
                // "\r\n" escape had been stripped to "rn" in transcription.
                for (NSUInteger i = 0; i < [resultData count]; i++) {
                    [tmpString appendFormat:@"%@\r\n", [resultData objectAtIndex:i]];
                }
                NSLog(@"result: %@", tmpString);
            } else {
                // Input mode: aObj is an array of arrays; each inner array holds
                // candidate dictionaries (word -> confidence). Take the first
                // candidate of each segment and join them into a sentence.
                NSMutableString *sentenceString = [[NSMutableString alloc] initWithString:@""];
                for (NSArray *result in aObj) {
                    NSDictionary *dic = [result objectAtIndex:0];
                    NSString *candidateWord = [[dic allKeys] objectAtIndex:0];
                    [sentenceString appendString:candidateWord];
                }
                NSLog(@"result: %@", sentenceString);
                // UnitySendMessage("MainManager", uFun, [sentenceString UTF8String]);
                // ^ forwards the recognized sentence to Unity's method uFun.
            }
            break;
        }
        case EVoiceRecognitionClientWorkStatusFlushData:
        {
            // Intermediate (partial) result; callers should replace any
            // previously displayed text each time this arrives.
            NSMutableString *tmpString = [[NSMutableString alloc] initWithString:@""];
            [tmpString appendFormat:@"%@", [aObj objectAtIndex:0]];
            NSLog(@"%@", tmpString);
            break;
        }
        case EVoiceRecognitionClientWorkStatusError:
        {
            NSLog(@"EVoiceRecognitionClientWorkStatusError");
            break; // fixed: previously fell through into default
        }
        default:
            break;
    }
}

// Called by the Baidu SDK on errors; maps status codes to user-visible
// (Chinese) messages. Message strings kept byte-identical.
- (void)VoiceRecognitionClientErrorStatus:(int)aStatus subStatus:(int)aSubStatus
{
    NSLog(@"VoiceRecognitionClientErrorStatus");
    NSString *str = @"";
    switch (aStatus) {
        case EVoiceRecognitionClientErrorStatusNoSpeech:
            str = @"你怎么不说话呀!";
            break;
        case EVoiceRecognitionClientErrorStatusShort:
            str = @"你说话声音太短啦!";
            break;
        case EVoiceRecognitionClientErrorStatusChangeNotAvailable:
            str = @"录音设备不可用啊!";
            break;
        case EVoiceRecognitionClientErrorStatusIntrerruption:
            str = @"录音中断咯!";
            break;
        case EVoiceRecognitionClientErrorNetWorkStatusUnusable:
            str = @"网络不可用啦!";
            break;
        case EVoiceRecognitionClientErrorNetWorkStatusError:
            str = @"网络发生错误啦!";
            break;
        case EVoiceRecognitionClientErrorNetWorkStatusTimeOut:
            str = @"请求超时咯!";
            break;
        default:
            str = @" 请认真说话呀! ";
            break;
    }
    NSLog(@"error = %@", str);
    // UnitySendMessage("MainManager", ufnc, [str UTF8String]);
}
@end
Protocol Buffers(Protobuf)开发者指南---概览 欢迎来到protocolbuffers的开发者指南文档,protocolbuffers是一个与编程语言无关‘、系统平台无关、可扩展的结构化数据序列化/反序列化工具,适用于
python标准库之SocketServer 转载自:
Unity PlayerPrefs.DeleteAll 无效以及 PlayerPrefs 文件保存位置 DeleteAll 无效时检查下设置的公司名称和项目名称是否是中文!!!!!改成英文即可。各平台保存路径:On Mac OS X, PlayerPrefs are stored in the ~/Library/Preferences folder, in a file named u
标签: unity iphone
本文链接地址:https://www.jiuchutong.com/biancheng/383541.html 转载请保留说明!上一篇:Unity NGUI——常见NGUI鼠标快捷操作(unity_jail)
下一篇:Protocol Buffers(Protobuf)开发者指南---概览
友情链接: 武汉网站建设