//
// Global configuration, Java class handles, and constants for the STT
// (speech-to-text) streaming script. Runs on a JVM JS engine (Nashorn-style):
// Java.type() resolves a Java class into a JS variable.
var DEBUG = true;
//var TEST_DIR = "C:/ymd/temp/test_stt";  // null or folder name
var TEST_DIR = null;  // null or folder name
//----------------------------------------------

// Eagerly-resolved Java classes used throughout the script.
var HashMap = Java.type("java.util.HashMap");
var Properties = Java.type("java.util.Properties");
var JString = Java.type("java.lang.String");
var ByteArrayInputStream = Java.type("java.io.ByteArrayInputStream");
var FileOutputStream = Java.type("java.io.FileOutputStream");
var Pattern = Java.type("java.util.regex.Pattern");
var Integer = Java.type("java.lang.Integer");
var Thread = Java.type("java.lang.Thread");
var ByteBuffer = Java.type("java.nio.ByteBuffer");
var Objects = Java.type("java.util.Objects");
var DriverManager = Java.type("java.sql.DriverManager");	// ccs
var URL = Java.type("java.net.URL");						// ccs
var ByteArray = Java.type("byte[]");
var Runnable = Java.type("java.lang.Runnable");
var AudioFormat = Java.type("javax.sound.sampled.AudioFormat");
var NoteUtils = Java.type("com.brekeke.pbx.common.NoteUtils");
var PayloadInfo = Java.type("com.brekeke.tel.sip.stack.PayloadInfo");
var UlawUtil = Java.type("com.brekeke.sound.UlawUtil");
//var Manager = Java.type("com.brekeke.pbx.Manager");			// ccs
var HttpC = Java.type("com.brekeke.web.HttpC");				// ccs
var Dump = Java.type("com.brekeke.util.Dump");
var logger = Dump.getLogger();

// for GCP — class handles resolved lazily (presumably by a setGcpLib()
// defined elsewhere in the file; null until that runs).
var GoogleCredentials = null;
var FixedCredentialsProvider = null;
var ResponseObserver = null;
var SpeechSettingsV1 = null;
var SpeechClientV1 = null;
var RecognitionConfigV1 = null;
var RecognitionConfigV1_AudioEncoding = null;
var StreamingRecognitionConfigV1 = null;
var StreamingRecognizeRequestV1 = null;
var SpeechAdaptationV1 = null;
var SpeechSettingsV1p1beta1 = null;
var SpeechClientV1p1beta1 = null;
var RecognitionConfigV1p1beta1 = null;
var RecognitionConfigV1p1beta1_AudioEncoding = null;
var StreamingRecognitionConfigV1p1beta1 = null;
var StreamingRecognizeRequestV1p1beta1 = null;
var SpeechAdaptationV1p1beta1 = null;
var SpeechSettingsV2 = null;
var SpeechClientV2 = null;
var ExplicitDecodingConfigV2 = null;
var ExplicitDecodingConfigV2_AudioEncoding = null;
var RecognitionFeaturesV2 = null;
var RecognitionFeaturesV2_MultiChannelMode = null;
var RecognitionConfigV2 = null;
var StreamingRecognitionConfigV2 = null;
var StreamingRecognizeRequestV2 = null;
var StreamingRecognitionFeaturesV2 = null;
var SpeechAdaptationV2 = null;
var AdaptationPhraseSetV2= null;
var ByteString = null;

// for AWS — class handles resolved lazily by setAwsLib().
var AwsBasicCredentials = null;
var StaticCredentialsProvider = null;
var Region = null;
var LanguageCode = null;
var MediaEncoding = null;
var TranscribeStreamingAsyncClient = null;
var StartStreamTranscriptionResponseHandler = null;
var StartStreamTranscriptionRequest = null;
var AudioEvent = null;
var SdkBytes = null;
var Subscription = null;
var Publisher = null;

// for AmiVoice — class handles resolved lazily by setAmiLib().
var Wrp = null;
var WrpListener = null;
var AudioDispatcherFactory = null;
var RateTransposer = null;
var AudioProcessor = null;

var GCP_STREAMING_LIMIT = 290000;	// ~5 minutes
// Per-tenant cache of GCP SpeechSettings: { modified: <note mtime>, settings: <SpeechSettings> }
var gcp_currentSettings = {};
// Error-message patterns used to decide whether a failed STT stream should be retried.
var GCP_V1_RETRY_ERROR_PATTERN = /^com\.google\.api\.gax\.rpc\.OutOfRangeException:.+Audio Timeout Error: Long duration elapsed without audio.*$/;
var GCP_V2_RETRY_ERROR_PATTERN = /^(com\.google\.api\.gax\.rpc\.OutOfRangeException:.+Audio Timeout Error: Long duration elapsed without audio.*|com\.google\.api\.gax\.rpc\.AbortedException:.+Stream timed out after receiving no more client requests.*)$/;
var GCP_IGNORE_ERROR_PATTERN = /^java\.util\.concurrent\.CancellationException: User cancelled stream.*$/;
var AWS_RETRY_ERROR_PATTERN = /^(java\.util\.concurrent\.(ExecutionException|CompletionException): )?software\.amazon\.awssdk\.services\.transcribestreaming\.model\.BadRequestException: Your request timed out because no new audio was received for 15 seconds.*$/;
var AMI_COMPLETE_PATTERN = /^INFO: stopped feeding data to WebSocket server$/;
var AMI_RETRY_ERROR_PATTERN = /^ERROR: (can't stop feeding data to WebSocket server \((timeout occurred|timeout occurred while recognizing audio data from client)\)|can't feed data to WebSocket server \(can't feed audio data to recognizer server\))$/;
var AMI_WS_URL_LOG = "wss://acp-api.amivoice.com/v1/";				// endpoint URL (server-side logging enabled)
var AMI_WS_URL_NOLOG = "wss://acp-api.amivoice.com/v1/nolog/";		// endpoint URL (server-side logging disabled)
var AMI_WS_CONNECTION_TIMEOUT = 5000;	// connection timeout (ms)
var AMI_WS_READ_TIMEOUT = 0;			// read timeout (ms)
var CCS_STT_MODE_GCP = 1;
var CCS_STT_MODE_AWS = 2;
var CCS_STT_MODE_AMI = 3;
var STT_SETTING_NOTE = "stt_setting";
// NOTE(review): setting_modified / stt_currentSettings are presumably maintained
// by getSttSetting() elsewhere in the file — not visible in this chunk.
var setting_modified = 0;
var stt_currentSettings;

// assi = AudioStreamScriptInfo
function startAudio(assi) {
	if(DEBUG) {
		logger.info("[stt:startAudio (" + assi.handler.sid + ")] payload=" + assi.payload);
		if(assi.params instanceof HashMap) {
			logger.info("from ivr");
		}
		else if(!assi.params || (assi.params instanceof JString)) {
			logger.info("from call recording");
		}

		var ivrInfo = assi.handler.getIvrInfo();
		if(ivrInfo) {
			var propRec = ivrInfo.propRec;
			if(propRec) {
				logger.info("recording properties.");
				var ite = propRec.keySet().iterator();
				while(ite.hasNext()) {
					var key = ite.next();
					logger.info("\t" + key + "," + propRec.get(key));
				}
			}
		}
	}

	var ret = false;
	var tenant = "";
	var stt_service_type = "";
	var um = assi.handler.getUserManager();
	if(um) {
		tenant = um.tenant;
	}

	if(!assi.params || (assi.params instanceof JString)) {
		// 通話録音
		if(um) {
			// STT設定情報(ノート)を取得
			var stt_prop = getSttSetting(tenant);
			if(DEBUG) {
				if(stt_prop) {
					logger.info("stt properties.");
					var ite = stt_prop.keySet().iterator();
					while(ite.hasNext()) {
						var key = ite.next();
						logger.info("\t" + key + "," + stt_prop.get(key));
					}
				}
			}

			stt_service_type = um.prop.getProperty("rec.stt.service_type", "none");
			if(stt_prop) {
				stt_service_type = stt_prop.getProperty("rec.stt.service_type", "none");
			}
//			logger.debug("[stt:startAudio (" + assi.handler.sid + ")] stt_service_type=" + stt_service_type);

			var stt_info = {};
			stt_info.max_time = "";
			stt_info.sttdb_driver_class = "";
			stt_info.sttdb_url = "";
			stt_info.sttdb_user = "";
			stt_info.sttdb_password = "";
			var user = "";		// ccs
			var is_exec = false;
			var ivrInfo = assi.handler.getIvrInfo();
			if(ivrInfo) {
				var propRec = ivrInfo.propRec;
				if(propRec) {
					var condition_ptn_user = null;
					var condition_ptn_other_number = null;
					var condition_user = "";					// 音声認識条件(ユーザー)
					var condition_other_number = "";			// 音声認識条件(相手番号)
					var condition_setting = "";
					if(stt_prop) {
						// STT設定情報(ノート)がある場合
						condition_setting = stt_prop.getProperty("rec.stt.condition", "");
						stt_info.max_time = stt_prop.getProperty("rec.stt.max_time", "")
						stt_info.sttdb_driver_class = stt_prop.getProperty("rec.stt.sttdb_driver_class", "");
						stt_info.sttdb_url = stt_prop.getProperty("rec.stt.sttdb_url", "");
						stt_info.sttdb_user = stt_prop.getProperty("rec.stt.sttdb_user", "");
						stt_info.sttdb_password = stt_prop.getProperty("rec.stt.sttdb_password", "");
					}
					else {
						// STT設定情報(ノート)がない場合、テナント設定から取得
						condition_setting = um.prop.getProperty("rec.stt.condition", "");
						stt_info.max_time = um.prop.getProperty("rec.stt.max_time", "")
						stt_info.sttdb_driver_class = um.prop.getProperty("rec.stt.sttdb_driver_class", "");
						stt_info.sttdb_url = um.prop.getProperty("rec.stt.sttdb_url", "");
						stt_info.sttdb_user = um.prop.getProperty("rec.stt.sttdb_user", "");
						stt_info.sttdb_password = um.prop.getProperty("rec.stt.sttdb_password", "");
					}

					if(condition_setting) {
						var condition_setting_tmp = condition_setting.split(",");
						for(var i=0; i<condition_setting_tmp.length; i++) {
							var condition_param = condition_setting_tmp[i].split("=");
							if(condition_param.length == 2) {
								if(condition_param[0] == "user") {
									condition_user = condition_param[1];
									if(condition_user) {
										condition_ptn_user = Pattern.compile(condition_user);
									}
								}
								else if(condition_param[0] == "other_number") {
									condition_other_number = condition_param[1];
									if(condition_other_number) {
										condition_ptn_other_number = Pattern.compile(condition_other_number);
									}
								}
							}
						}
					}
//					logger.debug("[stt:startAudio (" + assi.handler.sid + ")] stt condition: user=" + condition_user + ", other number=" + condition_other_number);

//					var user = "";		// ccs
					var other_number = "";
					var from = propRec.getProperty("from", "");
					var to = propRec.getProperty("to", "");
					var call_type = propRec.getProperty("calltype", "");
					if(call_type == "i") {
						// 着信
						user = to;
						var match = from.match(/^sip:(.+)@.*$/);
						if(match) {
							other_number = match[1];
						}
					}
					else if(call_type == "o") {
						// 発信
						var match = from.match(/^sip:(.+)@.*$/);
						if(match) {
							user = to;
							other_number = match[1];
							var is_getroom = false;
							if(other_number.startsWith("*")) {
								// コールピックアップ発信の場合
								is_getroom = true;
							}
							else {
/*
								var ui = um.getUserInfo(other_number);
								if(ui) {
									if(ui.isRinggroup()) {
										// グループへの発信の場合
									}
								}
*/
							}

							var room_info_users = [];
							if(is_getroom) {
								// 通話IDからコールステータスを取得
								var cmd_ret = assi.handler.getService().sendCommandToPbx("getroominfo", tenant + " " + propRec.getProperty("rid", ""), null);
								var room_info = cmd_ret.split("\n");
								for(var i=0; i<room_info.length; i++) {
									if(room_info[i]) {
										// 最初はステータス情報なので除外
										if(i > 0) {
											var room_info_tmp = room_info[i].split(",");
											// 通話録音セッション、自セッションは除外
											if(!(/media-record/.test(room_info_tmp[2])) && (user != room_info_tmp[0])) {
//												logger.debug("number=" + room_info_tmp[0]);
												room_info_users.push(room_info_tmp[0]);
											}
										}
									}
								}
							}
						}
					}

					if(is_getroom) {
						var other_numbers = "";
						for(var i=0; i<room_info_users.length; i++) {
							if(i > 0) {
								other_numbers += "|";
							}
							other_numbers += room_info_users[i];
						}
						logger.debug("[stt:startAudio (" + assi.handler.sid + ")] user=" + user + ", other number=" + other_numbers);
					}
					else {
						logger.debug("[stt:startAudio (" + assi.handler.sid + ")] user=" + user + ", other number=" + other_number);
					}

					if(condition_ptn_user && condition_ptn_other_number) {
						// 音声認識条件：ユーザー、かつ、相手番号
						var mu = condition_ptn_user.matcher(user);

						if(is_getroom) {
							// コールステータスのユーザーで判定
							for(var i=0; i<room_info_users.length; i++) {
								var mo = condition_ptn_other_number.matcher(room_info_users[i]);
								if(mu.matches() && mo.matches()) {
									logger.debug("[stt:startAudio (" + assi.handler.sid + ")] stt condition match");
									is_exec = true;
									break;
								}
							}
						}
						else {
							var mo = condition_ptn_other_number.matcher(other_number);
							if(mo.matches()) {
								logger.debug("[stt:startAudio (" + assi.handler.sid + ")] stt condition match");
								is_exec = true;
							}
						}
					}
					else if(condition_ptn_user) {
						// 音声認識条件：ユーザー
						var mu = condition_ptn_user.matcher(user);
						if(mu.matches()) {
							logger.debug("[stt:startAudio (" + assi.handler.sid + ")] stt condition match");
							is_exec = true;
						}
					}
					else if(condition_ptn_other_number) {
						// 音声認識条件：相手番号
						if(is_getroom) {
							// コールステータスのユーザーで判定
							for(var i=0; i<room_info_users.length; i++) {
								var mo = condition_ptn_other_number.matcher(room_info_users[i]);
								if(mo.matches()) {
									logger.debug("[stt:startAudio (" + assi.handler.sid + ")] stt condition match");
									is_exec = true;
									break;
								}
							}
						}
						else {
							var mo = condition_ptn_other_number.matcher(other_number);
							if(mo.matches()) {
								logger.debug("[stt:startAudio (" + assi.handler.sid + ")] stt condition match");
								is_exec = true;
							}
						}
					}
				}
			}

			if(is_exec) {
				// 音声認識サービス毎の処理
				switch(stt_service_type) {
					case "gcp":		// GCP
						assi.params = new  HashMap();
						assi.params.put("stt.max_time", stt_info.max_time);
						assi.params.put("stt.sttdb_driver_class", stt_info.sttdb_driver_class);
						assi.params.put("stt.sttdb_url", stt_info.sttdb_url);
						assi.params.put("stt.sttdb_user", stt_info.sttdb_user);
						assi.params.put("stt.sttdb_password", stt_info.sttdb_password);
						// ccs start
						assi.params.put("stt.source", "call_record");
						assi.params.put("stt.tenant", tenant);
						assi.params.put("stt.user", user);
						assi.params.put("stt.other_number", other_number);
						assi.params.put("stt.rid" , propRec.getProperty("rid", ""));
						// ccs end

						if(stt_prop) {
							// STT設定情報(ノート)がある場合
							assi.params.put("stt_g.note.service_key", stt_prop.getProperty("stt_g.note_service_key", ""));
							assi.params.put("stt_g.version", stt_prop.getProperty("stt_g.version", ""));
							assi.params.put("stt_g.recognizer", stt_prop.getProperty("stt_g.recognizer", ""));
							assi.params.put("stt_g.language", stt_prop.getProperty("stt_g.language", ""));
							assi.params.put("stt_g.model", stt_prop.getProperty("stt_g.model", ""));
							assi.params.put("stt_g.phrase_set", stt_prop.getProperty("stt_g.phrase_set", ""));
							// ccs start
							assi.params.put("stt.ccs_enable", stt_prop.getProperty("rec.stt.ccs_enable", "false"));
							assi.params.put("stt.reportdb_driver_class", stt_prop.getProperty("rec.stt.reportdb_driver_class", ""));
							assi.params.put("stt.reportdb_url", stt_prop.getProperty("rec.stt.reportdb_url", ""));
							assi.params.put("stt.reportdb_user", stt_prop.getProperty("rec.stt.reportdb_user", ""));
							assi.params.put("stt.reportdb_password", stt_prop.getProperty("rec.stt.reportdb_password", ""));
							assi.params.put("stt.rfs_access", stt_prop.getProperty("rec.stt.rfs_access", "false"));
							assi.params.put("stt.record_app_url", stt_prop.getProperty("rec.stt.record_app_url", ""));
							// ccs end
						}
						else {
							// STT設定情報(ノート)がない場合、テナント設定から取得して設定
							assi.params.put("stt_g.note.service_key", um.prop.getProperty("stt_g.note_service_key", ""));
							assi.params.put("stt_g.version", um.prop.getProperty("stt_g.version", ""));
							assi.params.put("stt_g.recognizer", um.prop.getProperty("stt_g.recognizer", ""));
							assi.params.put("stt_g.language", um.prop.getProperty("stt_g.language", ""));
							assi.params.put("stt_g.model", um.prop.getProperty("stt_g.model", ""));
							assi.params.put("stt_g.phrase_set", um.prop.getProperty("stt_g.phrase_set", ""));
							// ccs start
							assi.params.put("stt.ccs_enable", um.prop.getProperty("rec.stt.ccs_enable", "false"));
							assi.params.put("stt.reportdb_driver_class", um.prop.getProperty("rec.stt.reportdb_driver_class", ""));
							assi.params.put("stt.reportdb_url", um.prop.getProperty("rec.stt.reportdb_url", ""));
							assi.params.put("stt.reportdb_user", um.prop.getProperty("rec.stt.reportdb_user", ""));
							assi.params.put("stt.reportdb_password", um.prop.getProperty("rec.stt.reportdb_password", ""));
							assi.params.put("stt.rfs_access", um.prop.getProperty("rec.stt.rfs_access", "false"));
							assi.params.put("stt.record_app_url", um.prop.getProperty("rec.stt.record_app_url", ""));
							// ccs end
						}

						ret = startAudio_g(assi, tenant);
						break;

					case "aws":		// AWS
						assi.params = new  HashMap();
						assi.params.put("stt.max_time", stt_info.max_time);
						assi.params.put("stt.sttdb_driver_class", stt_info.sttdb_driver_class);
						assi.params.put("stt.sttdb_url", stt_info.sttdb_url);
						assi.params.put("stt.sttdb_user", stt_info.sttdb_user);
						assi.params.put("stt.sttdb_password", stt_info.sttdb_password);
						// ccs start
						assi.params.put("stt.source", "call_record");
						assi.params.put("stt.tenant", tenant);
						assi.params.put("stt.user", user);
						assi.params.put("stt.other_number", other_number);
						assi.params.put("stt.rid" , propRec.getProperty("rid", ""));
						// ccs end

						if(stt_prop) {
							// STT設定情報(ノート)がある場合
							assi.params.put("stt_aws.access_key", stt_prop.getProperty("stt_aws.access_key", ""));
							assi.params.put("stt_aws.secret_key", stt_prop.getProperty("stt_aws.secret_key", ""));
							assi.params.put("stt_aws.region", stt_prop.getProperty("stt_aws.region", ""));
							assi.params.put("stt_aws.language", stt_prop.getProperty("stt_aws.language", ""));
							assi.params.put("stt_aws.vocabulary", stt_prop.getProperty("stt_aws.vocabulary", ""));
							// ccs start
							assi.params.put("stt.ccs_enable", stt_prop.getProperty("rec.stt.ccs_enable", "false"));
							assi.params.put("stt.reportdb_driver_class", stt_prop.getProperty("rec.stt.reportdb_driver_class", ""));
							assi.params.put("stt.reportdb_url", stt_prop.getProperty("rec.stt.reportdb_url", ""));
							assi.params.put("stt.reportdb_user", stt_prop.getProperty("rec.stt.reportdb_user", ""));
							assi.params.put("stt.reportdb_password", stt_prop.getProperty("rec.stt.reportdb_password", ""));
							assi.params.put("stt.rfs_access", stt_prop.getProperty("rec.stt.rfs_access", "false"));
							assi.params.put("stt.record_app_url", stt_prop.getProperty("rec.stt.record_app_url", ""));
							// ccs end
						}
						else {
							// STT設定情報(ノート)がない場合、テナント設定から取得して設定
							assi.params.put("stt_aws.access_key", um.prop.getProperty("stt_aws.access_key", ""));
							assi.params.put("stt_aws.secret_key", um.prop.getProperty("stt_aws.secret_key", ""));
							assi.params.put("stt_aws.region", um.prop.getProperty("stt_aws.region", ""));
							assi.params.put("stt_aws.language", um.prop.getProperty("stt_aws.language", ""));
							assi.params.put("stt_aws.vocabulary", um.prop.getProperty("stt_aws.vocabulary", ""));
							// ccs start
							assi.params.put("stt.ccs_enable", um.prop.getProperty("rec.stt.ccs_enable", "false"));
							assi.params.put("stt.reportdb_driver_class", um.prop.getProperty("rec.stt.reportdb_driver_class", ""));
							assi.params.put("stt.reportdb_url", um.prop.getProperty("rec.stt.reportdb_url", ""));
							assi.params.put("stt.reportdb_user", um.prop.getProperty("rec.stt.reportdb_user", ""));
							assi.params.put("stt.reportdb_password", um.prop.getProperty("rec.stt.reportdb_password", ""));
							assi.params.put("stt.rfs_access", um.prop.getProperty("rec.stt.rfs_access", "false"));
							assi.params.put("stt.record_app_url", um.prop.getProperty("rec.stt.record_app_url", ""));
							// ccs end
						}

						ret = startAudio_aws(assi, tenant);
						break;

					case "ami":		// AmiVoice
						assi.params = new  HashMap();
						assi.params.put("stt.max_time", stt_info.max_time);
						assi.params.put("stt.sttdb_driver_class", stt_info.sttdb_driver_class);
						assi.params.put("stt.sttdb_url", stt_info.sttdb_url);
						assi.params.put("stt.sttdb_user", stt_info.sttdb_user);
						assi.params.put("stt.sttdb_password", stt_info.sttdb_password);
						// ccs start
						assi.params.put("stt.source", "call_record");
						assi.params.put("stt.tenant", tenant);
						assi.params.put("stt.user", user);
						assi.params.put("stt.other_number", other_number);
						assi.params.put("stt.rid" , propRec.getProperty("rid", ""));
						// ccs end

						if(stt_prop) {
							// STT設定情報(ノート)がある場合
							assi.params.put("stt_ami.app_key", stt_prop.getProperty("stt_ami.app_key", ""));
							assi.params.put("stt_ami.save_log", stt_prop.getProperty("stt_ami.save_log", "true"));
							assi.params.put("stt_ami.engine", stt_prop.getProperty("stt_ami.engine", "-a2-ja-general"));
							assi.params.put("stt_ami.keep_filler", stt_prop.getProperty("stt_ami.keep_filler", "false"));
							assi.params.put("stt_ami.profile_id", stt_prop.getProperty("stt_ami.profile_id", ""));
							// ccs start
							assi.params.put("stt.ccs_enable", stt_prop.getProperty("rec.stt.ccs_enable", "false"));
							assi.params.put("stt.reportdb_driver_class", stt_prop.getProperty("rec.stt.reportdb_driver_class", ""));
							assi.params.put("stt.reportdb_url", stt_prop.getProperty("rec.stt.reportdb_url", ""));
							assi.params.put("stt.reportdb_user", stt_prop.getProperty("rec.stt.reportdb_user", ""));
							assi.params.put("stt.reportdb_password", stt_prop.getProperty("rec.stt.reportdb_password", ""));
							assi.params.put("stt.rfs_access", stt_prop.getProperty("rec.stt.rfs_access", "false"));
							assi.params.put("stt.record_app_url", stt_prop.getProperty("rec.stt.record_app_url", ""));
							// ccs end
						}
						else {
							// STT設定情報(ノート)がない場合、テナント設定から取得して設定
							assi.params.put("stt_ami.app_key", um.prop.getProperty("stt_ami.app_key", ""));
							assi.params.put("stt_ami.save_log", um.prop.getProperty("stt_ami.save_log", "true"));
							assi.params.put("stt_ami.engine", um.prop.getProperty("stt_ami.engine", "-a2-ja-general"));
							assi.params.put("stt_ami.keep_filler", um.prop.getProperty("stt_ami.keep_filler", "false"));
							assi.params.put("stt_ami.profile_id", um.prop.getProperty("stt_ami.profile_id", ""));
							// ccs start
							assi.params.put("stt.ccs_enable", um.prop.getProperty("rec.stt.ccs_enable", "false"));
							assi.params.put("stt.reportdb_driver_class", um.prop.getProperty("rec.stt.reportdb_driver_class", ""));
							assi.params.put("stt.reportdb_url", um.prop.getProperty("rec.stt.reportdb_url", ""));
							assi.params.put("stt.reportdb_user", um.prop.getProperty("rec.stt.reportdb_user", ""));
							assi.params.put("stt.reportdb_password", um.prop.getProperty("rec.stt.reportdb_password", ""));
							assi.params.put("stt.rfs_access", um.prop.getProperty("rec.stt.rfs_access", "false"));
							assi.params.put("stt.record_app_url", um.prop.getProperty("rec.stt.record_app_url", ""));
							// ccs end
						}

						ret = startAudio_ami(assi, tenant);
						break;

					default:
						break;
				}
			}
		}
	}
	else {
		// IVR
		if(assi.params.get("stt.service_type")) {
			stt_service_type = assi.params.get("stt.service_type");
		}

		// 音声認識サービス毎の処理
		switch(stt_service_type) {
			case "gcp":		// GCP
				ret = startAudio_g(assi, tenant);
				break;

			case "aws":		// AWS
				ret = startAudio_aws(assi, tenant);
				break;

			case "ami":		// AmiVoice
				ret = startAudio_ami(assi, tenant);
				break;

			default:
				break;
		}
	}

	return ret;
}

// Start a Google Cloud Speech-to-Text streaming session for this audio stream.
// Reads the service-account key from a note (tenant-scoped note preferred),
// caches the built SpeechSettings per tenant, fills assi.obj with the session
// context, and opens the client stream for the requested API version
// ("v1" / "v1p1beta1" / "v2").
// Returns true when the client stream was created.
function startAudio_g(assi, tenant) {
	if(DEBUG) {
//		logger.info("[stt_g:startAudio] payload=" + assi.payload);
	}

	// Resolve the Google client classes (lazy Java.type loading).
	setGcpLib();

	var ret = false;
	var is_error = false;
	var params = assi.params;

	var version = params.get("stt_g.version");						// API version
	if(!version) {
		logger.error("[stt_g:startAudio (" + assi.handler.sid + ")] No version");
		is_error = true;
	}

	if(!is_error) {
		var note_service_key = params.get("stt_g.note.service_key");	// service account key (note name)
		// Prefer a tenant-scoped note ("<tenant>.<name>") when it exists.
		if(tenant) {
			if(tenant != "-") {
				if(note_service_key) {
					if(NoteUtils.exists(tenant + "." + note_service_key)) {
						note_service_key = tenant + "." + note_service_key;
					}
				}
			}
		}

		// Service-account key note must exist (lastModified > 0).
		var l = NoteUtils.lastModified(note_service_key);
		if(l > 0) {
			var settings;
			// Reuse the cached SpeechSettings when the note is unchanged AND the
			// requested API version matches the cached settings' version.
			if(gcp_currentSettings[tenant] && (l == gcp_currentSettings[tenant].modified) &&
				(((version == "v1") && (gcp_currentSettings[tenant].settings instanceof SpeechSettingsV1)) ||
				((version == "v1p1beta1") && (gcp_currentSettings[tenant].settings instanceof SpeechSettingsV1p1beta1)) ||
				((version == "v2") && (gcp_currentSettings[tenant].settings instanceof SpeechSettingsV2)))) {
				settings = gcp_currentSettings[tenant].settings;
			}
			else {
				if(DEBUG) {
					logger.info("[stt_g:startAudio (" + assi.handler.sid + ")] SpeechSettings update");
				}
				// Build credentials from the JSON key stored in the note.
				var n = NoteUtils.read(note_service_key);
				var b = n.getBytes("UTF8");
				var is = new ByteArrayInputStream(b);
				var credentials = GoogleCredentials.fromStream(is);
				switch(version) {
					case "v1":
						settings = SpeechSettingsV1.newBuilder()
							.setCredentialsProvider(FixedCredentialsProvider.create(credentials))
							.build();
						break;
					case "v1p1beta1":
						settings = SpeechSettingsV1p1beta1.newBuilder()
							.setCredentialsProvider(FixedCredentialsProvider.create(credentials))
							.build();
						break;
					case "v2":
						settings = SpeechSettingsV2.newBuilder()
							.setCredentialsProvider(FixedCredentialsProvider.create(credentials))
							.build();
						break;
					default:
						logger.error("[stt_g:startAudio (" + assi.handler.sid + ")] version is mismatch. version=" + version);
						break;
				}
				gcp_currentSettings[tenant] = { modified: l, settings: settings };
			}
			// Session context handed to the streaming callbacks.
			assi.obj = new HashMap();
			var pattern = params.get("result.pattern");								// result match pattern
			if(pattern) {
				var ptn = Pattern.compile(pattern);
				assi.obj.put("result.pattern", ptn);
				var groupOnly = params.get("result.pattern.group_only");
				if(groupOnly) {
					assi.obj.put("result.pattern.group_only", true);
				}
			}
			assi.obj.put("stt.service_type", "gcp");								// STT type (GCP)
			assi.obj.put("stt_g.version", version);									// API version
			assi.obj.put("stt_g.recognizer", params.get("stt_g.recognizer"));		// recognizer
			var language = params.get("stt_g.language");							// language
			if(!language) {
				language = "ja-JP";
			}
			assi.obj.put("stt_g.language", language);
			var model = params.get("stt_g.model");									// model (v2 default "long", otherwise "default")
			if(!model) {
				if(version == "v2") {
					model = "long";
				}
				else {
					model = "default";
				}
			}
			assi.obj.put("stt_g.model", model);
			var start_time = new Date().getTime();
			assi.obj.put("stt.start_time", start_time);								// start time (ms)
			assi.obj.put("stt.first_start_time", start_time);						// first start time (ms)
			var timeout = params.get("stt.timeout");								// STT timeout (ms)
			if(timeout) {
				timeout = Number(timeout);
			}
			else {
				timeout = 0;
			}
			assi.obj.put("stt.timeout", timeout);
			assi.obj.put("stt_g.phrase_set",  params.get("stt_g.phrase_set"));		// phrase set
			assi.obj.put("stt.extend_timeout", params.get("stt.extend_timeout"));	// extended STT timeout (ms)
			var silent_detection_time = assi.params.get("stt.silent_detection_time");	// silence detection time (ms)
			if(silent_detection_time) {
				silent_detection_time = Number(silent_detection_time);
			}
			else {
				silent_detection_time = 0;
			}
			assi.obj.put("stt.silent_detection_time", silent_detection_time);
			// ccs start
			var stt_source = params.get("stt.source");
			assi.obj.put("stt.source", stt_source);
			if(stt_source == "call_record") {
				// For call recording, "stt.max_time" (seconds) overrides the timeout (ms).
				timeout = params.get("stt.max_time");								// STT timeout (ms)
				if(timeout) {
					timeout = Number(timeout);
					if(timeout >= 0) {
						timeout *= 1000;
					}
					else {
						timeout = 0;
					}
				}
				else {
					timeout = 0;
				}
				assi.obj.put("stt.timeout", timeout);

				assi.obj.put("stt.tenant", assi.params.get("stt.tenant"));				// tenant
				assi.obj.put("stt.user", assi.params.get("stt.user"));					// user
				assi.obj.put("stt.other_number", assi.params.get("stt.other_number"));	// remote party number
				assi.obj.put("stt.rid", assi.params.get("stt.rid"));					// call ID
				try {
//					var pbx_con = DriverManager.getConnection("jdbc:hsqldb:hsql://localhost/stt", "sa", "");
					var pbx_con = DriverManager.getConnection(assi.params.get("stt.sttdb_url"), assi.params.get("stt.sttdb_user"), assi.params.get("stt.sttdb_password"));
					assi.obj.put("stt.pbx_db_connection", pbx_con);
				}
				catch(e) {
					logger.error("[stt_g:startAudio (" + assi.handler.sid + ")] pbx db connect error. " + e);
				}
				assi.obj.put("stt.rfs_access", assi.params.get("stt.rfs_access"));			// RFS integration enabled
				assi.obj.put("stt.record_app_url", assi.params.get("stt.record_app_url"));	// RFS URL

				var ccs_enable = assi.params.get("stt.ccs_enable");					// CCS integration enabled
				if(ccs_enable == "true") {
//					var service = Manager.getService();
//					var db = service.getDb("logdb");
//					var con = db.getConnection();
//					assi.obj.put("stt.db", db);
					try {
						var ccs_con = DriverManager.getConnection(assi.params.get("stt.reportdb_url"), assi.params.get("stt.reportdb_user"), assi.params.get("stt.reportdb_password"));
						assi.obj.put("stt.ccs_db_connection", ccs_con);
					}
					catch(e) {
						logger.error("[stt_g:startAudio (" + assi.handler.sid + ")] ccs db connect error. " + e);
					}
				}
			}
			// ccs end

			if(DEBUG) {
				logger.info("[stt_g:startAudio (" + assi.handler.sid + ")] note_service_account_key=" + note_service_key + ", version=" + version + ", recognizer=" + params.get("stt_g.recognizer") + ", language=" + language + ", model=" + model + ", phrase_set=" + params.get("stt_g.phrase_set"));
			}

			if(TEST_DIR) {
				// Fixed: declare with "var" — previously an implicit global shared
				// (and clobbered) across concurrent sessions.
				var fout = new FileOutputStream(TEST_DIR + "/stt_" + assi.handler.sid + ".ul");
				assi.obj.put("file_output", fout);
			}

			switch(version) {
				case "v1":
					ret = createClientStreamV1(assi, settings);
					break;
				case "v1p1beta1":
					ret = createClientStreamV1p1beta1(assi, settings);
					break;
				case "v2":
					ret = createClientStreamV2(assi, settings);
					break;
				default:
					break;
			}
		}
		else {
			logger.debug("[stt_g:startAudio (" + assi.handler.sid + ")] Note of service account key does not exist. note_service_key=" + note_service_key);
		}
	}

	return ret;
}

// Start an AWS Transcribe streaming session for this audio stream.
// Builds static credentials from the configured access/secret keys, fills
// assi.obj with the session context (region, language, timeouts, CCS/RFS
// settings), and opens the client stream.
// Returns true when the client stream was created.
function startAudio_aws(assi, tenant) {
	if(DEBUG) {
//		logger.info("[stt_aws:startAudio] payload=" + assi.payload);
	}

	// Resolve the AWS SDK classes (lazy Java.type loading).
	setAwsLib();

	var ret = false;
	var params = assi.params;

	var access_key = params.get("stt_aws.access_key");		// access key
	var secret_key = params.get("stt_aws.secret_key");		// secret key

	if(access_key && secret_key) {
		// Session context handed to the streaming callbacks.
		assi.obj = new HashMap();
		var credentials = AwsBasicCredentials.create(access_key, secret_key);	// credentials
		var provider = StaticCredentialsProvider.create(credentials);
		assi.obj.put("stt_aws.provider", provider);
		var pattern = params.get("result.pattern");								// result match pattern
		if(pattern) {
			var ptn = Pattern.compile(pattern);
			assi.obj.put("result.pattern", ptn);
			var groupOnly = params.get("result.pattern.group_only");
			if(groupOnly) {
				assi.obj.put("result.pattern.group_only", true);
			}
		}
		assi.obj.put("stt.service_type", "aws");								// STT type (AWS)
		var setting_region = params.get("stt_aws.region");						// region (defaults to ap-northeast-1)
		var region = null;
		if(!setting_region) {
			region = Region.AP_NORTHEAST_1;
		}
		else {
			region = Region.of(setting_region);
		}
		assi.obj.put("stt_aws.region", region);
		var setting_language = params.get("stt_aws.language");					// language (defaults to ja-JP)
		var language = null;
		if(!setting_language) {
			language = LanguageCode.JA_JP;
		}
		else {
			language = LanguageCode.fromValue(setting_language);
		}
		assi.obj.put("stt_aws.language", language);
		var start_time = new Date().getTime();
		assi.obj.put("stt.start_time", start_time);								// start time (ms)
		assi.obj.put("stt.first_start_time", start_time);						// first start time (ms)
		var timeout = params.get("stt.timeout");								// STT timeout (ms)
		if(timeout) {
			timeout = Number(timeout);
		}
		else {
			timeout = 0;
		}
		assi.obj.put("stt.timeout", timeout);
		assi.obj.put("stt_aws.vocabulary",  params.get("stt_aws.vocabulary"));	// custom vocabulary
		assi.obj.put("stt.extend_timeout", params.get("stt.extend_timeout"));	// extended STT timeout (ms)
		var silent_detection_time = assi.params.get("stt.silent_detection_time");	// silence detection time (ms)
		if(silent_detection_time) {
			silent_detection_time = Number(silent_detection_time);
		}
		else {
			silent_detection_time = 0;
		}
		assi.obj.put("stt.silent_detection_time", silent_detection_time);
		// ccs start
		var stt_source = params.get("stt.source");
		assi.obj.put("stt.source", stt_source);
		if(stt_source == "call_record") {
			// For call recording, "stt.max_time" (seconds) overrides the timeout (ms).
			timeout = params.get("stt.max_time");								// STT timeout (ms)
			if(timeout) {
				timeout = Number(timeout);
				if(timeout >= 0) {
					timeout *= 1000;
				}
				else {
					timeout = 0;
				}
			}
			else {
				timeout = 0;
			}
			assi.obj.put("stt.timeout", timeout);

			assi.obj.put("stt.tenant", assi.params.get("stt.tenant"));				// tenant
			assi.obj.put("stt.user", assi.params.get("stt.user"));					// user
			assi.obj.put("stt.other_number", assi.params.get("stt.other_number"));	// remote party number
			assi.obj.put("stt.rid", assi.params.get("stt.rid"));					// call ID
			try {
//				var pbx_con = DriverManager.getConnection("jdbc:hsqldb:hsql://localhost/stt", "sa", "");
				var pbx_con = DriverManager.getConnection(assi.params.get("stt.sttdb_url"), assi.params.get("stt.sttdb_user"), assi.params.get("stt.sttdb_password"));
				assi.obj.put("stt.pbx_db_connection", pbx_con);
			}
			catch(e) {
				logger.error("[stt_aws:startAudio (" + assi.handler.sid + ")] pbx db connect error. " + e);
			}
			assi.obj.put("stt.rfs_access", assi.params.get("stt.rfs_access"));			// RFS integration enabled
			assi.obj.put("stt.record_app_url", assi.params.get("stt.record_app_url"));	// RFS URL

			var ccs_enable = assi.params.get("stt.ccs_enable");					// CCS integration enabled
			if(ccs_enable == "true") {
				try {
					var ccs_con = DriverManager.getConnection(assi.params.get("stt.reportdb_url"), assi.params.get("stt.reportdb_user"), assi.params.get("stt.reportdb_password"));
					assi.obj.put("stt.ccs_db_connection", ccs_con);
				}
				catch(e) {
					logger.error("[stt_aws:startAudio (" + assi.handler.sid + ")] ccs db connect error. " + e);
				}
			}
		}
		// ccs end

		if(DEBUG) {
			// NOTE(review): this logs the AWS secret key in plain text — consider
			// redacting before enabling DEBUG in production.
			logger.info("[stt_aws:startAudio (" + assi.handler.sid + ")] access_key=" + access_key + ", secret_key=" + secret_key + ", region=" + (region.metadata() ? region.metadata().id() : null) + ", language=" + language.toString() + ", vocabulary=" + params.get("stt_aws.vocabulary"));
		}

		if(TEST_DIR) {
			// Fixed: declare with "var" — previously an implicit global shared
			// (and clobbered) across concurrent sessions.
			var fout = new FileOutputStream(TEST_DIR + "/stt_" + assi.handler.sid + ".ul");
			assi.obj.put("file_output", fout);
		}

		ret = createClientStreamAws(assi);
	}
	else {
		logger.debug("[stt_aws:startAudio (" + assi.handler.sid + ")] access key or secret key does not exist. access_key=" + access_key + ", secret_key=" + secret_key);
	}

	return ret;
}

// Starts an AmiVoice speech-to-text session.
// Reads connection/engine settings from assi.params, stores the per-session
// state in assi.obj (a java.util.HashMap), optionally opens PBX/CCS DB
// connections when the audio source is call recording ("call_record"), and
// finally creates the WebSocket client stream.
//
// Params:  assi   - per-session context (params / obj / handler)
//          tenant - tenant name (accepted for interface parity; not read here)
// Returns: true when the client stream was created, false otherwise.
function startAudio_ami(assi, tenant) {
	if(DEBUG) {
//		logger.info("[stt_ami:startAudio] payload=" + assi.payload);
	}

	// Configure the AmiVoice library
	setAmiLib();

	var ret = false;
	var params = assi.params;

	var app_key = params.get("stt_ami.app_key");		// APP key
	if(app_key) {
		assi.obj = new HashMap();
		var pattern = params.get("result.pattern");								// recognition result pattern
		if(pattern) {
			var ptn = Pattern.compile(pattern);
			assi.obj.put("result.pattern", ptn);
			var groupOnly = params.get("result.pattern.group_only");
			if(groupOnly) {
				assi.obj.put("result.pattern.group_only", true);
			}
		}
		assi.obj.put("stt.service_type", "ami");								// STT service type (AmiVoice)
		var ami_url = AMI_WS_URL_LOG;											// endpoint URL
		if(params.get("stt_ami.save_log") == "false") {
			ami_url = AMI_WS_URL_NOLOG;
		}
		assi.obj.put("stt_ami.url", ami_url);
		assi.obj.put("stt_ami.app_key", app_key);								// APP key
		assi.obj.put("stt_ami.engine", params.get("stt_ami.engine"));			// recognition engine
		var is_keep_filler = params.get("stt_ami.keep_filler");					// suppress automatic filler removal
		var keep_filler = "0";
		if(is_keep_filler == "true") {
			keep_filler = "1";
		}
		assi.obj.put("stt_ami.keep_filler", keep_filler);
		assi.obj.put("stt_ami.profile_id",  params.get("stt_ami.profile_id"));	// profile ID
		var start_time = new Date().getTime();
		assi.obj.put("stt.start_time", start_time);								// start time (ms)
		assi.obj.put("stt.first_start_time", start_time);						// first start time (ms)
		var timeout = params.get("stt.timeout");								// recognition timeout (ms)
		if(timeout) {
			timeout = Number(timeout);
		}
		else {
			timeout = 0;
		}
		assi.obj.put("stt.timeout", timeout);
		assi.obj.put("stt.extend_timeout", params.get("stt.extend_timeout"));	// extended recognition timeout (ms)
		var silent_detection_time = assi.params.get("stt.silent_detection_time");	// silence detection time (ms)
		if(silent_detection_time) {
			silent_detection_time = Number(silent_detection_time);
		}
		else {
			silent_detection_time = 0;
		}
		assi.obj.put("stt.silent_detection_time", silent_detection_time);
		// ccs start
		var stt_source = params.get("stt.source");
		assi.obj.put("stt.source", stt_source);
		if(stt_source == "call_record") {
			// For call recording stt.max_time (seconds) overrides stt.timeout (ms)
			timeout = params.get("stt.max_time");
			if(timeout) {
				timeout = Number(timeout);
				if(timeout >= 0) {
					timeout *= 1000;
				}
				else {
					timeout = 0;
				}
			}
			else {
				timeout = 0;
			}
			assi.obj.put("stt.timeout", timeout);

			assi.obj.put("stt.tenant", assi.params.get("stt.tenant"));				// tenant
			assi.obj.put("stt.user", assi.params.get("stt.user"));					// user
			assi.obj.put("stt.other_number", assi.params.get("stt.other_number"));	// peer phone number
			assi.obj.put("stt.rid", assi.params.get("stt.rid"));					// call ID
			try {
				var pbx_con = DriverManager.getConnection(assi.params.get("stt.sttdb_url"), assi.params.get("stt.sttdb_user"), assi.params.get("stt.sttdb_password"));
				assi.obj.put("stt.pbx_db_connection", pbx_con);
			}
			catch(e) {
				logger.error("[stt_ami:startAudio (" + assi.handler.sid + ")] pbx db connect error. " + e);
			}
			assi.obj.put("stt.rfs_access", assi.params.get("stt.rfs_access"));			// RFS integration enabled
			assi.obj.put("stt.record_app_url", assi.params.get("stt.record_app_url"));	// RFS URL

			var ccs_enable = assi.params.get("stt.ccs_enable");					// CCS integration enabled
			if(ccs_enable == "true") {
				try {
					var ccs_con = DriverManager.getConnection(assi.params.get("stt.reportdb_url"), assi.params.get("stt.reportdb_user"), assi.params.get("stt.reportdb_password"));
					assi.obj.put("stt.ccs_db_connection", ccs_con);
				}
				catch(e) {
					logger.error("[stt_ami:startAudio (" + assi.handler.sid + ")] ccs db connect error. " + e);
				}
			}
		}
		// ccs end

		if(DEBUG) {
			logger.info("[stt_ami:startAudio (" + assi.handler.sid + ")] app_key=" + app_key + ", url=" + ami_url + ", engine=" + params.get("stt_ami.engine") + ", keep_filler=" + params.get("stt_ami.keep_filler") + ", profile_id=" + params.get("stt_ami.profile_id"));
		}

		if(TEST_DIR) {
			// "var" added: fout was previously an implicit global, so concurrent
			// sessions clobbered each other's file handle
			var fout = new FileOutputStream(TEST_DIR + "/stt_" + assi.handler.sid + ".ul");
			assi.obj.put("file_output", fout);
		}

		ret = createClientStreamAmi(assi);
	}
	else {
		logger.debug("[stt_ami:startAudio (" + assi.handler.sid + ")] app key does not exist. app_key=" + app_key);
	}

	return ret;
}


// Ends the audio session by delegating to the handler for the active STT
// service recorded in assi.obj ("gcp", "aws" or "ami"). Unknown or missing
// service types are ignored, matching the original switch default.
function endAudio(assi) {
	if(DEBUG) {
		logger.info("[stt:endAudio (" + assi.handler.sid + ")]");
	}

	var service_type = assi.obj.get("stt.service_type");
	if(service_type == "gcp") {			// Google Cloud Speech
		endAudio_g(assi);
	}
	else if(service_type == "aws") {	// AWS Transcribe
		endAudio_aws(assi);
	}
	else if(service_type == "ami") {	// AmiVoice
		endAudio_ami(assi);
	}
}

// Ends a GCP speech-to-text session: flushes the final CCS record when the
// call ended mid-restart, closes DB connections, the test output file, the
// client stream and the client.
// Fix: every cleanup step is now guarded independently — previously a
// throwing pbx_con.close() (SQLException) skipped the CCS final record, the
// CCS close and client.close(), leaking those resources.
function endAudio_g(assi) {
	if(DEBUG) {
//		logger.info("[stt_g:endAudio]");
	}

	// ccs start
	// When the call ends while recording was interrupted, onComplete never
	// arrives, so the DB post-processing is performed here instead.
	if(assi.obj.get("restart")) {
		// Upload the recognition results to RFS
		uploadToRfs(assi);

		var pbx_con = assi.obj.get("stt.pbx_db_connection");
		if(pbx_con) {
			if(DEBUG) {
				logger.info("[stt_g:endAudio (" + assi.handler.sid + ")] pbx db close");
			}
			try {
				pbx_con.close();
			}
			catch(e) {
				logger.error("[stt_g:endAudio (" + assi.handler.sid + ")] pbx db close error. " + e);
			}
		}
		var ccs_con = assi.obj.get("stt.ccs_db_connection");
		if(ccs_con) {
			if(DEBUG) {
				logger.info("[stt_g:endAudio (" + assi.handler.sid + ")] add final record to ccs and ccs db close");
			}
			try {
				var current_time = new Date().getTime();
				// Append the final record to the CCS DB
				insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), null, 1, CCS_STT_MODE_GCP, 0, null, null, current_time, current_time);
				ccs_con.close();
			}
			catch(e) {
				logger.error("[stt_g:endAudio (" + assi.handler.sid + ")] ccs db close error. " + e);
			}
		}
	}
	// ccs end

	var clientStream = assi.obj.get("clientStream");
	var client = assi.obj.get("client");

	if(TEST_DIR) {
		var fout = assi.obj.get("file_output");
		if(fout) {
			try {
				fout.flush();
				fout.close();
			}
			catch(e) {}
		}
	}

	try {
		clientStream.closeSend();
	}
	catch(e) {}
	try {
		client.close();
	}
	catch(e) {
		logger.error("[stt_g:endAudio (" + assi.handler.sid + ")] client close error. " + e);
	}
}

// Ends an AWS Transcribe streaming session: flushes the final CCS record when
// the call ended mid-restart, closes DB connections, the test output file,
// completes the audio publisher and closes the client.
// Fix: every cleanup step is now guarded independently — previously a
// throwing pbx_con.close() (SQLException) or subscriber.onComplete() skipped
// the remaining cleanup (CCS close, client.close()), leaking those resources.
function endAudio_aws(assi) {
	if(DEBUG) {
//		logger.info("[stt_aws:endAudio]");
	}

	// ccs start
	// When the call ends while recording was interrupted, onComplete never
	// arrives, so the DB post-processing is performed here instead.
	if(assi.obj.get("restart")) {
		// Upload the recognition results to RFS
		uploadToRfs(assi);

		var pbx_con = assi.obj.get("stt.pbx_db_connection");
		if(pbx_con) {
			if(DEBUG) {
				logger.info("[stt_aws:endAudio (" + assi.handler.sid + ")] pbx db close");
			}
			try {
				pbx_con.close();
			}
			catch(e) {
				logger.error("[stt_aws:endAudio (" + assi.handler.sid + ")] pbx db close error. " + e);
			}
		}
		var ccs_con = assi.obj.get("stt.ccs_db_connection");
		if(ccs_con) {
			if(DEBUG) {
				logger.info("[stt_aws:endAudio (" + assi.handler.sid + ")] add final record to ccs and ccs db close");
			}
			try {
				var current_time = new Date().getTime();
				// Append the final record to the CCS DB
				insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), null, 1, CCS_STT_MODE_AWS, 0, null, null, current_time, current_time);
				ccs_con.close();
			}
			catch(e) {
				logger.error("[stt_aws:endAudio (" + assi.handler.sid + ")] ccs db close error. " + e);
			}
		}
	}
	// ccs end

	var subscriber = assi.obj.get("audio_stream_publisher").get("subscriber");
	var client = assi.obj.get("client");

	if(TEST_DIR) {
		var fout = assi.obj.get("file_output");
		if(fout) {
			try {
				fout.flush();
				fout.close();
			}
			catch(e) {}
		}
	}

	// Signal end-of-stream unless the request already failed
	if(!assi.obj.get("aws_run").get("error")) {
		try {
			subscriber.onComplete();
		}
		catch(e) {
			logger.error("[stt_aws:endAudio (" + assi.handler.sid + ")] onComplete error. " + e);
		}
	}
	try {
		client.close();
	}
	catch(e) {
		logger.error("[stt_aws:endAudio (" + assi.handler.sid + ")] client close error. " + e);
	}
}

// Ends an AmiVoice speech-to-text session. The teardown (pausing the audio
// feed, waiting for the server acknowledgement, WebSocket disconnect, RFS
// upload and DB post-processing) can block, so it runs on a background
// thread. Mono sessions (channel == 1) tear down the single "wrp" wrapper;
// everything else is treated as stereo and tears down "wrp_ch1" then
// "wrp_ch2" in sequence on the same thread.
// NOTE(review): the run() bodies reference the enclosing `ami_run` variable
// via Java.super(ami_run) — do not rename or reorder these declarations.
function endAudio_ami(assi) {
	if(DEBUG) {
//		logger.info("[stt_ami:endAudio]");
	}

	// Runnable that is also a HashMap, so state can be attached via put()/get()
	var AmiRun = Java.extend(Runnable, HashMap);
	var ch = assi.obj.get("channel");
	if(ch == 1) {
		// mono

		var ami_run = new AmiRun() {
			run: function() {
				var wrp = Java.super(ami_run).get("wrp");

				// If the call ended while recording was interrupted, the server-side
				// recognition has already finished, so do not signal end-of-audio.
				if(!assi.obj.get("restart_ch1")) {
					// Tell the WebSocket recognition server the audio data is complete
					if(!wrp.feedDataPause()) {
						logger.error("[stt_ami:endAudio (" + assi.handler.sid + ")] feedDataPause failed. " + wrp.getLastMessage());
					}
					else {
						// Wait (up to ~1s: 10 polls x 100ms) for the end-of-audio acknowledgement
						var cnt = 0;
						do {
							if(Java.super(ami_run).get("listener").is_end) {
								break;
							}
							cnt++;
							Thread.sleep(100);
						} while(cnt < 10);
					}
				}

				// Disconnect from the WebSocket recognition server
				if(wrp.isConnected()) {
					wrp.disconnect();
				}

				if(TEST_DIR) {
					var fout = assi.obj.get("file_output");
					if(fout) {
						try {
							fout.flush();
							fout.close();
						}
						catch(e) {}
					}
				}

				// ccs start
				// Upload the recognition results to RFS
				uploadToRfs(assi);

				var pbx_con = assi.obj.get("stt.pbx_db_connection");
				if(pbx_con) {
					if(DEBUG) {
						logger.info("[stt_ami (" + assi.handler.sid + ")] pbx db close");
					}
					pbx_con.close();
				}
				var ccs_con = assi.obj.get("stt.ccs_db_connection");
				if(ccs_con) {
					if(DEBUG) {
						logger.info("[stt_ami (" + assi.handler.sid + ")] add final record to ccs and ccs db close");
					}
					var current_time = new Date().getTime();
					// Append the final record to the CCS DB
					insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), null, 1, CCS_STT_MODE_AMI, 0, null, null, current_time, current_time);
					ccs_con.close();
				}
				// ccs end
			}
		};
		ami_run.put("wrp", assi.obj.get("wrp"));
		ami_run.put("listener", assi.obj.get("listener"));
		var ami_end_thread = new Thread(ami_run);
		ami_end_thread.start();
	}
	else {
		// stereo

		var ami_run = new AmiRun() {
			run: function() {
				// channel 1
				var wrp_ch1 = Java.super(ami_run).get("wrp_ch1");

				// If the call ended while recording was interrupted, the server-side
				// recognition has already finished, so do not signal end-of-audio.
				if(!assi.obj.get("restart_ch1")) {
					// Tell the WebSocket recognition server the audio data is complete
					if(!wrp_ch1.feedDataPause()) {
						logger.error("[stt_ami:endAudio (" + assi.handler.sid + ")] ch1: feedDataPause failed. " + wrp_ch1.getLastMessage());
					}
					else {
						// Wait (up to ~1s) for the end-of-audio acknowledgement
						var cnt = 0;
						do {
							if(Java.super(ami_run).get("listener_ch1").is_end) {
								break;
							}
							cnt++;
							Thread.sleep(100);
						} while(cnt < 10);
					}
				}

				// Disconnect from the WebSocket recognition server
				if(wrp_ch1.isConnected()) {
					wrp_ch1.disconnect();
				}

				// channel 2
				var wrp_ch2 = Java.super(ami_run).get("wrp_ch2");

				// If the call ended while recording was interrupted, the server-side
				// recognition has already finished, so do not signal end-of-audio.
				if(!assi.obj.get("restart_ch2")) {
					// Tell the WebSocket recognition server the audio data is complete
					if(!wrp_ch2.feedDataPause()) {
						logger.error("[stt_ami:endAudio (" + assi.handler.sid + ")] ch2: feedDataPause failed. " + wrp_ch2.getLastMessage());
					}
					else {
						// Wait (up to ~1s) for the end-of-audio acknowledgement
						var cnt = 0;
						do {
							if(Java.super(ami_run).get("listener_ch2").is_end) {
								break;
							}
							cnt++;
							Thread.sleep(100);
						} while(cnt < 10);
					}
				}

				// Disconnect from the WebSocket recognition server
				if(wrp_ch2.isConnected()) {
					wrp_ch2.disconnect();
				}

				if(TEST_DIR) {
					var fout = assi.obj.get("file_output");
					if(fout) {
						try {
							fout.flush();
							fout.close();
						}
						catch(e) {}
					}
				}

				// ccs start
				// Upload the recognition results to RFS
				uploadToRfs(assi);

				var pbx_con = assi.obj.get("stt.pbx_db_connection");
				if(pbx_con) {
					if(DEBUG) {
						logger.info("[stt_ami (" + assi.handler.sid + ")] pbx db close");
					}
					pbx_con.close();
				}
				var ccs_con = assi.obj.get("stt.ccs_db_connection");
				if(ccs_con) {
					if(DEBUG) {
						logger.info("[stt_ami (" + assi.handler.sid + ")] add final record to ccs and ccs db close");
					}
					var current_time = new Date().getTime();
					// Append the final record to the CCS DB
					insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), null, 1, CCS_STT_MODE_AMI, 0, null, null, current_time, current_time);
					ccs_con.close();
				}
				// ccs end
			}
		};
		ami_run.put("wrp_ch1", assi.obj.get("wrp_ch1"));
		ami_run.put("listener_ch1", assi.obj.get("listener_ch1"));
		ami_run.put("wrp_ch2", assi.obj.get("wrp_ch2"));
		ami_run.put("listener_ch2", assi.obj.get("listener_ch2"));
		var ami_end_thread = new Thread(ami_run);
		ami_end_thread.start();
	}
}


// Dispatches an incoming audio frame to the handler for the active STT
// service. An event code of 2 marks voice detection and carries no frame;
// null data is likewise ignored.
function eventAudio(assi, data, size, ev, pt) {
	if(DEBUG) {
		logger.info("[stt:eventAudio (" + assi.handler.sid + ")] size=" + size + ", event=" + ev);
	}

	// voice-detection event: nothing to forward yet
	if(ev == 2) {
//		assi.raiseResult(assi);		// ToDo
		return;
	}
	// no audio payload
	if(data == null) {
		return;
	}

	var service_type = assi.obj.get("stt.service_type");
	if(service_type == "gcp") {			// Google Cloud Speech
		eventAudio_g(assi, data, size, ev, pt);
	}
	else if(service_type == "aws") {	// AWS Transcribe
		eventAudio_aws(assi, data, size, ev, pt);
	}
	else if(service_type == "ami") {	// AmiVoice
		eventAudio_ami(assi, data, size, ev, pt);
	}
}

// Feeds an audio frame to the GCP streaming recognition session.
// Applies the timeout rules (max recognition time for "call_record", silence
// detection plus an extendable timeout for IVR), restarts the stream when the
// elapsed time exceeds GCP_STREAMING_LIMIT, then sends the frame as a
// version-specific StreamingRecognizeRequest (v1 / v1p1beta1 / v2).
function eventAudio_g(assi, data, size, ev, pt) {
	if(DEBUG) {
//		logger.info("[stt_g:eventAudio] size=" + size + ", event=" + ev);
	}

	// Timeout check since the start of recognition
	var is_timeout = false;
	var timeout = Number(assi.obj.get("stt.timeout"));
	var current_time = new Date().getTime();
	var elapsed_time = current_time - assi.obj.get("stt.start_time");
	if(timeout > 0) {
		if(assi.obj.get("stt.source") == "call_record") {
			// For call recording the elapsed time is measured from the first recognition start
			var first_elapsed_time = current_time - assi.obj.get("stt.first_start_time");

			if(first_elapsed_time >= timeout) {
				if(DEBUG) {
					logger.info("[stt_g:eventAudio (" + assi.handler.sid + ")] max voice recognition time over.");
				}
				assi.stop();
				is_timeout = true;
			}
		}
		else {
			// IVR source

			// Timeout when the silence-detection period elapsed since the last recognized voice
			var silent_detection_time = Number(assi.obj.get("stt.silent_detection_time"));
			if(silent_detection_time > 0) {
				var voice_detection_time = assi.obj.get("voice_detection_time");
				if(voice_detection_time) {
					if((current_time - voice_detection_time) >= silent_detection_time) {
						logger.debug("[stt_g:eventAudio (" + assi.handler.sid + ")] no voice detection");
						if(!assi.obj.get("result")) {
							assi.obj.put("result", "TIMEOUT");
						}
						assi.raiseResult(assi);
						is_timeout = true;
					}
				}
			}

			// Extend the timeout once a recognition result has been detected
			if(assi.obj.get("voice_detection")) {
				timeout += Number(assi.obj.get("stt.extend_timeout"));
			}

			if(elapsed_time >= timeout) {
				if(DEBUG) {
					logger.info("[stt_g:eventAudio (" + assi.handler.sid + ")] Timeout.");
				}
				assi.obj.put("result", "TIMEOUT");
				assi.raiseResult(assi);
				is_timeout = true;
			}
		}
	}

	// Request a reconnect when the elapsed time exceeds the GCP streaming limit
	if(elapsed_time >= GCP_STREAMING_LIMIT) {
		logger.debug("[stt_g:eventAudio (" + assi.handler.sid + ")] restart");
		assi.obj.put("restart", true);
	}

	if(!is_timeout) {
		var version = assi.obj.get("stt_g.version");		// API version

		// When reconnecting, open a fresh stream and resend the streaming config
		if(assi.obj.get("restart")) {
			assi.obj.get("clientStream").closeSend();
			assi.obj.get("stream_controller").cancel();		// remove Observer

			var clientStream_new = assi.obj.get("client").streamingRecognizeCallable().splitCall(assi.obj.get("response_observer"));
			assi.obj.put("clientStream", clientStream_new);
			var request_new = null;
			switch(version) {
				case "v1":
					request_new = StreamingRecognizeRequestV1.newBuilder()
									.setStreamingConfig(assi.obj.get("streaming_recognition_config"))
									.build();
					break;
				case "v1p1beta1":
					request_new = StreamingRecognizeRequestV1p1beta1.newBuilder()
									.setStreamingConfig(assi.obj.get("streaming_recognition_config"))
									.build();
					break;
				case "v2":
					request_new = StreamingRecognizeRequestV2.newBuilder()
									.setStreamingConfig(assi.obj.get("streaming_recognition_config"))
									.setRecognizer(assi.obj.get("stt_g.recognizer"))
									.build();
					break;
				default:
					break;
			}
			clientStream_new.send(request_new);

			assi.obj.remove("restart");
			assi.obj.put("stt.start_time", current_time);	// update the recognition start time
		}

		var clientStream = assi.obj.get("clientStream");
		var request = null;
		switch(version) {
			case "v1":
				request = StreamingRecognizeRequestV1.newBuilder()
							.setAudioContent(ByteString.copyFrom(data, 0, size))
							.build();
				break;
			case "v1p1beta1":
				request = StreamingRecognizeRequestV1p1beta1.newBuilder()
							.setAudioContent(ByteString.copyFrom(data, 0, size))
							.build();
				break;
			case "v2":
				request = StreamingRecognizeRequestV2.newBuilder()
							.setAudio(ByteString.copyFrom(data, 0, size))
							.build();
				break;
			default:
				break;
		}

		if(request) {
			clientStream.send(request);
		}

		if(TEST_DIR) {
			var fout = assi.obj.get("file_output");
			if(fout) {
				try {
					fout.write(data, 0, size);
				}
				catch(e) {
					if(!e.getClass) {
						// JavaScript error
						logger.error(e.message + "\n" + e.stack);
					}
					else {
						// Java exception
						logger.error(e.getMessage(), e);
					}
				}
			}
		}
	}
}

// Feeds an audio frame to the AWS Transcribe streaming session.
// Applies the timeout rules (max recognition time for "call_record", silence
// detection plus an extendable timeout for IVR), restarts the streaming
// request when requested, converts ulaw payloads to little-endian PCM16,
// and publishes the frame to the current stream subscriber.
// Fixes: "Unkown payload" log typo corrected; the subscriber-swap wait loop
// after a restart was unbounded and could hang the audio thread forever —
// it is now capped (~5s) with a logged bail-out.
function eventAudio_aws(assi, data, size, ev, pt) {
	if(DEBUG) {
//		logger.info("[stt_aws:eventAudio] size=" + size + ", event=" + ev);
	}

	// Timeout check since the start of recognition
	var is_timeout = false;
	var timeout = Number(assi.obj.get("stt.timeout"));
	var current_time = new Date().getTime();
	var elapsed_time = current_time - assi.obj.get("stt.start_time");
	if(timeout > 0) {
		if(assi.obj.get("stt.source") == "call_record") {
			// For call recording the elapsed time is measured from the first recognition start
			var first_elapsed_time = current_time - assi.obj.get("stt.first_start_time");

			if(first_elapsed_time >= timeout) {
				if(DEBUG) {
					logger.info("[stt_aws:eventAudio (" + assi.handler.sid + ")] max voice recognition time over.");
				}
				assi.stop();
				is_timeout = true;
			}
		}
		else {
			// IVR source

			// Timeout when the silence-detection period elapsed since the last recognized voice
			var silent_detection_time = Number(assi.obj.get("stt.silent_detection_time"));
			if(silent_detection_time > 0) {
				var voice_detection_time = assi.obj.get("voice_detection_time");
				if(voice_detection_time) {
					if((current_time - voice_detection_time) >= silent_detection_time) {
						logger.debug("[stt_aws:eventAudio (" + assi.handler.sid + ")] no voice detection");
						if(!assi.obj.get("result")) {
							assi.obj.put("result", "TIMEOUT");
						}
						assi.raiseResult(assi);
						is_timeout = true;
					}
				}
			}

			// Extend the timeout once a recognition result has been detected
			if(assi.obj.get("voice_detection")) {
				timeout += Number(assi.obj.get("stt.extend_timeout"));
			}

			if(elapsed_time >= timeout) {
				if(DEBUG) {
					logger.info("[stt_aws:eventAudio (" + assi.handler.sid + ")] Timeout.");
				}
				assi.obj.put("result", "TIMEOUT");
				assi.raiseResult(assi);
				is_timeout = true;
			}
		}
	}

	if(!is_timeout) {
		// When restarting, issue the streaming request again
		if(assi.obj.get("restart")) {
			// Give the request-completion thread a moment to finish
			if(assi.obj.get("aws_thread").isAlive()) {
				Thread.sleep(20);
			}

			// Remember the current subscriber so the swap can be detected below
			var subscriber_current = assi.obj.get("audio_stream_publisher").get("subscriber");

			var result = assi.obj.get("client").startStreamTranscription(assi.obj.get("request"), assi.obj.get("audio_stream_publisher"), assi.obj.get("response_handler"));

			// Restart the request-completion thread
			assi.obj.get("aws_run").put("future", result);
			var aws_thread_new = new Thread(assi.obj.get("aws_run"));
			aws_thread_new.start();
			assi.obj.put("aws_thread", aws_thread_new);

			// Wait for the subscriber to be replaced; bounded (~5s) so a failed
			// restart cannot hang the audio thread forever
			var wait_cnt = 0;
			while(Objects.equals(subscriber_current, assi.obj.get("audio_stream_publisher").get("subscriber"))) {
				wait_cnt++;
				if(wait_cnt > 250) {
					logger.error("[stt_aws:eventAudio_aws (" + assi.handler.sid + ")] subscriber was not updated after restart.");
					break;
				}
				Thread.sleep(20);
			}

			assi.obj.remove("restart");
			assi.obj.put("stt.start_time", current_time);	// update the recognition start time
		}

		// The subscriber may not be set yet right after (re)start; poll briefly
		var cnt = 0;
		var subscriber = null;
		do {
			subscriber = assi.obj.get("audio_stream_publisher").get("subscriber");
			if(subscriber) {
				break;
			}
			cnt++;
			Thread.sleep(20);
		} while(cnt < 5);

		if(subscriber) {
			// Only the ulaw (PCMU) payload types need conversion to PCM16
			var is_pcm_convert = false;
			switch(assi.payload) {
				case PayloadInfo.PAYLOADTYPE_PCM16_MONO:
					break;
				case PayloadInfo.PAYLOADTYPE_PCMU_MONO:
					is_pcm_convert = true;
					break;
				case PayloadInfo.PAYLOADTYPE_PCM16_STEREO:
					break;
				case PayloadInfo.PAYLOADTYPE_PCMU_STEREO:
					is_pcm_convert = true;
					break;
				case PayloadInfo.PAYLOADTYPE_PCM16_48000_MONO:
					break;
				case PayloadInfo.PAYLOADTYPE_PCMU_48000_MONO:
					break;
				case PayloadInfo.PAYLOADTYPE_PCM16_48000_STEREO:
					break;
				case PayloadInfo.PAYLOADTYPE_PCMU_48000_STEREO:
					break;
				default:
					logger.error("[stt_aws:eventAudio_aws (" + assi.handler.sid + ")] Unknown payload:" + assi.payload);
					break;
			}

			var audio_buffer = null;
			var audio = null;
			if(is_pcm_convert) {
				// Convert ulaw to PCM 16bit (little endian); the conversion
				// buffer is cached in assi.obj and reused across frames
				if(assi.obj.get("audio_buffer")) {
					audio_buffer = assi.obj.get("audio_buffer");
				}
				else {
					audio_buffer = new ByteArray(size * 2);
					assi.obj.put("audio_buffer", audio_buffer);
				}
				var audio_buffer_cnt = 0;
				for(var i=0; i<size; i++) {
					var tmp = UlawUtil.toPcm16(data[i]);
					audio_buffer[audio_buffer_cnt] = tmp & 0xff;
					audio_buffer[audio_buffer_cnt + 1] = (tmp >> 8) & 0xff;
					audio_buffer_cnt += 2;
				}

				audio = AudioEvent.builder()
							.audioChunk(SdkBytes.fromByteBuffer(ByteBuffer.wrap(audio_buffer)))
							.build();
			}
			else {
				audio = AudioEvent.builder()
							.audioChunk(SdkBytes.fromByteBuffer(ByteBuffer.wrap(data, 0, size)))
							.build();

			}
			subscriber.onNext(audio);
		}

		if(TEST_DIR) {
			var fout = assi.obj.get("file_output");
			if(fout) {
				try {
					fout.write(data, 0, size);
				}
				catch(e) {
					if(!e.getClass) {
						// JavaScript error
						logger.error(e.message + "\n" + e.stack);
					}
					else {
						// Java exception
						logger.error(e.getMessage(), e);
					}
				}
			}
		}
	}
}

// Feeds an audio frame to the AmiVoice WebSocket recognition session.
// Applies the timeout rules (max recognition time for "call_record", silence
// detection plus an extendable timeout for IVR), reconnects sessions flagged
// for restart, optionally converts ulaw to PCM16 and resamples 8kHz->16kHz,
// then sends the audio to the server — per channel when the input is stereo
// (channel == 2; interleaved frames are split into data_ch1/data_ch2).
function eventAudio_ami(assi, data, size, ev, pt) {
	if(DEBUG) {
//		logger.info("[stt_ami:eventAudio] size=" + size + ", event=" + ev);
	}

	// Timeout check since the start of recognition
	var is_timeout = false;
	var timeout = Number(assi.obj.get("stt.timeout"));
	var current_time = new Date().getTime();
	var elapsed_time = current_time - assi.obj.get("stt.start_time");
	if(timeout > 0) {
		if(assi.obj.get("stt.source") == "call_record") {
			// For call recording the elapsed time is measured from the first recognition start
			var first_elapsed_time = current_time - assi.obj.get("stt.first_start_time");

			if(first_elapsed_time >= timeout) {
				if(DEBUG) {
					logger.info("[stt_ami:eventAudio (" + assi.handler.sid + ")] max voice recognition time over.");
				}
				assi.stop();
				is_timeout = true;
			}
		}
		else {
			// IVR source

			// Timeout when the silence-detection period elapsed since the last recognized voice
			var silent_detection_time = Number(assi.obj.get("stt.silent_detection_time"));
			if(silent_detection_time > 0) {
				var voice_detection_time = assi.obj.get("voice_detection_time");
				if(voice_detection_time) {
					if((current_time - voice_detection_time) >= silent_detection_time) {
						logger.debug("[stt_ami:eventAudio (" + assi.handler.sid + ")] no voice detection");
						if(!assi.obj.get("result")) {
							assi.obj.put("result", "TIMEOUT");
						}
						assi.raiseResult(assi);
						is_timeout = true;
					}
				}
			}

			// Extend the timeout once a recognition result has been detected
			if(assi.obj.get("voice_detection")) {
				timeout += Number(assi.obj.get("stt.extend_timeout"));
			}

			if(elapsed_time >= timeout) {
				if(DEBUG) {
					logger.info("[stt_ami:eventAudio (" + assi.handler.sid + ")] Timeout.");
				}
				assi.obj.put("result", "TIMEOUT");
				assi.raiseResult(assi);
				is_timeout = true;
			}
		}
	}

	if(!is_timeout) {
		// AudioProcessor that is also a HashMap so state can be attached via put()/get()
		var ExAudioProcessor = Java.extend(AudioProcessor, HashMap, {
			process: function(audioEvent) { return true; },
			processingFinished: function() {}
		});

		// Builds an audio processor that forwards resampled audio to the
		// WebSocket wrapper stored under baseAudioProcessor's "wrp" key
		function createAudioProcessorWithSharedLogic(baseAudioProcessor) {
	   		var ImplClass = Java.extend(ExAudioProcessor, {
				process: function(audioEvent) {
					// Send the audio data to the WebSocket recognition server
					baseAudioProcessor.get("wrp").feedData(audioEvent.getByteBuffer(), 0, audioEvent.getBufferSize());
					return true;
				},
				processingFinished: function() {}
			});

			return new ImplClass();
		}

		var ExAmiAudioRun = Java.extend(Runnable, HashMap, {
			run: function() {}
		});

		// Builds a runnable that executes the dispatcher stored under
		// baseRunner's "dispatcher" key
		function createRunnerWithSharedLogic(baseRunner) {
	   		var ImplClass = Java.extend(ExAmiAudioRun, {
				run: function() {
					baseRunner.get("dispatcher").run();
				}
			});

			return new ImplClass();
		}

		var ch = assi.obj.get("channel");
		if(ch == 1) {
			// mono

			// When restarting, reconnect and resume the audio feed
			if(assi.obj.get("restart_ch1")) {
				var wrp = assi.obj.get("wrp");
				// Disconnect from the WebSocket recognition server
				if(wrp.isConnected()) {
					wrp.disconnect();
				}
				// Connect to the WebSocket recognition server
				if(wrp.connect()) {
					// Resume sending audio data to the server
					if(wrp.feedDataResume()) {
						assi.obj.get("listener").remove("end_time");
						assi.obj.remove("restart_ch1");
						assi.obj.put("stt.start_time", current_time);	// update the recognition start time
					}
					else {
						logger.error("[stt_ami:eventAudio_ami (" + assi.handler.sid + ")] feedDataResume failed." + wrp.getLastMessage());
					}
				}
				else {
					logger.error("[stt_ami:eventAudio_ami (" + assi.handler.sid + ")] connect failed." + wrp.getLastMessage());
				}
			}

			var audio_buffer = data;
			if(assi.obj.get("is_pcm_convert")) {
				// Convert ulaw to PCM 16bit (little endian); the conversion
				// buffer is cached in assi.obj and reused across frames
				if(assi.obj.get("audio_buffer")) {
					audio_buffer = assi.obj.get("audio_buffer");
				}
				else {
					audio_buffer = new ByteArray(size * 2);
					assi.obj.put("audio_buffer", audio_buffer);
				}
				var audio_buffer_cnt = 0;
				for(var i=0; i<size; i++) {
					var tmp = UlawUtil.toPcm16(data[i]);
					audio_buffer[audio_buffer_cnt] = tmp & 0xff;
					audio_buffer[audio_buffer_cnt + 1] = (tmp >> 8) & 0xff;
					audio_buffer_cnt += 2;
				}
			}

			if(assi.obj.get("is_samplingrate_convert")) {
				// Resample from 8kHz to 16kHz
				try {
					var audioFormat = new AudioFormat(8000, 16, 1, true, false);
					var dispatcher = AudioDispatcherFactory.fromByteArray(audio_buffer, audioFormat, audio_buffer.length, 0);
					var resampler = new RateTransposer(2);
					dispatcher.addAudioProcessor(resampler);
					var ex_audioProcessor = new ExAudioProcessor();
					ex_audioProcessor.put("wrp", assi.obj.get("wrp"));
					var audioProcessor = createAudioProcessorWithSharedLogic(ex_audioProcessor);
					dispatcher.addAudioProcessor(audioProcessor);

					// Start the resampling thread
					var ex_ami_audio_run = new ExAmiAudioRun();
					ex_ami_audio_run.put("dispatcher", dispatcher);
					var ami_audio_run = createRunnerWithSharedLogic(ex_ami_audio_run);
					var ami_audio_thread = new Thread(ami_audio_run);
					ami_audio_thread.start();
				}
				catch(e) {
					logger.error("[stt_ami:eventAudio_ami (" + assi.handler.sid + ")] Changing sampling rate failed: " + e);
				}
			}
			else {
				// Send the audio data to the WebSocket recognition server
				assi.obj.get("wrp").feedData(audio_buffer, 0, audio_buffer.length);
			}
		}
		else if(ch == 2) {
			// stereo

			// When restarting, reconnect and resume the audio feed
			// channel 1
			if(assi.obj.get("restart_ch1")) {
				var wrp_ch1 = assi.obj.get("wrp_ch1");
				// Disconnect from the WebSocket recognition server
				if(wrp_ch1.isConnected()) {
					wrp_ch1.disconnect();
				}
				// Connect to the WebSocket recognition server
				if(wrp_ch1.connect()) {
					// Resume sending audio data to the server
					if(wrp_ch1.feedDataResume()) {
						assi.obj.get("listener_ch1").remove("end_time");
						assi.obj.remove("restart_ch1");
						assi.obj.put("stt.start_time", current_time);	// update the recognition start time
					}
					else {
						logger.error("[stt_ami:eventAudio_ami (" + assi.handler.sid + ")] ch1: feedDataResume failed." + wrp_ch1.getLastMessage());
					}
				}
				else {
					logger.error("[stt_ami:eventAudio_ami (" + assi.handler.sid + ")] ch1: connect failed." + wrp_ch1.getLastMessage());
				}
			}
			// channel 2
			if(assi.obj.get("restart_ch2")) {
				var wrp_ch2 = assi.obj.get("wrp_ch2");
				// Disconnect from the WebSocket recognition server
				if(wrp_ch2.isConnected()) {
					wrp_ch2.disconnect();
				}
				// Connect to the WebSocket recognition server
				if(wrp_ch2.connect()) {
					// Resume sending audio data to the server
					if(wrp_ch2.feedDataResume()) {
						assi.obj.get("listener_ch2").remove("end_time");
						assi.obj.remove("restart_ch2");
						assi.obj.put("stt.start_time", current_time);	// update the recognition start time
					}
					else {
						logger.error("[stt_ami:eventAudio_ami (" + assi.handler.sid + ")] ch2: feedDataResume failed." + wrp_ch2.getLastMessage());
					}
				}
				else {
					logger.error("[stt_ami:eventAudio_ami (" + assi.handler.sid + ")] ch2: connect failed." + wrp_ch2.getLastMessage());
				}
			}

			// Split the interleaved audio data into per-channel buffers
			var data_ch1 = null;
			var data_ch2 = null;
			if(assi.obj.get("data_buffer_ch1")) {
				data_ch1 = assi.obj.get("data_buffer_ch1");
			}
			else {
				data_ch1 = new ByteArray(size / 2);
				assi.obj.put("data_buffer_ch1", data_ch1);
			}
			if(assi.obj.get("data_buffer_ch2")) {
				data_ch2 = assi.obj.get("data_buffer_ch2");
			}
			else {
				data_ch2 = new ByteArray(size / 2);
				assi.obj.put("data_buffer_ch2", data_ch2);
			}

			var index_ch1 = 0;
			var index_ch2 = 0;
			var audio_division_unit = assi.obj.get("audio_division_unit");
			if(audio_division_unit == 1) {
				// Split per 1 byte (byte-interleaved channels)
				for(var i=0; i<size; i++) {
					if((i % 2) == 0) {
						data_ch1[index_ch1++] = data[i];
					}
					else {
						data_ch2[index_ch2++] = data[i];
					}
				}
			}
			else if(audio_division_unit == 2) {
				// Split per 2 bytes (16-bit-sample-interleaved channels)
				for(var i=0; i<size; i+=2) {
					if(((i / 2) % 2) == 0) {
						data_ch1[index_ch1++] = data[i];
						data_ch1[index_ch1++] = data[i + 1];
					}
					else {
						data_ch2[index_ch2++] = data[i];
						data_ch2[index_ch2++] = data[i + 1];
					}
				}
			}

			var audio_buffer_ch1 = data_ch1;
			var audio_buffer_ch2 = data_ch2;
			if(assi.obj.get("is_pcm_convert")) {
				// Convert ulaw to PCM 16bit (little endian); conversion buffers
				// are cached in assi.obj and reused across frames
				if(assi.obj.get("audio_buffer_ch1")) {
					audio_buffer_ch1 = assi.obj.get("audio_buffer_ch1");
				}
				else {
					audio_buffer_ch1 = new ByteArray(size);
					assi.obj.put("audio_buffer_ch1", audio_buffer_ch1);
				}
				if(assi.obj.get("audio_buffer_ch2")) {
					audio_buffer_ch2 = assi.obj.get("audio_buffer_ch2");
				}
				else {
					audio_buffer_ch2 = new ByteArray(size);
					assi.obj.put("audio_buffer_ch2", audio_buffer_ch2);
				}
				var audio_buffer_cnt = 0;
				for(var i=0; i<(size / 2); i++) {
					// channel 1
					var tmp_ch1 = UlawUtil.toPcm16(data_ch1[i]);
					audio_buffer_ch1[audio_buffer_cnt] = tmp_ch1 & 0xff;
					audio_buffer_ch1[audio_buffer_cnt + 1] = (tmp_ch1 >> 8) & 0xff;
					// channel 2
					var tmp_ch2 = UlawUtil.toPcm16(data_ch2[i]);
					audio_buffer_ch2[audio_buffer_cnt] = tmp_ch2 & 0xff;
					audio_buffer_ch2[audio_buffer_cnt + 1] = (tmp_ch2 >> 8) & 0xff;
					audio_buffer_cnt += 2;
				}
			}

			if(assi.obj.get("is_samplingrate_convert")) {
				// Resample from 8kHz to 16kHz
				try {
					var audioFormat = new AudioFormat(8000, 16, 1, true, false);
					// NOTE(review): the same RateTransposer instance is added to
					// both channel dispatchers — confirm it is safe to share one
					// resampler across two concurrently-running dispatchers
					var resampler = new RateTransposer(2);
					// channel 1
					var dispatcher_ch1 = AudioDispatcherFactory.fromByteArray(audio_buffer_ch1, audioFormat, audio_buffer_ch1.length, 0);
					dispatcher_ch1.addAudioProcessor(resampler);
					var ex_audioProcessor_ch1 = new ExAudioProcessor();
					ex_audioProcessor_ch1.put("wrp", assi.obj.get("wrp_ch1"));
					var audioProcessor_ch1 = createAudioProcessorWithSharedLogic(ex_audioProcessor_ch1);
					dispatcher_ch1.addAudioProcessor(audioProcessor_ch1);

					// Start the resampling thread
					var ex_ami_audio_run_ch1 = new ExAmiAudioRun();
					ex_ami_audio_run_ch1.put("dispatcher", dispatcher_ch1);
					var ami_audio_run_ch1 = createRunnerWithSharedLogic(ex_ami_audio_run_ch1);
					var ami_audio_thread_ch1 = new Thread(ami_audio_run_ch1);
					ami_audio_thread_ch1.start();

					// channel 2
					var dispatcher_ch2 = AudioDispatcherFactory.fromByteArray(audio_buffer_ch2, audioFormat, audio_buffer_ch2.length, 0);
					dispatcher_ch2.addAudioProcessor(resampler);
					var ex_audioProcessor_ch2 = new ExAudioProcessor();
					ex_audioProcessor_ch2.put("wrp", assi.obj.get("wrp_ch2"));
					var audioProcessor_ch2 = createAudioProcessorWithSharedLogic(ex_audioProcessor_ch2);
					dispatcher_ch2.addAudioProcessor(audioProcessor_ch2);

					// Start the resampling thread
					var ex_ami_audio_run_ch2 = new ExAmiAudioRun();
					ex_ami_audio_run_ch2.put("dispatcher", dispatcher_ch2);
					var ami_audio_run_ch2 = createRunnerWithSharedLogic(ex_ami_audio_run_ch2);
					var ami_audio_thread_ch2 = new Thread(ami_audio_run_ch2);
					ami_audio_thread_ch2.start();
				}
				catch(e) {
					logger.error("[stt_ami:eventAudio_ami (" + assi.handler.sid + ")] Changing sampling rate failed: " + e);
				}
			}
			else {
				// Send the audio data to the WebSocket recognition server
				assi.obj.get("wrp_ch1").feedData(audio_buffer_ch1, 0, audio_buffer_ch1.length);
				assi.obj.get("wrp_ch2").feedData(audio_buffer_ch2, 0, audio_buffer_ch2.length);
			}
		}

		if(TEST_DIR) {
			var fout = assi.obj.get("file_output");
			if(fout) {
				try {
					fout.write(data, 0, size);
				}
				catch(e) {
					if(!e.getClass) {
						// JavaScript error
						logger.error(e.message + "\n" + e.stack);
					}
					else {
						// Java exception
						logger.error(e.getMessage(), e);
					}
				}
			}
		}
	}
}


function createClientStreamV1(assi, speechSettings) {
	var ret = false;
	var responseObserver = null;
	var client = null;
	try {
		client = SpeechClientV1.create(speechSettings);
		responseObserver = new ResponseObserver() {
			onStart : function(controller) {
				if(DEBUG) {
					logger.info("[stt_g:V1 (" + assi.handler.sid + ")] Transcribe start.");
				}
				assi.obj.put("stream_controller", controller);
			},
			onResponse : function(response) {
				if(DEBUG) {
					logger.info("[stt_g:V1 (" + assi.handler.sid + ")] onResponse:");
				}
				try {
					var response_time = new Date().getTime();	// ccs
					assi.obj.put("response", response);
					if(!response.getResultsList().isEmpty()) {
						var res = response.getResultsList().get(0);
						if(!res.getAlternativesList().isEmpty()) {
							var alternative = res.getAlternativesList().get(0);
							var word = null;
							if(!alternative.getWordsList().isEmpty()) {
								word = alternative.getWordsList().get(0);
							}
							if(DEBUG) {
								logger.info("[stt_g:V1 (" + assi.handler.sid + ")] starttime=" + (word ? (word.getStartTime().getSeconds()*1000) + (word.getStartTime().getNanos()/1000000) : null) +
												", endtime=" + ((res.getResultEndTime().getSeconds()*1000) + (res.getResultEndTime().getNanos()/1000000)) +
												", final=" + res.getIsFinal() +
												", ch=" + res.getChannelTag() +
												": " + alternative.getTranscript());
							}
							var result = alternative.getTranscript();
							if(result) {
								// 初回の音声認識結果検出を設定
								if(!assi.obj.get("voice_detection")) {
									assi.obj.put("voice_detection", true);
								}
								// 音声認識結果を受信した時間を保持
								assi.obj.put("voice_detection_time", response_time);
							}

							var ptn = assi.obj.get("result.pattern");
							if(ptn) {
								var m = ptn.matcher(result);
								if(m.find()) {
									if(DEBUG) {
										logger.info("[stt_g:V1 (" + assi.handler.sid + ")] " + result + ", ptn=" + ptn);
									}
									var group_only = assi.obj.get("result.pattern.group_only");
									if(group_only) {
										var sep = assi.obj.get("result.pattern.group_sep");
										sep = sep || "";
										var r = "";
										var c = m.groupCount()
										for(var i=1; i<=c; i++) {
											if(r.length > 0) {
												r += sep;
											}
											r += m.group(i);
										}
										result = r;
									}
									assi.obj.put("result", result);
									assi.obj.put("result.pattern.matcher", m);
									assi.obj.put("result.raised", true);
									assi.raiseResult(assi);
									return;
								}
							}
							assi.obj.put("result", result);
							// ccs start
							if(assi.obj.get("stt.source") == "call_record") {
								// 通話録音の場合
								if(res.getIsFinal() || (!assi.obj.get("before_result") && result) || (assi.obj.get("before_result") && result && (assi.obj.get("before_result") != result))) {
									// 最終結果、または、前回の音声認識結果と異なる場合
									var current_time = new Date().getTime();
									var context = {};
									var channel = "";

									if(res.getIsFinal()) {
										// 最終結果の場合
										var pbx_con = assi.obj.get("stt.pbx_db_connection");
										if(pbx_con) {
											context.responsetime = response_time;
											context.starttime = (word ? (word.getStartTime().getSeconds()*1000) + (word.getStartTime().getNanos()/1000000) : null);
											context.endtime = ((res.getResultEndTime().getSeconds()*1000) + (res.getResultEndTime().getNanos()/1000000));
											context.final = res.getIsFinal();
											switch(res.getChannelTag()) {
												case 0:					// モノラル
												case 1:
													channel = "ch1";
													break;
												case 2:
													channel = "ch2";
													break;
												default:
													break;
											}
											if(channel) {
												context.channel = channel;
											}
											context.content = result;

											// PBXのDBに追加
											if(DEBUG) {
												logger.info("[stt_g:V1 (" + assi.handler.sid + ")] add record to pbx");
	 										}
											insertRecordToPbx(pbx_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), assi.obj.get("stt.other_number"), JSON.stringify(context));
										}
									}

									var ccs_con = assi.obj.get("stt.ccs_db_connection");
									if(ccs_con) {
										if(Object.keys(context).length == 0) {
											context.responsetime = response_time;
											context.starttime = (word ? (word.getStartTime().getSeconds()*1000) + (word.getStartTime().getNanos()/1000000) : null);
											context.endtime = ((res.getResultEndTime().getSeconds()*1000) + (res.getResultEndTime().getNanos()/1000000));
											context.final = res.getIsFinal();
											switch(res.getChannelTag()) {
												case 0:					// モノラル
												case 1:
													channel = "ch1";
													break;
												case 2:
													channel = "ch2";
													break;
												default:
													break;
											}
											if(channel) {
												context.channel = channel;
											}
											context.content = result;
										}

										// CCSのDBに追加
										if(DEBUG) {
											logger.info("[stt_g:V1 (" + assi.handler.sid + ")] add record to ccs");
 										}
										insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), JSON.stringify(context), 0, CCS_STT_MODE_GCP, 0, null, null, current_time, current_time);
									}

									if(res.getIsFinal()) {
										// 最終結果の場合は前回の音声認識結果をクリア
										assi.obj.remove("before_result");
									}
									else {
										// 最終結果以外の場合は前回の音声認識結果を更新
										assi.obj.put("before_result", result);
									}
								}
							}
							// ccs end
						}
					}
				}
				catch(e) {
					if(!e.getClass) {
						logger.error("[stt_g:V1 (" + assi.handler.sid + ")] " + e.message + "\n" + e.stack);
					}
					else {
						logger.error("[stt_g:V1 (" + assi.handler.sid + ")] " + e.getMessage(), e);
					}
				}
			},
			onComplete : function() {
				if(DEBUG) {
					logger.info("[stt_g:V1 (" + assi.handler.sid + ")] onComplete");
				}

				// ccs start
				// RFSに音声認識結果をアップロード
				uploadToRfs(assi);

				var pbx_con = assi.obj.get("stt.pbx_db_connection");
				if(pbx_con) {
					if(DEBUG) {
						logger.info("[stt_g:V1 (" + assi.handler.sid + ")] pbx db close");
 					}
					pbx_con.close();
				}

				var ccs_con = assi.obj.get("stt.ccs_db_connection");
				if(ccs_con) {
					if(DEBUG) {
						logger.info("[stt_g:V1 (" + assi.handler.sid + ")] add final record to ccs and ccs db close");
 					}
					var current_time = new Date().getTime();
					// 最終レコードをDBに追加
					insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), null, 1, CCS_STT_MODE_GCP, 0, null, null, current_time, current_time);
					ccs_con.close();
				}
				// ccs end

				assi.stop();
				if(!assi.obj.get("result.raised")) {
					var r = assi.obj.put("result");
					assi.raiseResult(r);
				}
			},
			onError : function(t) {
				if(GCP_V1_RETRY_ERROR_PATTERN.test(t.toString())) {
					// 録音中断して一定時間経過した場合は再接続する
					logger.debug("[stt_g:V1 (" + assi.handler.sid + ")] " + t);
					assi.obj.put("restart", true);
				}
				else if(GCP_IGNORE_ERROR_PATTERN.test(t.toString())) {
					// 音声認識開始からの経過時間がリミットを超えた場合は何もしない
					logger.debug("[stt_g:V1 (" + assi.handler.sid + ")] " + t);
				}
				else {
					logger.error("[stt_g:V1 (" + assi.handler.sid + ")] onError. " + t);
					assi.stop();
				}
			}
		};

		var smpl = 8000;
		var ch = 1;
		var enc = RecognitionConfigV1_AudioEncoding.MULAW;
		switch(assi.payload) {
			case PayloadInfo.PAYLOADTYPE_PCM16_MONO:
				enc = RecognitionConfigV1_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_MONO:
				//
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_STEREO:
				ch = 2;
				enc = RecognitionConfigV1_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_STEREO:
				ch = 2;
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_48000_MONO:
				smpl = 48000;
				enc = RecognitionConfigV1_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_48000_MONO:
				smpl = 48000;
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_48000_STEREO:
				smpl = 48000;
				ch = 2;
				enc = RecognitionConfigV1_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_48000_STEREO:
				smpl = 48000;
				ch = 2;
				break;
			default:
				logger.error("[stt_g:createClientStreamV1 (" + assi.handler.sid + ")] Unkown payload:" + a.payload);
				break;
		}

		var callable = client.streamingRecognizeCallable();
		var clientStream = callable.splitCall(responseObserver);
		assi.obj.put("response_observer", responseObserver);

		// 言語
		var language = assi.obj.get("stt_g.language");
		// モデル
		var model = assi.obj.get("stt_g.model");
		// フレーズセット
		var speechAdaptation = null;
		if(assi.obj.get("stt_g.phrase_set")) {
			speechAdaptation = SpeechAdaptationV1.newBuilder()
						.addPhraseSetReferences(assi.obj.get("stt_g.phrase_set"))
						.build();
		}

		var recognitionConfig = null;
		if(ch == 2) {
			if(speechAdaptation) {
				recognitionConfig = RecognitionConfigV1.newBuilder()
							.setEncoding(enc)											// エンコード
							.setLanguageCode(language)									// 言語
							.setSampleRateHertz(smpl)									// サンプリング周波数
							.setAudioChannelCount(2)									// チャネル数
							.setEnableSeparateRecognitionPerChannel(true)				// チャネル別の認識
							.setModel(model)											// モデル
							.setEnableAutomaticPunctuation(true)						// 句読点追加
							.setAdaptation(speechAdaptation)							// フレーズセット
							.setEnableWordTimeOffsets(true)								// 単語のタイムスタンプ取得
							.build();
			}
			else {
				recognitionConfig = RecognitionConfigV1.newBuilder()
							.setEncoding(enc)											// エンコード
							.setLanguageCode(language)									// 言語
							.setSampleRateHertz(smpl)									// サンプリング周波数
							.setAudioChannelCount(2)									// チャネル数
							.setEnableSeparateRecognitionPerChannel(true)				// チャネル別の認識
							.setModel(model)											// モデル
							.setEnableAutomaticPunctuation(true)						// 句読点追加
							.setEnableWordTimeOffsets(true)								// 単語のタイムスタンプ取得
							.build();
			}
		}
		else {
			if(speechAdaptation) {
				recognitionConfig = RecognitionConfigV1.newBuilder()
							.setEncoding(enc)											// エンコード
							.setLanguageCode(language)									// 言語
							.setSampleRateHertz(smpl)									// サンプリング周波数
							.setModel(model)											// モデル
							.setEnableAutomaticPunctuation(true)						// 句読点追加
							.setAdaptation(speechAdaptation)							// フレーズセット
							.setEnableWordTimeOffsets(true)								// 単語のタイムスタンプ取得
							.build();
			}
			else {
				recognitionConfig = RecognitionConfigV1.newBuilder()
							.setEncoding(enc)											// エンコード
							.setLanguageCode(language)									// 言語
							.setSampleRateHertz(smpl)									// サンプリング周波数
							.setModel(model)											// モデル
							.setEnableAutomaticPunctuation(true)						// 句読点追加
							.setEnableWordTimeOffsets(true)								// 単語のタイムスタンプ取得
							.build();
			}
		}

		var streamingRecognitionConfig = StreamingRecognitionConfigV1.newBuilder()
							.setConfig(recognitionConfig)
							.setInterimResults(true)									// 中間結果出力
							.build();
		assi.obj.put("streaming_recognition_config", streamingRecognitionConfig);

		var request = StreamingRecognizeRequestV1.newBuilder()
				.setStreamingConfig(streamingRecognitionConfig)
				.build(); // The first request in a streaming call has to be a config

		clientStream.send(request);
		assi.obj.put("clientStream", clientStream);
		assi.obj.put("client", client);
		ret = true;
	}
	catch(e) {
		if(!e.getClass) {
			logger.error("[stt_g:createClientStreamV1 (" + assi.handler.sid + ")] " + e.message + "\n" + e.stack);
		}
		else {
			logger.error("[stt_g:createClientStreamV1 (" + assi.handler.sid + ")] " + e.getMessage(), e);
		}

		if(client) {
			client.close();
		}
	}

	return ret;
}


function createClientStreamV1p1beta1(assi, speechSettings) {
	var ret = false;
	var responseObserver = null;
	var client = null;
	try {
		client = SpeechClientV1p1beta1.create(speechSettings);
		responseObserver = new ResponseObserver() {
			onStart : function(controller) {
				if(DEBUG) {
					logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] Transcribe start.");
				}
				assi.obj.put("stream_controller", controller);
			},
			onResponse : function(response) {
				if(DEBUG) {
					logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] onResponse:");
				}
				try {
					var response_time = new Date().getTime();	// ccs
					assi.obj.put("response", response);
					if(!response.getResultsList().isEmpty()) {
						var res = response.getResultsList().get(0);
						if(!res.getAlternativesList().isEmpty()) {
							var alternative = res.getAlternativesList().get(0);
							var word = null;
							if(!alternative.getWordsList().isEmpty()) {
								word = alternative.getWordsList().get(0);
							}
							if(DEBUG) {
								logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] starttime=" + (word ? (word.getStartTime().getSeconds()*1000) + (word.getStartTime().getNanos()/1000000) : null) +
												", endtime=" + ((res.getResultEndTime().getSeconds()*1000) + (res.getResultEndTime().getNanos()/1000000)) +
												", final=" + res.getIsFinal() +
												", ch=" + res.getChannelTag() +
												": " + alternative.getTranscript());
							}
							var result = alternative.getTranscript();
							if(result) {
								// 初回の音声認識結果検出を設定
								if(!assi.obj.get("voice_detection")) {
									assi.obj.put("voice_detection", true);
								}
								// 音声認識結果を受信した時間を保持
								assi.obj.put("voice_detection_time", response_time);
							}

							var ptn = assi.obj.get("result.pattern");
							if(ptn) {
								var m = ptn.matcher(result);
								if(m.find()) {
									if(DEBUG) {
										logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] " + result + ", ptn=" + ptn);
									}
									var group_only = assi.obj.get("result.pattern.group_only");
									if(group_only) {
										var sep = assi.obj.get("result.pattern.group_sep");
										sep = sep || "";
										var r = "";
										var c = m.groupCount()
										for(var i=1; i<=c; i++) {
											if(r.length > 0) {
												r += sep;
											}
											r += m.group(i);
										}
										result = r;
									}
									assi.obj.put("result", result);
									assi.obj.put("result.pattern.matcher", m);
									assi.obj.put("result.raised", true);
									assi.raiseResult(assi);
									return;
								}
							}
							assi.obj.put("result", result);
							// ccs start
							if(assi.obj.get("stt.source") == "call_record") {
								// 通話録音の場合
								if(res.getIsFinal() || (!assi.obj.get("before_result") && result) || (assi.obj.get("before_result") && result && (assi.obj.get("before_result") != result))) {
									// 最終結果、または、前回の音声認識結果と異なる場合
									var current_time = new Date().getTime();
									var context = {};
									var channel = "";

									if(res.getIsFinal()) {
										// 最終結果の場合
										var pbx_con = assi.obj.get("stt.pbx_db_connection");
										if(pbx_con) {
											context.responsetime = response_time;
											context.starttime = (word ? (word.getStartTime().getSeconds()*1000) + (word.getStartTime().getNanos()/1000000) : null);
											context.endtime = ((res.getResultEndTime().getSeconds()*1000) + (res.getResultEndTime().getNanos()/1000000));
											context.final = res.getIsFinal();
											switch(res.getChannelTag()) {
												case 0:					// モノラル
												case 1:
													channel = "ch1";
													break;
												case 2:
													channel = "ch2";
													break;
												default:
													break;
											}
											if(channel) {
												context.channel = channel;
											}
											context.content = result;

											// PBXのDBに追加
											if(DEBUG) {
												logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] add record to pbx");
	 										}
											insertRecordToPbx(pbx_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), assi.obj.get("stt.other_number"), JSON.stringify(context));
										}
									}

									var ccs_con = assi.obj.get("stt.ccs_db_connection");
									if(ccs_con) {
										if(Object.keys(context).length == 0) {
											context.responsetime = response_time;
											context.starttime = (word ? (word.getStartTime().getSeconds()*1000) + (word.getStartTime().getNanos()/1000000) : null);
											context.endtime = ((res.getResultEndTime().getSeconds()*1000) + (res.getResultEndTime().getNanos()/1000000));
											context.final = res.getIsFinal();
											switch(res.getChannelTag()) {
												case 0:					// モノラル
												case 1:
													channel = "ch1";
													break;
												case 2:
													channel = "ch2";
													break;
												default:
													break;
											}
											if(channel) {
												context.channel = channel;
											}
											context.content = result;
										}

										// CCSのDBに追加
										if(DEBUG) {
											logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] add record to ccs");
 										}
										insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), JSON.stringify(context), 0, CCS_STT_MODE_GCP, 0, null, null, current_time, current_time);
									}

									if(res.getIsFinal()) {
										// 最終結果の場合は前回の音声認識結果をクリア
										assi.obj.remove("before_result");
									}
									else {
										// 最終結果以外の場合は前回の音声認識結果を更新
										assi.obj.put("before_result", result);
									}
								}
							}
							// ccs end
						}
					}
				}
				catch(e) {
					if(!e.getClass) {
						logger.error("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] " + e.message + "\n" + e.stack);
					}
					else {
						logger.error("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] " + e.getMessage(), e);
					}
				}
			},
			onComplete : function() {
				if(DEBUG) {
					logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] onComplete");
				}

				// ccs start
				// RFSに音声認識結果をアップロード
				uploadToRfs(assi);

				var pbx_con = assi.obj.get("stt.pbx_db_connection");
				if(pbx_con) {
					if(DEBUG) {
						logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] pbx db close");
 					}
					pbx_con.close();
				}

				var ccs_con = assi.obj.get("stt.ccs_db_connection");
				if(ccs_con) {
					if(DEBUG) {
						logger.info("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] add final record to ccs and ccs db close");
 					}
					var current_time = new Date().getTime();
					// 最終レコードをDBに追加
					insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), null, 1, CCS_STT_MODE_GCP, 0, null, null, current_time, current_time);
					ccs_con.close();
				}
				// ccs end

				assi.stop();
				if(!assi.obj.get("result.raised")) {
					var r = assi.obj.put("result");
					assi.raiseResult(r);
				}
			},
			onError : function(t) {
				if(GCP_V1_RETRY_ERROR_PATTERN.test(t.toString())) {
					// 録音中断して一定時間経過した場合は再接続する
					logger.debug("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] " + t);
					assi.obj.put("restart", true);
				}
				else if(GCP_IGNORE_ERROR_PATTERN.test(t.toString())) {
					// 音声認識開始からの経過時間がリミットを超えた場合は何もしない
					logger.debug("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] " + t);
				}
				else {
					logger.error("[stt_g:V1p1beta1 (" + assi.handler.sid + ")] onError. " + t);
					assi.stop();
				}
			}
		};

		var smpl = 8000;
		var ch = 1;
		var enc = RecognitionConfigV1p1beta1_AudioEncoding.MULAW;
		switch(assi.payload) {
			case PayloadInfo.PAYLOADTYPE_PCM16_MONO:
				enc = RecognitionConfigV1p1beta1_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_MONO:
				//
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_STEREO:
				ch = 2;
				enc = RecognitionConfigV1p1beta1_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_STEREO:
				ch = 2;
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_48000_MONO:
				smpl = 48000;
				enc = RecognitionConfigV1p1beta1_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_48000_MONO:
				smpl = 48000;
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_48000_STEREO:
				smpl = 48000;
				ch = 2;
				enc = RecognitionConfigV1p1beta1_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_48000_STEREO:
				smpl = 48000;
				ch = 2;
				break;
			default:
				logger.error("[stt_g:createClientStreamV1p1beta1 (" + assi.handler.sid + ")] Unkown payload:" + a.payload);
				break;
		}

		var callable = client.streamingRecognizeCallable();
		var clientStream = callable.splitCall(responseObserver);
		assi.obj.put("response_observer", responseObserver);

		// 言語
		var language = assi.obj.get("stt_g.language");
		// モデル
		var model = assi.obj.get("stt_g.model");
		// フレーズセット
		var speechAdaptation = null;
		if(assi.obj.get("stt_g.phrase_set")) {
			speechAdaptation = SpeechAdaptationV1p1beta1.newBuilder()
						.addPhraseSetReferences(assi.obj.get("stt_g.phrase_set"))
						.build();
		}

		var recognitionConfig = null;
		if(ch == 2) {
			if(speechAdaptation) {
				recognitionConfig = RecognitionConfigV1p1beta1.newBuilder()
							.setEncoding(enc)											// エンコード
							.setLanguageCode(language)									// 言語
							.setSampleRateHertz(smpl)									// サンプリング周波数
							.setAudioChannelCount(2)									// チャネル数
							.setEnableSeparateRecognitionPerChannel(true)				// チャネル別の認識
							.setModel(model)											// モデル
							.setEnableAutomaticPunctuation(true)						// 句読点追加
							.setAdaptation(speechAdaptation)							// フレーズセット
							.setEnableWordTimeOffsets(true)								// 単語のタイムスタンプ取得
							.build();
			}
			else {
				recognitionConfig = RecognitionConfigV1p1beta1.newBuilder()
							.setEncoding(enc)											// エンコード
							.setLanguageCode(language)									// 言語
							.setSampleRateHertz(smpl)									// サンプリング周波数
							.setAudioChannelCount(2)									// チャネル数
							.setEnableSeparateRecognitionPerChannel(true)				// チャネル別の認識
							.setModel(model)											// モデル
							.setEnableAutomaticPunctuation(true)						// 句読点追加
							.setEnableWordTimeOffsets(true)								// 単語のタイムスタンプ取得
							.build();
			}
		}
		else {
			if(speechAdaptation) {
				recognitionConfig = RecognitionConfigV1p1beta1.newBuilder()
							.setEncoding(enc)											// エンコード
							.setLanguageCode(language)									// 言語
							.setSampleRateHertz(smpl)									// サンプリング周波数
							.setModel(model)											// モデル
							.setEnableAutomaticPunctuation(true)						// 句読点追加
							.setAdaptation(speechAdaptation)							// フレーズセット
							.setEnableWordTimeOffsets(true)								// 単語のタイムスタンプ取得
							.build();
			}
			else {
				recognitionConfig = RecognitionConfigV1p1beta1.newBuilder()
							.setEncoding(enc)											// エンコード
							.setLanguageCode(language)									// 言語
							.setSampleRateHertz(smpl)									// サンプリング周波数
							.setModel(model)											// モデル
							.setEnableAutomaticPunctuation(true)						// 句読点追加
							.setEnableWordTimeOffsets(true)								// 単語のタイムスタンプ取得
							.build();
			}
		}

		var streamingRecognitionConfig = StreamingRecognitionConfigV1p1beta1.newBuilder()
							.setConfig(recognitionConfig)
							.setInterimResults(true)									// 中間結果出力
							.build();
		assi.obj.put("streaming_recognition_config", streamingRecognitionConfig);

		var request = StreamingRecognizeRequestV1p1beta1.newBuilder()
				.setStreamingConfig(streamingRecognitionConfig)
				.build(); // The first request in a streaming call has to be a config

		clientStream.send(request);
		assi.obj.put("clientStream", clientStream);
		assi.obj.put("client", client);
		ret = true;
	}
	catch(e) {
		if(!e.getClass) {
			logger.error("[stt_g:createClientStreamV1p1beta1 (" + assi.handler.sid + ")] " + e.message + "\n" + e.stack);
		}
		else {
			logger.error("[stt_g:createClientStreamV1p1beta1 (" + assi.handler.sid + ")] " + e.getMessage(), e);
		}

		if(client) {
			client.close();
		}
	}

	return ret;
}


function createClientStreamV2(assi, speechSettings) {
	var ret = false;
	var responseObserver = null;
	var client = null;
	try {
		client = SpeechClientV2.create(speechSettings);
		responseObserver = new ResponseObserver() {
			onStart : function(controller) {
				if(DEBUG) {
					logger.info("[stt_g:V2 (" + assi.handler.sid + ")] Transcribe start.");
				}
				assi.obj.put("stream_controller", controller);
			},
			onResponse : function(response) {
				if(DEBUG) {
					logger.info("[stt_g:V2 (" + assi.handler.sid + ")] onResponse:");
				}
				try {
					var response_time = new Date().getTime();	// ccs
					assi.obj.put("response", response);
					if(!response.getResultsList().isEmpty()) {
						var res = response.getResultsList().get(0);
						if(!res.getAlternativesList().isEmpty()) {
							var alternative = res.getAlternativesList().get(0);
							var word = null;
							if(!alternative.getWordsList().isEmpty()) {
								word = alternative.getWordsList().get(0);
							}
							if(DEBUG) {
								logger.info("[stt_g:V2 (" + assi.handler.sid + ")] starttime=" + (word ? (word.getStartOffset().getSeconds()*1000) + (word.getStartOffset().getNanos()/1000000) : null) +
												", endtime=" + ((res.getResultEndOffset().getSeconds()*1000) + (res.getResultEndOffset().getNanos()/1000000)) +
												", final=" + res.getIsFinal() +
												", ch=" + res.getChannelTag() +
												": " + alternative.getTranscript());
							}
							var result = alternative.getTranscript();
							if(result) {
								// 初回の音声認識結果検出を設定
								if(!assi.obj.get("voice_detection")) {
									assi.obj.put("voice_detection", true);
								}
								// 音声認識結果を受信した時間を保持
								assi.obj.put("voice_detection_time", response_time);
							}

							var ptn = assi.obj.get("result.pattern");
							if(ptn) {
								var m = ptn.matcher(result);
								if(m.find()) {
									if(DEBUG) {
										logger.info("[stt_g:V2 (" + assi.handler.sid + ")] " + result + ", ptn=" + ptn);
									}
									var group_only = assi.obj.get("result.pattern.group_only");
									if(group_only) {
										var sep = assi.obj.get("result.pattern.group_sep");
										sep = sep || "";
										var r = "";
										var c = m.groupCount()
										for(var i=1; i<=c; i++) {
											if(r.length > 0) {
												r += sep;
											}
											r += m.group(i);
										}
										result = r;
									}
									assi.obj.put("result", result);
									assi.obj.put("result.pattern.matcher", m);
									assi.obj.put("result.raised", true);
									assi.raiseResult(assi);
									return;
								}
							}
							assi.obj.put("result", result);
							// ccs start
							if(assi.obj.get("stt.source") == "call_record") {
								// 通話録音の場合
								if(res.getIsFinal() || (!assi.obj.get("before_result") && result) || (assi.obj.get("before_result") && result && (assi.obj.get("before_result") != result))) {
									// 最終結果、または、前回の音声認識結果と異なる場合
									var current_time = new Date().getTime();
									var context = {};
									var channel = "";

									if(res.getIsFinal()) {
										// 最終結果の場合
										var pbx_con = assi.obj.get("stt.pbx_db_connection");
										if(pbx_con) {
											context.responsetime = response_time;
											context.starttime = (word ? (word.getStartOffset().getSeconds()*1000) + (word.getStartOffset().getNanos()/1000000) : null);
											context.endtime = ((res.getResultEndOffset().getSeconds()*1000) + (res.getResultEndOffset().getNanos()/1000000));
											context.final = res.getIsFinal();
											switch(res.getChannelTag()) {
												case 0:					// モノラル
												case 1:
													channel = "ch1";
													break;
												case 2:
													channel = "ch2";
													break;
												default:
													break;
											}
											if(channel) {
												context.channel = channel;
											}
											context.content = result;

											// PBXのDBに追加
											if(DEBUG) {
												logger.info("[stt_g:V2 (" + assi.handler.sid + ")] add record to pbx");
	 										}
											insertRecordToPbx(pbx_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), assi.obj.get("stt.other_number"), JSON.stringify(context));
										}
									}

									var ccs_con = assi.obj.get("stt.ccs_db_connection");
									if(ccs_con) {
										if(Object.keys(context).length == 0) {
											context.responsetime = response_time;
											context.starttime = (word ? (word.getStartOffset().getSeconds()*1000) + (word.getStartOffset().getNanos()/1000000) : null);
											context.endtime = ((res.getResultEndOffset().getSeconds()*1000) + (res.getResultEndOffset().getNanos()/1000000));
											context.final = res.getIsFinal();
											switch(res.getChannelTag()) {
												case 0:					// モノラル
												case 1:
													channel = "ch1";
													break;
												case 2:
													channel = "ch2";
													break;
												default:
													break;
											}
											if(channel) {
												context.channel = channel;
											}
											context.content = result;
										}

										// CCSのDBに追加
										if(DEBUG) {
											logger.info("[stt_g:V2 (" + assi.handler.sid + ")] add record to ccs");
 										}
										insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), JSON.stringify(context), 0, CCS_STT_MODE_GCP, 0, null, null, current_time, current_time);
									}

									if(res.getIsFinal()) {
										// 最終結果の場合は前回の音声認識結果をクリア
										assi.obj.remove("before_result");
									}
									else {
										// 最終結果以外の場合は前回の音声認識結果を更新
										assi.obj.put("before_result", result);
									}
								}
							}
							// ccs end
						}
					}
				}
				catch(e) {
					if(!e.getClass) {
						logger.error("[stt_g:V2 (" + assi.handler.sid + ")] " + e.message + "\n" + e.stack);
					}
					else {
						logger.error("[stt_g:V2 (" + assi.handler.sid + ")] " + e.getMessage(), e);
					}
				}
			},
			onComplete : function() {
				if(DEBUG) {
					logger.info("[stt_g:V2 (" + assi.handler.sid + ")] onComplete");
				}

				// ccs start
				// RFSに音声認識結果をアップロード
				uploadToRfs(assi);

				var pbx_con = assi.obj.get("stt.pbx_db_connection");
				if(pbx_con) {
					if(DEBUG) {
						logger.info("[stt_g:V2 (" + assi.handler.sid + ")] pbx db close");
 					}
					pbx_con.close();
				}

				var ccs_con = assi.obj.get("stt.ccs_db_connection");
				if(ccs_con) {
					if(DEBUG) {
						logger.info("[stt_g:V2 (" + assi.handler.sid + ")] add final record to ccs and ccs db close");
 					}
					var current_time = new Date().getTime();
					// 最終レコードをDBに追加
					insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), null, 1, CCS_STT_MODE_GCP, 0, null, null, current_time, current_time);
					ccs_con.close();
				}
				// ccs end

				assi.stop();
				if(!assi.obj.get("result.raised")) {
					var r = assi.obj.put("result");
					assi.raiseResult(r);
				}
			},
			onError : function(t) {
				if(GCP_V2_RETRY_ERROR_PATTERN.test(t.toString())) {
					// 録音中断して一定時間経過した場合は再接続する
					logger.debug("[stt_g:V2 (" + assi.handler.sid + ")] " + t);
					assi.obj.put("restart", true);
				}
				else if(GCP_IGNORE_ERROR_PATTERN.test(t.toString())) {
					// 音声認識開始からの経過時間がリミットを超えた場合は何もしない
					logger.debug("[stt_g:V2 (" + assi.handler.sid + ")] " + t);
				}
				else {
					logger.error("[stt_g:V2 (" + assi.handler.sid + ")] onError. " + t);
					assi.stop();
				}
			}
		};

		var smpl = 8000;
		var ch = 1;
		var enc = ExplicitDecodingConfigV2_AudioEncoding.MULAW;
		switch(assi.payload) {
			case PayloadInfo.PAYLOADTYPE_PCM16_MONO:
				enc = ExplicitDecodingConfigV2_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_MONO:
				//
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_STEREO:
				ch = 2;
				enc = ExplicitDecodingConfigV2_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_STEREO:
				ch = 2;
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_48000_MONO:
				smpl = 48000;
				enc = ExplicitDecodingConfigV2_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_48000_MONO:
				smpl = 48000;
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_48000_STEREO:
				smpl = 48000;
				ch = 2;
				enc = ExplicitDecodingConfigV2_AudioEncoding.LINEAR16;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_48000_STEREO:
				smpl = 48000;
				ch = 2;
				break;
			default:
				logger.error("[stt_g:createClientStreamV2 (" + assi.handler.sid + ")] Unkown payload:" + a.payload);
				break;
		}

		var callable = client.streamingRecognizeCallable();
		var clientStream = callable.splitCall(responseObserver);
		assi.obj.put("response_observer", responseObserver);

		var explicitDecodingConfig = ExplicitDecodingConfigV2.newBuilder()
						.setEncoding(enc)												// エンコード
						.setSampleRateHertz(smpl)										// サンプリング周波数
						.setAudioChannelCount(ch)										// チャネル数
						.build();

		// 言語
		var language = assi.obj.get("stt_g.language");
		// モデル
		var model = assi.obj.get("stt_g.model");

		var recognitionFeatures;
		if(ch == 2) {
			recognitionFeatures = RecognitionFeaturesV2.newBuilder()
						.setMultiChannelMode(RecognitionFeaturesV2_MultiChannelMode.SEPARATE_RECOGNITION_PER_CHANNEL)
						.setEnableAutomaticPunctuation(true)		// 句読点追加
						.setEnableWordTimeOffsets(true)				// 単語のタイムスタンプ取得
						.build();
		}
		else {
			recognitionFeatures = RecognitionFeaturesV2.newBuilder()
						.setEnableAutomaticPunctuation(true)		// 句読点追加
						.setEnableWordTimeOffsets(true)				// 単語のタイムスタンプ取得
						.build();
		}

		// フレーズセット
		var speechAdaptation = null;
		if(assi.obj.get("stt_g.phrase_set")) {
			var adaptationPhraseSet = AdaptationPhraseSetV2.newBuilder()
						.setPhraseSet(assi.obj.get("stt_g.phrase_set"))
						.build();

			speechAdaptation = SpeechAdaptationV2.newBuilder()
						.addPhraseSets(adaptationPhraseSet)
						.build();
		}

		var recognitionConfig = null;
		if(speechAdaptation) {
			recognitionConfig = RecognitionConfigV2.newBuilder()
						.setExplicitDecodingConfig(explicitDecodingConfig)
						.setFeatures(recognitionFeatures)
						.addLanguageCodes(language)				// 言語
						.setModel(model)						// モデル
						.setAdaptation(speechAdaptation)		// フレーズセット
						.build();
		}
		else {
			recognitionConfig = RecognitionConfigV2.newBuilder()
						.setExplicitDecodingConfig(explicitDecodingConfig)
						.setFeatures(recognitionFeatures)
						.addLanguageCodes(language)				// 言語
						.setModel(model)						// モデル
						.build();
		}

		var streamingRecognitionFeatures = StreamingRecognitionFeaturesV2.newBuilder()
							.setInterimResults(true)			// 中間結果出力
							.build();

		var streamingRecognitionConfig = StreamingRecognitionConfigV2.newBuilder()
							.setConfig(recognitionConfig)
							.setStreamingFeatures(streamingRecognitionFeatures)
							.build();
		assi.obj.put("streaming_recognition_config", streamingRecognitionConfig);

		// 言語ツール
		var recognizer = assi.obj.get("stt_g.recognizer");

		var request = StreamingRecognizeRequestV2.newBuilder()
				.setStreamingConfig(streamingRecognitionConfig)
				.setRecognizer(recognizer)
				.build(); // The first request in a streaming call has to be a config

		clientStream.send(request);
		assi.obj.put("clientStream", clientStream);
		assi.obj.put("client", client);
		ret = true;
	}
	catch(e) {
		if(!e.getClass) {
			logger.error("[stt_g:createClientStreamV2 (" + assi.handler.sid + ")] " + e.message + "\n" + e.stack);
		}
		else {
			logger.error("[stt_g:createClientStreamV2 (" + assi.handler.sid + ")] " + e.getMessage(), e);
		}

		if(client) {
			client.close();
		}
	}

	return ret;
}


function createClientStreamAws(assi) {
	var ret = false;
	var client = null;
	try {
		client = TranscribeStreamingAsyncClient.builder()
					.region(assi.obj.get("stt_aws.region"))
					.credentialsProvider(assi.obj.get("stt_aws.provider"))
					.build();

		var responseHandler = StartStreamTranscriptionResponseHandler.builder()
				.onResponse(function(r) {
					if(DEBUG) {
						logger.info("[stt_aws (" + assi.handler.sid + ")] Received Initial response.");
					}
				})
				.onError(function(e) {
					if(!AWS_RETRY_ERROR_PATTERN.test(e.toString())) {
						logger.error("[stt_aws (" + assi.handler.sid + ")] onError. " + e.getMessage());
					}
				})
				.onComplete(function() {
					if(DEBUG) {
						logger.info("[stt_aws (" + assi.handler.sid + ")] onComplete");
					}

					// ccs start
					// RFSに音声認識結果をアップロード
					uploadToRfs(assi);

					var pbx_con = assi.obj.get("stt.pbx_db_connection");
					if(pbx_con) {
						if(DEBUG) {
							logger.info("[stt_aws (" + assi.handler.sid + ")] pbx db close");
	 					}
						pbx_con.close();
					}

					var ccs_con = assi.obj.get("stt.ccs_db_connection");
					if(ccs_con) {
						if(DEBUG) {
							logger.info("[stt_aws (" + assi.handler.sid + ")] add final record to ccs and ccs db close");
	 					}
						var current_time = new Date().getTime();
						// 最終レコードをDBに追加
						insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), null, 1, CCS_STT_MODE_AWS, 0, null, null, current_time, current_time);
						ccs_con.close();
					}
					// ccs end
				})
				.onEventStream(function(onSubscribe) {
					onSubscribe.subscribe(function(consumer) {
						var response_time = new Date().getTime();	// ccs
						var results = consumer.transcript().results();
						if(results.size() > 0) {
							if(!results.get(0).alternatives().get(0).transcript().isEmpty()) {
								if(DEBUG) {
									logger.info("[stt_aws (" + assi.handler.sid + ")] Transcript starttime=" + (results.get(0).startTime()*1000) +
													", endtime=" + (results.get(0).endTime()*1000) +
													", partial=" + results.get(0).isPartial() +
													", " + results.get(0).channelId() + ": "
													+ results.get(0).alternatives().get(0).transcript());
								}
								var result = results.get(0).alternatives().get(0).transcript();
								if(result) {
									// 初回の音声認識結果検出を設定
									if(!assi.obj.get("voice_detection")) {
										assi.obj.put("voice_detection", true);
									}
									// 音声認識結果を受信した時間を保持
									assi.obj.put("voice_detection_time", response_time);
								}

								var ptn = assi.obj.get("result.pattern");
								if(ptn) {
									var m = ptn.matcher(result);
									if(m.find()) {
										if(DEBUG) {
											logger.info("[stt_aws (" + assi.handler.sid + ")] " + result + ", ptn=" + ptn);
										}
										var group_only = assi.obj.get("result.pattern.group_only");
										if(group_only) {
											var sep = assi.obj.get("result.pattern.group_sep");
											sep = sep || "";
											var r = "";
											var c = m.groupCount()
											for(var i=1; i<=c; i++) {
												if(r.length > 0) {
													r += sep;
												}
												r += m.group(i);
											}
											result = r;
										}
										assi.obj.put("result", result);
										assi.obj.put("result.pattern.matcher", m);
										assi.obj.put("result.raised", true);
										assi.raiseResult(assi);
										return;
									}
								}
								assi.obj.put("result", result);
								// ccs start
								if(assi.obj.get("stt.source") == "call_record") {
									// 通話録音の場合
									if(!results.get(0).isPartial() || (!assi.obj.get("before_result") && result) || (assi.obj.get("before_result") && result && (assi.obj.get("before_result") != result))) {
										// 最終結果、または、前回の音声認識結果と異なる場合
										var current_time = new Date().getTime();
										var context = {};
										var channel = "";

										if(!results.get(0).isPartial()) {
											// 最終結果の場合
											var pbx_con = assi.obj.get("stt.pbx_db_connection");
											if(pbx_con) {
												context.responsetime = response_time;
												context.starttime = (results.get(0).startTime()*1000);
												context.endtime = (results.get(0).endTime()*1000);
												context.final = !results.get(0).isPartial();
												switch(results.get(0).channelId()) {
													case "ch_0":
														channel = "ch1";
														break;
													case "ch_1":
														channel = "ch2";
														break;
													default:
														break;
												}
												if(channel) {
													context.channel = channel;
												}
												context.content = result;

												// PBXのDBに追加
												if(DEBUG) {
													logger.info("[stt_aws (" + assi.handler.sid + ")] add record to pbx");
		 										}
												insertRecordToPbx(pbx_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), assi.obj.get("stt.other_number"), JSON.stringify(context));
											}
										}

										var ccs_con = assi.obj.get("stt.ccs_db_connection");
										if(ccs_con) {
											if(Object.keys(context).length == 0) {
												context.responsetime = response_time;
												context.starttime = (results.get(0).startTime()*1000);
												context.endtime = (results.get(0).endTime()*1000);
												context.final = !results.get(0).isPartial();
												switch(results.get(0).channelId()) {
													case "ch_0":
														channel = "ch1";
														break;
													case "ch_1":
														channel = "ch2";
														break;
													default:
														break;
												}
												if(channel) {
													context.channel = channel;
												}
												context.content = result;
											}

											// CCSのDBに追加
											if(DEBUG) {
												logger.info("[stt_aws (" + assi.handler.sid + ")] add record to ccs");
	 										}
											insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), JSON.stringify(context), 0, CCS_STT_MODE_AWS, 0, null, null, current_time, current_time);
										}

										if(!results.get(0).isPartial()) {
											// 最終結果の場合は前回の音声認識結果をクリア
											assi.obj.remove("before_result");
										}
										else {
											// 最終結果以外の場合は前回の音声認識結果を更新
											assi.obj.put("before_result", result);
										}
									}
								}
								// ccs end
							}
						}
					});
				})
/*
				.subscriber(function(event) {
					var results = event.transcript().results();
					if(results.size() > 0) {
						if(!results.get(0).alternatives().get(0).transcript().isEmpty()) {
							if(DEBUG) {
								logger.info("[stt_aws] partial=" + results.get(0).isPartial() + ", " + results.get(0).channelId() + ": " + results.get(0).alternatives().get(0).transcript());
							}
							var result = results.get(0).alternatives().get(0).transcript();
							if(result) {
								// 初回の音声認識結果検出を設定
								if(!assi.obj.get("voice_detection")) {
									assi.obj.put("voice_detection", true);
								}
							}

							var ptn = assi.obj.get("result.pattern");
							if(ptn) {
								var m = ptn.matcher(result);
								if(m.find()) {
									if(DEBUG) {
										logger.info("[stt_aws] " + result + ", ptn=" + ptn);
									}
									var group_only = assi.obj.get("result.pattern.group_only");
									if(group_only) {
										var sep = assi.obj.get("result.pattern.group_sep");
										sep = sep || "";
										var r = "";
										var c = m.groupCount()
										for(var i=1; i<=c; i++) {
											if(r.length > 0) {
												r += sep;
											}
											r += m.group(i);
										}
										result = r;
									}
									assi.obj.put("result", result);
									assi.obj.put("result.pattern.matcher", m);
									assi.obj.put("result.raised", true);
									assi.raiseResult(assi);
									return;
								}
							}
							assi.obj.put("result", result);
						}
					}
				})
*/
				.build();
		assi.obj.put("response_handler", responseHandler);

		var SubscriptionImpl = Java.extend(Subscription, {
				request : function(n) {
//					if(DEBUG) {
//						logger.info("[stt_aws (" + assi.handler.sid + ")] Subscription request");
//					}
				},
				cancel : function() {
					if(DEBUG) {
						logger.info("[stt_aws (" + assi.handler.sid + ")] Subscription cancel");
					}
				}
		});

		var AudioStreamPublisher = Java.extend(Publisher, HashMap);
		var audioStreamPublisher = new AudioStreamPublisher() {
				subscribe : function(s) {
					var currentSubscription = Java.super(audioStreamPublisher).get("currentSubscription");
					if(!currentSubscription) {
						currentSubscription = new SubscriptionImpl();
						Java.super(audioStreamPublisher).put("currentSubscription", currentSubscription);
					}
					else {
						currentSubscription.cancel();
						currentSubscription = new SubscriptionImpl();
						Java.super(audioStreamPublisher).put("currentSubscription", currentSubscription);
					}
					s.onSubscribe(currentSubscription);
					Java.super(audioStreamPublisher).put("subscriber", s);
				}
		};
		assi.obj.put("audio_stream_publisher", audioStreamPublisher);

		var smpl = Integer.valueOf(8000);
		var ch = 1;
		var enc = MediaEncoding.PCM;
		switch(assi.payload) {
			case PayloadInfo.PAYLOADTYPE_PCM16_MONO:
				//
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_MONO:
				//
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_STEREO:
				ch = 2;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_STEREO:
				ch = 2;
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_48000_MONO:
				smpl = Integer.valueOf(48000);
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_48000_MONO:
				smpl = Integer.valueOf(48000);
				break;
			case PayloadInfo.PAYLOADTYPE_PCM16_48000_STEREO:
				smpl = Integer.valueOf(48000);
				ch = 2;
				break;
			case PayloadInfo.PAYLOADTYPE_PCMU_48000_STEREO:
				smpl = Integer.valueOf(48000);
				ch = 2;
				break;
			default:
				logger.error("[stt_aws:createClientStreamAws (" + assi.handler.sid + ")] Unkown payload:" + a.payload);
				break;
		}

		// 言語
		var language = assi.obj.get("stt_aws.language");
		// カスタム語彙
		var vocabulary = assi.obj.get("stt_aws.vocabulary");

		var request = null;
		if(ch == 2) {
			if(vocabulary) {
				request = StartStreamTranscriptionRequest.builder()
							.mediaEncoding(enc)											// エンコード
							.languageCode(language)										// 言語
							.mediaSampleRateHertz(smpl)									// サンプリング周波数
							.enableChannelIdentification(true)							// マルチチャネル有効
							.numberOfChannels(Integer.valueOf(2))						// チャネル数
							.vocabularyName(vocabulary)									// カスタム語彙名
							.build();
			}
			else {
				request = StartStreamTranscriptionRequest.builder()
							.mediaEncoding(enc)											// エンコード
							.languageCode(language)										// 言語
							.mediaSampleRateHertz(smpl)									// サンプリング周波数
							.enableChannelIdentification(true)							// マルチチャネル有効
							.numberOfChannels(Integer.valueOf(2))						// チャネル数
							.build();
			}
		}
		else {
			if(vocabulary) {
				request = StartStreamTranscriptionRequest.builder()
							.mediaEncoding(enc)											// エンコード
							.languageCode(language)										// 言語
							.mediaSampleRateHertz(smpl)									// サンプリング周波数
							.vocabularyName(vocabulary)									// カスタム語彙名
							.build();
			}
			else {
				request = StartStreamTranscriptionRequest.builder()
							.mediaEncoding(enc)											// エンコード
							.languageCode(language)										// 言語
							.mediaSampleRateHertz(smpl)									// サンプリング周波数
							.build();
			}
		}
		assi.obj.put("request", request);

		var result = client.startStreamTranscription(request, audioStreamPublisher, responseHandler);

		// リクエスト実行完了待ちスレッドを開始
		var AwsRun = Java.extend(Runnable, HashMap);
		var aws_run = new AwsRun() {
			run: function() {
				try {
					Java.super(aws_run).get("future").get();
				}
				catch(e) {
					if(AWS_RETRY_ERROR_PATTERN.test(e.toString())) {
						// 録音中断して一定時間経過した場合は再接続する
						logger.debug("[stt_aws (" + assi.handler.sid + ")] " + e.getMessage());
						assi.obj.put("restart", true);
					}
					else {
						logger.error("[stt_aws (" + assi.handler.sid + ")] error. " + e.getMessage());
						Java.super(aws_run).put("error", e);
						assi.stop();
					}
				}
			}
		};
		aws_run.put("future", result);
		var aws_thread = new Thread(aws_run);
		aws_thread.start();

		assi.obj.put("client", client);
		assi.obj.put("aws_run", aws_run);
		assi.obj.put("aws_thread", aws_thread);
		ret = true;
	}
	catch(e) {
		if(!e.getClass) {
			logger.error("[stt_aws:createClientStreamAws (" + assi.handler.sid + ")] " + e.message + "\n" + e.stack);
		}
		else {
			logger.error("[stt_aws:createClientStreamAws (" + assi.handler.sid + ")] " + e.getMessage(), e);
		}

		if(client) {
			client.close();
		}
	}

	return ret;
}


function createClientStreamAmi(assi) {
	// Creates WebSocket speech recognition (Wrp) clients for this session:
	// one connection for mono audio, or one connection per channel for stereo.
	// Derives the codec string / channel layout from the session payload type
	// and stores the connection and listener objects in assi.obj.
	//
	// assi    : session assistant object; assi.obj is a shared key/value store
	// returns : true when all connections were established and feeding started,
	//           false otherwise
	var ret = false;
	// HashMap-backed listener base class so per-channel state (channel, times,
	// before_result, is_end) can be stored on the listener instance itself.
	var ExWrpListener = Java.extend(WrpListener, HashMap, {
		utteranceStarted: function(startTime) {},
		utteranceEnded: function(endTime) {},
		resultCreated: function() {},
		resultUpdated: function(result) {},
		resultFinalized: function(result) {},
		eventNotified: function(eventId, eventMessage) {},
		TRACE: function(message) {}
	});

	// Builds a WebSocket recognition-server event listener that layers the
	// shared recognition logic on top of baseListener (per-channel state map).
	function createListenerWithSharedLogic(baseListener) {
		var ImplClass = Java.extend(ExWrpListener, {
			// Utterance started
			utteranceStarted: function(startTime) {
				if(DEBUG) {
					logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": Voice detection started. time=" + startTime);
				}
				baseListener.put("start_time", startTime);
			},
			// Utterance ended
			utteranceEnded: function(endTime) {
				if(DEBUG) {
					logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": Voice detection ended. time=" + endTime);
				}
				baseListener.put("end_time", endTime);
			},
			// Recognition started
			resultCreated: function() {
				if(DEBUG) {
					logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": Transcript started");
				}
			},
			// Interim recognition result
			resultUpdated: function(result) {
				var response_time = new Date().getTime();	// ccs
				if(result) {
					var result_cnv = JSON.parse(unicodeToUtf8(result));
					var text = result_cnv.text;
					// Strip trailing periods from the interim text
					while(text && (text.charAt(text.length() - 1) == '.')) {
						text = text.replaceFirst("\\.$", "");
					}
					if(text) {
						if(DEBUG) {
							logger.info("[stt_ami (" + assi.handler.sid + ")] Transcript starttime=" + baseListener.get("start_time") +
											", endtime=" + (baseListener.get("end_time") ? baseListener.get("end_time") : null) +
											", final=false" +
											", ch" + baseListener.get("channel") +
											": " + text);
						}

						// Mark that the first recognition result has been detected
						if(!assi.obj.get("voice_detection")) {
							assi.obj.put("voice_detection", true);
						}
						// Remember when the recognition result was received
						assi.obj.put("voice_detection_time", response_time);
					}

					var ptn = assi.obj.get("result.pattern");
					if(ptn) {
						var m = ptn.matcher(text);
						if(m.find()) {
							if(DEBUG) {
								logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": " + text + ", ptn=" + ptn);
							}
							var group_only = assi.obj.get("result.pattern.group_only");
							if(group_only) {
								// Keep only the captured groups, joined by the configured separator
								var sep = assi.obj.get("result.pattern.group_sep");
								sep = sep || "";
								var r = "";
								var c = m.groupCount()
								for(var i=1; i<=c; i++) {
									if(r.length > 0) {
										r += sep;
									}
									r += m.group(i);
								}
								text = r;
							}
							assi.obj.put("result", text);
							assi.obj.put("result.pattern.matcher", m);
							assi.obj.put("result.raised", true);
							assi.raiseResult(assi);
							return;
						}
					}
					if(text) {
						assi.obj.put("result", text);
					}
					// ccs start
					if(assi.obj.get("stt.source") == "call_record") {
						// Call-recording source
						if((!baseListener.get("before_result") && text) || (baseListener.get("before_result") && text && (baseListener.get("before_result") != text))) {
							// Text differs from the previous recognition result
							var current_time = new Date().getTime();
							var context = {};
							var channel = "";

							var ccs_con = assi.obj.get("stt.ccs_db_connection");
							if(ccs_con) {
								context.responsetime = response_time;
								context.starttime = baseListener.get("start_time");
								context.endtime = baseListener.get("end_time") ? baseListener.get("end_time") : 0;
								context.final = false;
								switch(baseListener.get("channel")) {
									case 1:
										channel = "ch1";
										break;
									case 2:
										channel = "ch2";
										break;
									default:
										break;
								}
								if(channel) {
									context.channel = channel;
								}
								context.content = text;

								// Insert into the CCS DB
								if(DEBUG) {
									logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": add record to ccs");
								}
								insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), JSON.stringify(context), 0, CCS_STT_MODE_AMI, 0, null, null, current_time, current_time);
							}

							// Remember this interim result for the next comparison
							baseListener.put("before_result", text);
						}
					}
					// ccs end
				}
			},
			// Final recognition result
			resultFinalized: function(result) {
				var response_time = new Date().getTime();	// ccs
				if(result) {
					var result_cnv = JSON.parse(unicodeToUtf8(result));
					var results = result_cnv.results;
					for(var cnt=0; cnt<results.length; cnt++) {
						var text = results[cnt].text;
						if(text) {
							if(DEBUG) {
								logger.info("[stt_ami (" + assi.handler.sid + ")] Transcript starttime=" + results[cnt].starttime +
													", endtime=" + results[cnt].endtime +
													", final=true" +
													", ch" + baseListener.get("channel") +
													": " + text);
							}

							// Mark that the first recognition result has been detected
							if(!assi.obj.get("voice_detection")) {
								assi.obj.put("voice_detection", true);
							}
							// Remember when the recognition result was received
							assi.obj.put("voice_detection_time", response_time);
						}

						var ptn = assi.obj.get("result.pattern");
						if(ptn) {
							var m = ptn.matcher(text);
							if(m.find()) {
								if(DEBUG) {
									logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": " + text + ", ptn=" + ptn);
								}
								var group_only = assi.obj.get("result.pattern.group_only");
								if(group_only) {
									// Keep only the captured groups, joined by the configured separator
									var sep = assi.obj.get("result.pattern.group_sep");
									sep = sep || "";
									var r = "";
									var c = m.groupCount()
									for(var i=1; i<=c; i++) {
										if(r.length > 0) {
											r += sep;
										}
										r += m.group(i);
									}
									text = r;
								}
								assi.obj.put("result", text);
								assi.obj.put("result.pattern.matcher", m);
								assi.obj.put("result.raised", true);
								assi.raiseResult(assi);
								return;
							}
						}
						if(text) {
							assi.obj.put("result", text);
						}
						// ccs start
						if(assi.obj.get("stt.source") == "call_record") {
							// Call-recording source
							if(text) {
								var current_time = new Date().getTime();
								var context = {};
								var channel = "";

								var pbx_con = assi.obj.get("stt.pbx_db_connection");
								if(pbx_con) {
									context.responsetime = response_time;
									context.starttime = results[cnt].starttime;
									context.endtime = results[cnt].endtime;
									context.final = true;
									switch(baseListener.get("channel")) {
										case 1:
											channel = "ch1";
											break;
										case 2:
											channel = "ch2";
											break;
										default:
											break;
									}
									if(channel) {
										context.channel = channel;
									}
									context.content = text;

									// Insert into the PBX DB
									if(DEBUG) {
										logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": add record to pbx");
									}
									insertRecordToPbx(pbx_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), assi.obj.get("stt.other_number"), JSON.stringify(context));
								}

								var ccs_con = assi.obj.get("stt.ccs_db_connection");
								if(ccs_con) {
									if(Object.keys(context).length == 0) {
										// context was not filled above (no PBX connection); build it now
										context.responsetime = response_time;
										context.starttime = results[cnt].starttime;
										context.endtime = results[cnt].endtime;
										context.final = true;
										switch(baseListener.get("channel")) {
											case 1:
												channel = "ch1";
												break;
											case 2:
												channel = "ch2";
												break;
											default:
												break;
										}
										if(channel) {
											context.channel = channel;
										}
										context.content = text;
									}

									// Insert into the CCS DB
									if(DEBUG) {
										logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": add record to ccs");
									}
									insertRecordToCcs(ccs_con, assi.obj.get("stt.tenant"), assi.obj.get("stt.rid"), assi.obj.get("stt.user"), JSON.stringify(context), 0, CCS_STT_MODE_AMI, 0, null, null, current_time, current_time);
								}

								// Final result: clear the previous recognition result
								baseListener.remove("before_result");
							}
						}
						// ccs end

						// Clear the utterance end time
						baseListener.remove("end_time");
					}
				}
			},
			// Server event notification
			eventNotified: function(eventId, eventMessage) {
				if(DEBUG) {
					logger.info("[stt_ami (" + assi.handler.sid + ")] ch" + baseListener.get("channel") + ": event id=" + eventId + ", event message=" + eventMessage);
				}
			},
			// Trace output from the Wrp library
			TRACE: function(message) {
				if(DEBUG) {
					logger.info("[stt_ami (" + assi.handler.sid + ")][TRACE] ch" + baseListener.get("channel") + ": " + message);
				}
				if(AMI_COMPLETE_PATTERN.test(message)) {
					// Recognition finished for this channel
					baseListener.put("is_end", true);
				}
				else if(AMI_RETRY_ERROR_PATTERN.test(message)) {
					// Recording was interrupted and the retry window elapsed: reconnect
					assi.obj.put("restart_ch" + baseListener.get("channel"), true);
				}
			}
		});

		return new ImplClass();
	}

	// Engines that require 16 kHz input (sampling-rate conversion needed)
	var is_smpl_convert = false;
	switch(assi.obj.get("stt_ami.engine")) {
		case "-a-medical":				// general-purpose: medical
		case "-a-bizfinance":			// general-purpose: finance
		case "-a-bizinsurance":			// general-purpose: insurance
			is_smpl_convert = true;
			break;

		default:
			break;
	}
	assi.obj.put("is_samplingrate_convert", is_smpl_convert);

	// Derive codec string / channel layout from the session payload type
	var audio_division_unit = 1;
	var is_pcm_convert = false;
	var smpl = "MULAW";
	var ch = 1;
	switch(assi.payload) {
		case PayloadInfo.PAYLOADTYPE_PCM16_MONO:
			if(is_smpl_convert) {
				smpl = "LSB16K";
			}
			else {
				smpl = "LSB8K";
			}
			break;
		case PayloadInfo.PAYLOADTYPE_PCMU_MONO:
			if(is_smpl_convert) {
				smpl = "LSB16K";
				is_pcm_convert = true;
			}
			break;
		case PayloadInfo.PAYLOADTYPE_PCM16_STEREO:
			if(is_smpl_convert) {
				smpl = "LSB16K";
			}
			else {
				smpl = "LSB8K";
			}
			ch = 2;
			audio_division_unit = 2;
			break;
		case PayloadInfo.PAYLOADTYPE_PCMU_STEREO:
			if(is_smpl_convert) {
				smpl = "LSB16K";
				is_pcm_convert = true;
			}
			// NOTE(review): unlike PCM16_STEREO, audio_division_unit stays 1
			// here and for the 48K stereo cases — confirm this is intentional
			ch = 2;
			break;
		case PayloadInfo.PAYLOADTYPE_PCM16_48000_MONO:
			smpl = "LSB48K";
			break;
		case PayloadInfo.PAYLOADTYPE_PCMU_48000_MONO:
			smpl = "LSB48K";
			break;
		case PayloadInfo.PAYLOADTYPE_PCM16_48000_STEREO:
			smpl = "LSB48K";
			ch = 2;
			break;
		case PayloadInfo.PAYLOADTYPE_PCMU_48000_STEREO:
			smpl = "LSB48K";
			ch = 2;
			break;
		default:
			// BUGFIX: was "a.payload" (no `a` in scope — would throw); parameter is `assi`
			logger.error("[stt_ami:createClientStreamAmi (" + assi.handler.sid + ")] Unknown payload:" + assi.payload);
			break;
	}
	assi.obj.put("channel", ch);
	assi.obj.put("is_pcm_convert", is_pcm_convert);
	assi.obj.put("audio_division_unit", audio_division_unit);

	if(ch == 1) {
		// Mono: a single connection

		// Create the WebSocket recognition-server event listener
		var ex_listener = new ExWrpListener();
		ex_listener.put("channel", ch);
		ex_listener.put("is_end", false);
		var listener = createListenerWithSharedLogic(ex_listener);

		// Initialize the WebSocket recognition-server connection
		var wrp = Wrp.construct();
		wrp.setListener(listener);
		wrp.setServerURL(assi.obj.get("stt_ami.url"));									// Endpoint URL
		wrp.setCodec(smpl);																// Codec
		wrp.setGrammarFileNames(assi.obj.get("stt_ami.engine"));						// Recognition engine
		wrp.setKeepFillerToken(assi.obj.get("stt_ami.keep_filler"));					// Suppress automatic filler removal
		wrp.setProfileId(":" + assi.obj.get("stt_ami.profile_id"));						// Profile ID
		wrp.setAuthorization(assi.obj.get("stt_ami.app_key"));							// App key
		wrp.setConnectTimeout(AMI_WS_CONNECTION_TIMEOUT);								// Connect timeout
		wrp.setReceiveTimeout(AMI_WS_READ_TIMEOUT);										// Read timeout

		// Connect to the WebSocket recognition server
		if(wrp.connect()) {
			// Start feeding audio data to the server
			if(wrp.feedDataResume()) {
				assi.obj.put("wrp", wrp);
				assi.obj.put("listener", listener);
				ret = true;
			}
			else {
				logger.error("[stt_ami:createClientStreamAmi (" + assi.handler.sid + ")] feedDataResume failed." + wrp.getLastMessage());
				// Disconnect from the WebSocket recognition server
				if(wrp.isConnected()) {
					wrp.disconnect();
				}
			}
		}
		else {
			logger.error("[stt_ami:createClientStreamAmi (" + assi.handler.sid + ")] connect failed." + wrp.getLastMessage());
		}
	}
	else if(ch == 2) {
		// Stereo: one connection per channel

		// Create the WebSocket recognition-server event listeners
		// Channel 1
		var ex_listener_ch1 = new ExWrpListener();
		ex_listener_ch1.put("channel", 1);
		ex_listener_ch1.put("is_end", false);
		var listener_ch1 = createListenerWithSharedLogic(ex_listener_ch1);
		// Channel 2
		var ex_listener_ch2 = new ExWrpListener();
		ex_listener_ch2.put("channel", 2);
		ex_listener_ch2.put("is_end", false);
		var listener_ch2 = createListenerWithSharedLogic(ex_listener_ch2);

		// Initialize the WebSocket recognition-server connections
		// Channel 1
		var wrp_ch1 = Wrp.construct();
		wrp_ch1.setListener(listener_ch1);
		wrp_ch1.setServerURL(assi.obj.get("stt_ami.url"));								// Endpoint URL
		wrp_ch1.setCodec(smpl);															// Codec
		wrp_ch1.setGrammarFileNames(assi.obj.get("stt_ami.engine"));					// Recognition engine
		wrp_ch1.setKeepFillerToken(assi.obj.get("stt_ami.keep_filler"));				// Suppress automatic filler removal
		wrp_ch1.setProfileId(":" + assi.obj.get("stt_ami.profile_id"));					// Profile ID
		wrp_ch1.setAuthorization(assi.obj.get("stt_ami.app_key"));						// App key
		wrp_ch1.setConnectTimeout(AMI_WS_CONNECTION_TIMEOUT);							// Connect timeout
		wrp_ch1.setReceiveTimeout(AMI_WS_READ_TIMEOUT);									// Read timeout
		// Channel 2
		var wrp_ch2 = Wrp.construct();
		wrp_ch2.setListener(listener_ch2);
		wrp_ch2.setServerURL(assi.obj.get("stt_ami.url"));								// Endpoint URL
		wrp_ch2.setCodec(smpl);															// Codec
		wrp_ch2.setGrammarFileNames(assi.obj.get("stt_ami.engine"));					// Recognition engine
		wrp_ch2.setKeepFillerToken(assi.obj.get("stt_ami.keep_filler"));				// Suppress automatic filler removal
		wrp_ch2.setProfileId(":" + assi.obj.get("stt_ami.profile_id"));					// Profile ID
		wrp_ch2.setAuthorization(assi.obj.get("stt_ami.app_key"));						// App key
		wrp_ch2.setConnectTimeout(AMI_WS_CONNECTION_TIMEOUT);							// Connect timeout
		wrp_ch2.setReceiveTimeout(AMI_WS_READ_TIMEOUT);									// Read timeout

		// Connect to the WebSocket recognition server
		var connect_ret_ch1 = wrp_ch1.connect();						// Channel 1
		var connect_ret_ch2 = wrp_ch2.connect();						// Channel 2
		if(connect_ret_ch1 && connect_ret_ch2) {
			// Start feeding audio data to the server
			var feedDataResume_ret_ch1 = wrp_ch1.feedDataResume();		// Channel 1
			var feedDataResume_ret_ch2 = wrp_ch2.feedDataResume();		// Channel 2
			if(feedDataResume_ret_ch1 && feedDataResume_ret_ch2) {
				assi.obj.put("wrp_ch1", wrp_ch1);
				assi.obj.put("listener_ch1", listener_ch1);
				assi.obj.put("wrp_ch2", wrp_ch2);
				assi.obj.put("listener_ch2", listener_ch2);
				ret = true;
			}
			else {
				if(!feedDataResume_ret_ch1) {
					logger.error("[stt_ami:createClientStreamAmi (" + assi.handler.sid + ")] ch1: feedDataResume failed." + wrp_ch1.getLastMessage());
				}
				if(!feedDataResume_ret_ch2) {
					logger.error("[stt_ami:createClientStreamAmi (" + assi.handler.sid + ")] ch2: feedDataResume failed." + wrp_ch2.getLastMessage());
				}

				// Disconnect from the WebSocket recognition server
				if(wrp_ch1.isConnected()) {
					wrp_ch1.disconnect();
				}
				if(wrp_ch2.isConnected()) {
					wrp_ch2.disconnect();
				}
			}
		}
		else {
			if(!connect_ret_ch1) {
				logger.error("[stt_ami:createClientStreamAmi (" + assi.handler.sid + ")] ch1: connect failed." + wrp_ch1.getLastMessage());
			}
			if(!connect_ret_ch2) {
				logger.error("[stt_ami:createClientStreamAmi (" + assi.handler.sid + ")] ch2: connect failed." + wrp_ch2.getLastMessage());
			}

			// Disconnect from the WebSocket recognition server
			if(wrp_ch1.isConnected()) {
				wrp_ch1.disconnect();
			}
			if(wrp_ch2.isConnected()) {
				wrp_ch2.disconnect();
			}
		}
	}

	return ret;
}


function setGcpLib() {
	// Binds the Google Cloud Speech classes (v1, v1p1beta1 and v2) plus the
	// auth/gax support classes into the file-level globals. Called lazily so
	// the GCP jars only need to be on the classpath when the Google speech
	// engine is actually selected.
	var T = function(className) {
		return Java.type(className);
	};
	GoogleCredentials = T("com.google.auth.oauth2.GoogleCredentials");
	FixedCredentialsProvider = T("com.google.api.gax.core.FixedCredentialsProvider");
	ResponseObserver = T("com.google.api.gax.rpc.ResponseObserver");
	// Speech-to-Text v1 API
	SpeechSettingsV1 = T("com.google.cloud.speech.v1.SpeechSettings");
	SpeechClientV1 = T("com.google.cloud.speech.v1.SpeechClient");
	RecognitionConfigV1 = T("com.google.cloud.speech.v1.RecognitionConfig");
	RecognitionConfigV1_AudioEncoding = T("com.google.cloud.speech.v1.RecognitionConfig.AudioEncoding");
	StreamingRecognitionConfigV1 = T("com.google.cloud.speech.v1.StreamingRecognitionConfig");
	StreamingRecognizeRequestV1 = T("com.google.cloud.speech.v1.StreamingRecognizeRequest");
	SpeechAdaptationV1 = T("com.google.cloud.speech.v1.SpeechAdaptation");
	// Speech-to-Text v1p1beta1 API
	SpeechSettingsV1p1beta1 = T("com.google.cloud.speech.v1p1beta1.SpeechSettings");
	SpeechClientV1p1beta1 = T("com.google.cloud.speech.v1p1beta1.SpeechClient");
	RecognitionConfigV1p1beta1 = T("com.google.cloud.speech.v1p1beta1.RecognitionConfig");
	RecognitionConfigV1p1beta1_AudioEncoding = T("com.google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding");
	StreamingRecognitionConfigV1p1beta1 = T("com.google.cloud.speech.v1p1beta1.StreamingRecognitionConfig");
	StreamingRecognizeRequestV1p1beta1 = T("com.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest");
	SpeechAdaptationV1p1beta1 = T("com.google.cloud.speech.v1p1beta1.SpeechAdaptation");
	// Speech-to-Text v2 API
	SpeechSettingsV2 = T("com.google.cloud.speech.v2.SpeechSettings");
	SpeechClientV2 = T("com.google.cloud.speech.v2.SpeechClient");
	ExplicitDecodingConfigV2 = T("com.google.cloud.speech.v2.ExplicitDecodingConfig");
	ExplicitDecodingConfigV2_AudioEncoding = T("com.google.cloud.speech.v2.ExplicitDecodingConfig.AudioEncoding");
	RecognitionFeaturesV2 = T("com.google.cloud.speech.v2.RecognitionFeatures");
	RecognitionFeaturesV2_MultiChannelMode = T("com.google.cloud.speech.v2.RecognitionFeatures.MultiChannelMode");
	RecognitionConfigV2 = T("com.google.cloud.speech.v2.RecognitionConfig");
	StreamingRecognitionConfigV2 = T("com.google.cloud.speech.v2.StreamingRecognitionConfig");
	StreamingRecognizeRequestV2 = T("com.google.cloud.speech.v2.StreamingRecognizeRequest");
	StreamingRecognitionFeaturesV2 = T("com.google.cloud.speech.v2.StreamingRecognitionFeatures");
	SpeechAdaptationV2 = T("com.google.cloud.speech.v2.SpeechAdaptation");
	AdaptationPhraseSetV2 = T("com.google.cloud.speech.v2.SpeechAdaptation.AdaptationPhraseSet");
	ByteString = T("com.google.protobuf.ByteString");
}


function setAwsLib() {
	// Binds the AWS Transcribe streaming SDK classes into the file-level
	// globals. Called lazily so the AWS jars only need to be present when
	// the Amazon speech engine is actually selected.
	var T = function(className) {
		return Java.type(className);
	};
	AwsBasicCredentials = T("software.amazon.awssdk.auth.credentials.AwsBasicCredentials");
	StaticCredentialsProvider = T("software.amazon.awssdk.auth.credentials.StaticCredentialsProvider");
	Region = T("software.amazon.awssdk.regions.Region");
	LanguageCode = T("software.amazon.awssdk.services.transcribestreaming.model.LanguageCode");
	MediaEncoding = T("software.amazon.awssdk.services.transcribestreaming.model.MediaEncoding");
	TranscribeStreamingAsyncClient = T("software.amazon.awssdk.services.transcribestreaming.TranscribeStreamingAsyncClient");
	StartStreamTranscriptionResponseHandler = T("software.amazon.awssdk.services.transcribestreaming.model.StartStreamTranscriptionResponseHandler");
	StartStreamTranscriptionRequest = T("software.amazon.awssdk.services.transcribestreaming.model.StartStreamTranscriptionRequest");
	AudioEvent = T("software.amazon.awssdk.services.transcribestreaming.model.AudioEvent");
	SdkBytes = T("software.amazon.awssdk.core.SdkBytes");
	// Reactive-streams types used to feed the audio publisher
	Subscription = T("org.reactivestreams.Subscription");
	Publisher = T("org.reactivestreams.Publisher");
}


function setAmiLib() {
	// Binds the AmiVoice WebSocket client classes and the TarsosDSP
	// resampling classes into the file-level globals. Called lazily so the
	// jars only need to be present when the AmiVoice engine is selected.
	var T = function(className) {
		return Java.type(className);
	};
	Wrp = T("com.amivoice.wrp.Wrp");
	WrpListener = T("com.amivoice.wrp.WrpListener");
	AudioDispatcherFactory = T("be.tarsos.dsp.io.jvm.AudioDispatcherFactory");
	RateTransposer = T("be.tarsos.dsp.resample.RateTransposer");
	AudioProcessor = T("be.tarsos.dsp.AudioProcessor");
}


function getSttSetting(tenant) {
	// Loads the STT settings note as a java.util.Properties object.
	// Returns the cached Properties when the note has not been modified
	// since the last successful read, or null when the note does not exist.
	// Side effects: updates the file-level caches setting_modified and
	// stt_currentSettings.
	var ret = null;

	// Use the tenant-scoped note name when a concrete tenant is given
	// ("-" denotes the default/system tenant and keeps the plain name).
	var note_setting = STT_SETTING_NOTE;
	if(tenant && tenant != "-") {
		note_setting = tenant + "." + note_setting;
	}

	var l = NoteUtils.lastModified(note_setting);
	if(l > 0) {
		// Reload only when the note's timestamp changed since the last read.
		if(l != setting_modified) {
			if(DEBUG) {
				logger.info("[stt:getSttSetting] STT Setting update");
			}
			var n = NoteUtils.read(note_setting);
			setting_modified = l;
			var b = n.getBytes("UTF8");
			var prop = new Properties();
			var is = null;
			try {
				is = new ByteArrayInputStream(b);
				prop.load(is);
			}
			finally {
				// Ensure the stream is closed even if Properties.load throws.
				if(is) {
					is.close();
				}
			}
			stt_currentSettings = prop;
		}
		ret = stt_currentSettings;
	}

	return ret;
}


function unicodeToUtf8(ustr) {
	// Decodes literal "\uXXXX" escape sequences found in the input string
	// into the characters they represent. Returns null when the input is
	// null (or undefined); other text is passed through unchanged.
	if(ustr == null) {
		return null;
	}
	return ustr.replace(/\\u([0-9a-fA-F]{4})/g, function(whole, hex) {
		return String.fromCharCode(parseInt(hex, 16));
	});
}


// ccs start
function insertRecordToCcs(con, tenant, rid, agent, context, lastlog, asr_mode, asr_format_type, sys_exinfo1, sys_exinfo2, ptime, ctime) {
	// Inserts one realtime speech-to-text record into the CCS database
	// (table m_call_realtimes2t_<current year>). Optional columns are only
	// included when their value is truthy, mirroring the original column
	// list. Uses a PreparedStatement so quote characters in the text values
	// (e.g. context) can no longer break the statement or inject SQL.
	// Errors are logged and swallowed, as before.
	var st = null;

	try {
		// Build the column list and the parameter list in lock-step.
		var cols = ["tenant_name", "rid", "agent_id"];
		var vals = [tenant, rid, agent];
		if(context) {
			cols.push("context");
			vals.push(context);
		}
		cols.push("lastlog");
		vals.push(lastlog);
		if(asr_mode) {
			cols.push("asr_mode");
			vals.push(asr_mode);
		}
		if(asr_format_type) {
			cols.push("asr_format_type");
			vals.push(asr_format_type);
		}
		if(sys_exinfo1) {
			cols.push("sys_exinfo1");
			vals.push(sys_exinfo1);
		}
		if(sys_exinfo2) {
			cols.push("sys_exinfo2");
			vals.push(sys_exinfo2);
		}
		cols.push("ptime");
		vals.push(ptime);
		cols.push("ctime");
		vals.push(ctime);

		var placeholders = [];
		for(var i = 0; i < cols.length; i++) {
			placeholders.push("?");
		}
		// The table name is derived from the current year, not from input.
		var sql = "INSERT INTO m_call_realtimes2t_" + new Date().getFullYear() + " (" + cols.join(",") + ") VALUES (" + placeholders.join(",") + ")";
		st = con.prepareStatement(sql);
		for(var i = 0; i < vals.length; i++) {
			// Convert script strings to java.lang.String explicitly; let the
			// driver coerce numeric values.
			var v = vals[i];
			st.setObject(i + 1, (typeof v === "string") ? new JString(v) : v);
		}
		st.executeUpdate();
	}
	catch(e) {
		logger.error("[stt] insert error to ccs db. " + e);
	}
	finally {
		if(st) {
			st.close();
		}
	}
}

function insertRecordToPbx(con, tenant, rid, user, other_number, context) {
	// Inserts one STT result row into the PBX database (table t_stt).
	// The "context" column is included only when the value is truthy.
	// Uses a PreparedStatement: this fixes two defects of the previous
	// string-built SQL — (1) quote characters in any value broke the
	// statement / allowed injection, and (2) a falsy context left a
	// dangling comma in the VALUES list, producing invalid SQL.
	// Errors are logged and swallowed, as before.
	var st = null;

	try {
		var cols = ["tenant_name", "rid", "user_name", "other_number"];
		var vals = [tenant, rid, user, other_number];
		if(context) {
			cols.push("context");
			vals.push(context);
		}

		var placeholders = [];
		for(var i = 0; i < cols.length; i++) {
			placeholders.push("?");
		}
		var sql = "INSERT INTO t_stt (" + cols.join(",") + ") VALUES (" + placeholders.join(",") + ")";
		st = con.prepareStatement(sql);
		for(var i = 0; i < vals.length; i++) {
			// Convert script strings to java.lang.String explicitly.
			var v = vals[i];
			st.setObject(i + 1, (typeof v === "string") ? new JString(v) : v);
		}
		st.executeUpdate();
	}
	catch(e) {
		logger.error("[stt] insert error to pbx db. " + e);
	}
	finally {
		if(st) {
			st.close();
		}
	}
}

function uploadToRfs(assi) {
	if(assi.obj.get("stt.rfs_access") == "true") {
		// RFS連携をする場合
		var rfs_url = assi.obj.get("stt.record_app_url");		// RFSの音声認識結果登録URL
		if(rfs_url) {
			var pbx_con = assi.obj.get("stt.pbx_db_connection");
			if(pbx_con) {
				var st = null;
				var rs = null;
				var context = [];
				var is_error = false;

				try {
					// 音声認識結果を取得(時間順)
					st = pbx_con.createStatement();
					var sql = "SELECT context FROM t_stt WHERE tenant_name='" + assi.obj.get("stt.tenant") + "' AND rid='" + assi.obj.get("stt.rid") + "' AND user_name='" + assi.obj.get("stt.user") + "' ORDER BY gid";
					rs = st.executeQuery(sql);
					while(rs.next()) {
						context.push(JSON.parse(rs.getString("context")));
					}
				}
				catch(e) {
					logger.error("[stt (" + assi.handler.sid + ")] select error from pbx db. " + e);
					is_error = true;
				}
				finally {
					if(rs) {
						rs.close();
					}
					if(st) {
						st.close();
					}
				}

				if(!is_error) {
					if(context.length) {
						// RFSアップロードスレッドを開始
						var RfsRun = Java.extend(Runnable, HashMap);
						var rfs_run = new RfsRun() {
							run: function() {
								var send_data = {};
								send_data.tenant = Java.super(rfs_run).get("tenant");				// テナント
								send_data.rid = Number(Java.super(rfs_run).get("rid"));				// 通話ID
								send_data.agent_id = Java.super(rfs_run).get("user");				// エージェントID
								send_data.other_number = Java.super(rfs_run).get("other_number");	// 相手番号
								send_data.context = Java.super(rfs_run).get("context");				// 音声認識結果
								logger.debug("[stt (" + assi.handler.sid + ")] upload stt result to rfs: " + JSON.stringify(send_data));

								// RFSに音声認識結果をアップロード
								var httpc = new HttpC();
//								HttpC.DEBUG = true;
								httpc.user_agent = "Brekeke PBX";
								var response = httpc.postS(new URL(Java.super(rfs_run).get("url")), JSON.stringify(send_data), "application/json;charset=utf-8", null);
								if(response.responseCode == 200) {
									if(DEBUG) {
										logger.info("[stt (" + assi.handler.sid + ")] upload stt result to rfs success");
									}
								}
								else {
									logger.error("[stt (" + assi.handler.sid + ")] upload stt result to rfs error. response code=" + response.responseCode + ", response body=" + response.body);
								}
							}
						};
						rfs_run.put("url", rfs_url);									// RFS URL
						rfs_run.put("tenant", assi.obj.get("stt.tenant"));				// テナント
						rfs_run.put("rid", assi.obj.get("stt.rid"));					// 通話ID
						rfs_run.put("user", assi.obj.get("stt.user"));					// ユーザー
						rfs_run.put("other_number", assi.obj.get("stt.other_number"));	// 相手番号
						rfs_run.put("context", context);								// 音声認識結果
						var rfs_thread = new Thread(rfs_run);
						rfs_thread.start();
					}
				}
			}
		}
	}
}
// ccs end
