Add Telephony speech metrics to sensorkit plugin
peyman-mohtashami committed Nov 7, 2023
1 parent 72c178d commit 72fe52e
Showing 4 changed files with 57 additions and 60 deletions.
2 changes: 1 addition & 1 deletion RbSensorkitCordovaPlugin/src/ios/Constants.swift
@@ -13,7 +13,7 @@ struct Constants {
"phoneUsageReport": "sensorkit_phone_usage",
"rotationRate": "sensorkit_rotation_rate",
"siriSpeechMetrics": "",
"telephonySpeechMetrics": "",
"telephonySpeechMetrics": "sensorkit_telephony_speech_metrics",
"visits": "sensorkit_visits",
"magneticField": "apple_ios_magnetic_field",
]
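The Constants change above maps the plugin's sensor key onto the topic name that converted records are published under. A minimal sketch of the lookup, where the struct layout and the property name sensorTopics are hypothetical stand-ins, since only the dictionary entries are visible in this hunk:

// Hypothetical stand-in for the struct in Constants.swift; only its entries appear in the hunk above.
struct Constants {
    static let sensorTopics: [String: String] = [
        "siriSpeechMetrics": "",
        "telephonySpeechMetrics": "sensorkit_telephony_speech_metrics",
        "visits": "sensorkit_visits",
    ]
}

let topic = Constants.sensorTopics["telephonySpeechMetrics"]   // "sensorkit_telephony_speech_metrics"
print(topic ?? "no topic configured for this sensor")          // the previous empty string meant no topic was assigned yet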
17 changes: 12 additions & 5 deletions RbSensorkitCordovaPlugin/src/ios/RbSensorkitCordovaPlugin.swift
@@ -167,11 +167,11 @@ class RbSensorkitCordovaPlugin : CDVPlugin, SRSensorReaderDelegate {
// sensor = .siriSpeechMetrics
// }
// break
// case "telephonySpeechMetrics":
// if #available(iOS 15.0, *) {
// sensor = .telephonySpeechMetrics
// }
// break
case "telephonySpeechMetrics":
if #available(iOS 17.0, *) {
sensor = .telephonySpeechMetrics
}
break
case "visits":
sensor = .visits
break
@@ -480,6 +480,13 @@ class RbSensorkitCordovaPlugin : CDVPlugin, SRSensorReaderDelegate {
convertPhoneUsageSensorData(result: result)
}
break
case "com.apple.SensorKit.speechMetrics.telephony":
let currentRecordTS: Double = result.timestamp.rawValue * 1000
if currentRecordTS - lastRecordTS >= periodMili {
lastRecordTS = currentRecordTS
convertTelephonySpeechMetricsData(result: result)
}
break
case "com.apple.SensorKit.visits":
let currentRecordTS: Double = result.timestamp.rawValue * 1000
if currentRecordTS - lastRecordTS >= periodMili {
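The new com.apple.SensorKit.speechMetrics.telephony branch reuses the plugin's down-sampling pattern: a result is converted only if at least periodMili milliseconds have elapsed since the last converted record. A standalone sketch of that check, assuming lastRecordTS and periodMili are held by the caller as in the surrounding class:

import SensorKit

// Sketch of the throttling used in the switch above. SRAbsoluteTime.rawValue is expressed
// in seconds, so it is scaled to milliseconds before comparing against periodMili.
func shouldConvert(_ timestamp: SRAbsoluteTime,
                   lastRecordTS: inout Double,
                   periodMili: Double) -> Bool {
    let currentRecordTS = timestamp.rawValue * 1000
    guard currentRecordTS - lastRecordTS >= periodMili else { return false }
    lastRecordTS = currentRecordTS
    return true
}

In the plugin the same guard appears inline for each sensor case (phone usage, visits, and now telephony speech metrics), so high-frequency streams are thinned to at most one record per period.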
88 changes: 37 additions & 51 deletions
@@ -8,54 +8,40 @@
import Foundation
import SensorKit

//class TelephonySpeechMetricsDataExtractor : SensorKitDataExtractor {
// override var topicName: String {
// return "android_phone_telephony_speech_metrics"
// }
//
// func sensorReader(_ reader: SRSensorReader, didCompleteFetch fetchRequest: SRFetchRequest) {
// print("sensorReader didCompleteFetch \(reader.sensor.rawValue) (\(counter))")
// counter = 0
// processData(sensorDataArray: sensorDataArray)
// sensorDataArray = []
// }
// /*
// No Schema
// */
// /*
// Getting session information
// var sessionIdentifier: String
// An identifier for the audio session.
// var sessionFlags: SRSpeechMetrics.SessionFlags
// Details about the audio processing.
//
// struct SRSpeechMetrics.SessionFlags
// Possible details about processing an audio stream.
//
// var timestamp: Date
// The date and time when the speech occurs.
//
// Getting speech metrics and analytics
// var audioLevel: SRAudioLevel?
// The audio level of the speech.
//
// class SRAudioLevel
// An object that represents the audio level for a range of speech.
//
// var speechRecognition: SFSpeechRecognitionResult?
// The partial or final results of the speech recognition request.
// var soundClassification: SNClassificationResult?
// The highest-ranking classifications in the time range.
// var speechExpression: SRSpeechExpression?
// The metrics and voice analytics for the range of speech.
// */
//
// // main
// override func convertSensorData(result: SRFetchResult<AnyObject>){
// let sample = result.sample as! SRSpeechMetrics
// sensorDataArray.append([
// "time": result.timestamp.toCFAbsoluteTime() + kCFAbsoluteTimeIntervalSince1970,
// "timeReceived": Date().timeIntervalSince1970,
// ])
// }
//}
extension RbSensorkitCordovaPlugin {
func convertTelephonySpeechMetricsData(result: SRFetchResult<AnyObject>) {
if #available(iOS 17.0, *) {
let sample = result.sample as! SRSpeechMetrics

var classifications: [SNClassification]? = sample.soundClassification?.classifications

var classificationsString = classifications?.compactMap { word in
word.identifier + ": " + word.confidence.description
}.joined(separator: ", ")

sensorDataArray.append([
"time": sample.timestamp.timeIntervalSince1970,
"timeReceived": sample.timestamp.timeIntervalSince1970,
"audioLevelLoudness": sample.audioLevel?.loudness as Any,
"audioLevelStart": sample.audioLevel?.timeRange.start.seconds as Any,
"audioLevelDuration": sample.audioLevel?.timeRange.duration.seconds as Any,

"speechExpressionStart": sample.speechExpression?.timeRange.start.seconds as Any,
"speechExpressionDuration": sample.speechExpression?.timeRange.duration.seconds as Any,
"speechExpressionVersion": sample.speechExpression?.version as Any,
"speechExpressionConfidence": sample.speechExpression?.confidence as Any,
"speechExpressionMood": sample.speechExpression?.mood as Any,
"speechExpressionValence": sample.speechExpression?.valence as Any,
"speechExpressionActivation": sample.speechExpression?.activation as Any,
"speechExpressionDominance": sample.speechExpression?.dominance as Any,

"soundClassificationStart": sample.soundClassification?.timeRange.start.seconds as Any,
"soundClassificationDuration": sample.soundClassification?.timeRange.duration.seconds as Any,
"soundClassification": (classificationsString ?? nil) as Any
])
} else {
// Fallback on earlier versions
}

}
}
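The extension above only converts samples that the plugin's existing SRSensorReader/delegate plumbing hands to it. For orientation, a minimal self-contained sketch of how .telephonySpeechMetrics results can be requested (not part of this commit; the class name is illustrative, and it assumes iOS 17, the SensorKit entitlement, and user authorization already granted):

import SensorKit

@available(iOS 17.0, *)
final class TelephonySpeechMetricsFetcher: NSObject, SRSensorReaderDelegate {
    private lazy var reader: SRSensorReader = {
        let reader = SRSensorReader(sensor: .telephonySpeechMetrics)
        reader.delegate = self
        return reader
    }()

    func start() {
        SRSensorReader.requestAuthorization(sensors: [.telephonySpeechMetrics]) { error in
            guard error == nil else { return }
            self.reader.startRecording()   // begin recording new samples
            self.reader.fetchDevices()     // results are fetched per device
        }
    }

    func sensorReader(_ reader: SRSensorReader, didFetch devices: [SRDevice]) {
        for device in devices {
            let request = SRFetchRequest()
            request.device = device
            // SensorKit withholds fresh data for roughly 24 hours, so fetch an older window.
            let now = CFAbsoluteTimeGetCurrent()
            request.from = SRAbsoluteTime.fromCFAbsoluteTime(now - 7 * 24 * 3600)
            request.to = SRAbsoluteTime.fromCFAbsoluteTime(now - 24 * 3600)
            reader.fetch(request)
        }
    }

    func sensorReader(_ reader: SRSensorReader,
                      fetching fetchRequest: SRFetchRequest,
                      didFetchResult result: SRFetchResult<AnyObject>) -> Bool {
        if let sample = result.sample as? SRSpeechMetrics {
            // In the plugin, this is the point where convertTelephonySpeechMetricsData(result:) runs.
            print("telephony speech metrics sample at \(sample.timestamp)")
        }
        return true   // continue receiving results for this fetch
    }
}

In the plugin itself this dispatch happens in the switch on reader.sensor.rawValue shown in the previous file, which is why the conversion function is added as an extension rather than a separate reader class.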
10 changes: 7 additions & 3 deletions RbSensorkitTestApp/www/js/index.js
@@ -86,8 +86,8 @@ async function onDeviceReady() {
console.log("[JS] Authorize Error", e);
}
/**********************************/
const startDate = "2023-10-20T10:00:00";
const endDate = "2023-10-27T20:00:00";
const startDate = "2023-11-02T10:00:00";
const endDate = "2023-11-07T20:00:00";

// await runSensor("onWristState", "sensorkit_on_wrist", 0, 10000, startDate, endDate, 0); //'iPhone')
//
@@ -104,10 +104,14 @@ async function onDeviceReady() {
// await runSensor("phoneUsageReport", "sensorkit_phone_usage", 0, 10000, startDate, endDate, 0); //'iPhone')
//
// await sleep(20000)
await runSensor("ambientPressure", "sensorkit_ambient_pressure", 0, 10000, startDate, endDate, 0); //'iPhone')
await runSensor("telephonySpeechMetrics", "sensorkit_telephony_speech_metrics", 0, 10000, startDate, endDate, 0); //'iPhone')

await sleep(20000)

// await runSensor("ambientPressure", "sensorkit_ambient_pressure", 0, 10000, startDate, endDate, 0); //'iPhone')
//
// await sleep(20000)

// await runSensor("pedometerData", "sensorkit_pedometer", 0, 10000, startDate, endDate, 0); //'iPhone')
//
// await sleep(20000)
