I want to create a project that reads a user's gesture (based on the accelerometer) and recognizes it. I've searched a lot, but everything I found is too old. I don't have a problem with classification or recognition; I'll use the $1 recognizer or an HMM. I just want to know how to read the user's gesture from the accelerometer.
Is accelerometer data (x, y, z values) enough, or should I use other data such as attitude data (roll, pitch, yaw), gyroscope data, or magnitude data? I don't really understand any of these, so an explanation of what each of these sensors is for would be helpful.
Thanks in advance!
Answer (score: 6)
I finally got it working. I used the userAcceleration data, which is the acceleration the user imparts to the device, with gravity excluded. I found that many people use the raw acceleration data and do a lot of math to remove gravity; that is now done for you by iOS in userAcceleration.
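In case it helps to see what "excluding gravity" means: Core Motion splits the raw accelerometer reading into a gravity part and a user part for you. This is only a minimal sketch (not from my app); the method name logAccelerationFrom: is made up, and it assumes you have already created and started the CMMotionManager you pass in.

#import <CoreMotion/CoreMotion.h>

// Sketch only: Core Motion already separates gravity from user-generated acceleration,
// so you do not need a manual high-pass filter yourself.
// Assumes device-motion updates have already been started on `manager`.
- (void)logAccelerationFrom:(CMMotionManager *)manager
{
    CMDeviceMotion *motion = manager.deviceMotion;      // latest device-motion sample
    CMAcceleration user    = motion.userAcceleration;   // what your hand adds (gravity removed)
    CMAcceleration gravity = motion.gravity;            // gravity in device coordinates
    // The raw accelerometer reading is roughly user + gravity on each axis (in g).
    NSLog(@"user x=%.2f  gravity x=%.2f  raw x~%.2f", user.x, gravity.x, user.x + gravity.x);
}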
I used the $1 recognizer, which is a 2D recognizer (i.e., points like (5, 10), with no Z).
Here is the link to the $1 recognizer; there is a C++ version in its downloads section.
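To give an idea of what the recognizer's input looks like, here is a tiny training sketch using the recognizer and numOfTrainedGestures variables from my controller below. The Point2D(x, y) constructor and the addTemplate(name, path) call are how the original JavaScript version names things, so treat them as assumptions and check GeometricRecognizer.h in your download for the exact names.

// Sketch only (not in my app): the recognizer works on flat 2D paths -- a Path2D is
// just a list of Point2D(x, y) values, there is no Z anywhere.
// addTemplate(name, path) and Point2D(x, y) are assumed from the $1 recognizer download.
- (void)trainSquareTemplate
{
    Path2D square;
    square.push_back(Point2D(0, 0));
    square.push_back(Point2D(0, 10));
    square.push_back(Point2D(10, 10));
    square.push_back(Point2D(10, 0));
    recognizer.addTemplate("square", square);   // one trained template named "square"
    numOfTrainedGestures++;
}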
Here are the steps of my code: read the userAcceleration data, low-pass filter it, collect the filtered (x, y) values as points, and finally hand those points to the recognizer. Here is my code:
// Note: this file must be compiled as Objective-C++ (.mm) because it uses the
// $1 recognizer's C++ classes (GeometricRecognizer, Path2D, RecognitionResult) directly.
#import "MainViewController.h"
#import <CoreMotion/CoreMotion.h>
#import "GeometricRecognizer.h"   // from the $1 recognizer C++ download

@implementation MainViewController {
    // Previous filter output, kept between motion updates for the low-pass filter
    double previousLowPassFilteredAccelerationX;
    double previousLowPassFilteredAccelerationY;
    double previousLowPassFilteredAccelerationZ;
    CGPoint position;
    int numOfTrainedGestures;
    GeometricRecognizer recognizer;
}
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    previousLowPassFilteredAccelerationX = previousLowPassFilteredAccelerationY = previousLowPassFilteredAccelerationZ = 0.0;
    recognizer = GeometricRecognizer();
    // Note: I let the user train his own gestures, so I start up each time with 0 gestures
    numOfTrainedGestures = 0;
}
#define kLowPassFilteringFactor 0.1   // smoothing factor for the low-pass filter
#define MOVEMENT_HZ 50                // device-motion update rate (samples per second)
#define NOISE_REDUCTION 0.05          // ignore filtered values below this threshold
- (IBAction)StartAccelerometer
{
    // SharedMotionManager is a custom singleton helper around CMMotionManager
    // (it is not part of Core Motion); Apple recommends using one instance per app.
    CMMotionManager *motionManager = [CMMotionManager SharedMotionManager];
    if ([motionManager isDeviceMotionAvailable])
    {
        [motionManager setDeviceMotionUpdateInterval:1.0 / MOVEMENT_HZ];
        [motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue currentQueue]
                                           withHandler:^(CMDeviceMotion *motion, NSError *error)
        {
            // Low-pass filter the gravity-free userAcceleration to smooth out jitter
            CMAcceleration lowpassFilterAcceleration, userAcceleration = motion.userAcceleration;
            lowpassFilterAcceleration.x = (userAcceleration.x * kLowPassFilteringFactor) + (previousLowPassFilteredAccelerationX * (1.0 - kLowPassFilteringFactor));
            lowpassFilterAcceleration.y = (userAcceleration.y * kLowPassFilteringFactor) + (previousLowPassFilteredAccelerationY * (1.0 - kLowPassFilteringFactor));
            lowpassFilterAcceleration.z = (userAcceleration.z * kLowPassFilteringFactor) + (previousLowPassFilteredAccelerationZ * (1.0 - kLowPassFilteringFactor));

            // Only record samples above the noise threshold; self.points is an NSMutableArray of "x,y" strings
            if (lowpassFilterAcceleration.x > NOISE_REDUCTION || lowpassFilterAcceleration.y > NOISE_REDUCTION)
                [self.points addObject:[NSString stringWithFormat:@"%.2f,%.2f", lowpassFilterAcceleration.x, lowpassFilterAcceleration.y]];

            previousLowPassFilteredAccelerationX = lowpassFilterAcceleration.x;
            previousLowPassFilteredAccelerationY = lowpassFilterAcceleration.y;
            previousLowPassFilteredAccelerationZ = lowpassFilterAcceleration.z;

            // Just showing the current values to the user
            self.XLabel.text = [NSString stringWithFormat:@"X : %.2f", lowpassFilterAcceleration.x];
            self.YLabel.text = [NSString stringWithFormat:@"Y : %.2f", lowpassFilterAcceleration.y];
            self.ZLabel.text = [NSString stringWithFormat:@"Z : %.2f", lowpassFilterAcceleration.z];
        }];
    }
    else NSLog(@"DeviceMotion is not available");
}
- (IBAction)StopAccelerometer
{
    [[CMMotionManager SharedMotionManager] stopDeviceMotionUpdates];

    // Show all the collected points to the user
    self.pointsTextView.text = [NSString stringWithFormat:@"%lu\n\n%@", (unsigned long)self.points.count, [self.points componentsJoinedByString:@"\n"]];

    // There must be at least 2 trained gestures, because recognition simply returns the closest template by distance
    if (numOfTrainedGestures > 1) {
        Path2D path = [self createPathFromPoints]; // A method to create a 2D path from the points array
        if (path.size()) {
            RecognitionResult recognitionResult = recognizer.recognize(path);
            self.recognitionLabel.text = [NSString stringWithFormat:@"%s Detected with Prob %.2f !",
                                          recognitionResult.name.c_str(), recognitionResult.score];
        } else self.recognitionLabel.text = @"Not enough points for gesture !";
    }
    else self.recognitionLabel.text = @"Not enough templates !";

    [self releaseAllVariables];
}
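I left out createPathFromPoints and releaseAllVariables above. Here is a minimal sketch of what createPathFromPoints could look like, assuming self.points is the NSMutableArray of "x,y" strings collected in StartAccelerometer and that Path2D/Point2D come from the C++ download (the Point2D(x, y) constructor is assumed, check the header in your copy):

// Sketch only: one possible implementation of the helper used in StopAccelerometer.
- (Path2D)createPathFromPoints
{
    Path2D path;
    for (NSString *pointString in self.points) {
        NSArray *xy = [pointString componentsSeparatedByString:@","];
        if (xy.count == 2)
            path.push_back(Point2D([xy[0] doubleValue], [xy[1] doubleValue]));
    }
    return path;
}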