I need to pick up the X/Y position of touches anywhere on the screen on Windows 10, even outside of my WPF app. I read in some posts that I can use RegisterPointerInputTarget.
I am trying to redirect all touch input to my app. Using RegisterPointerInputTarget is supposed to be as easy as the code I show below, but it does not work, and I have no idea what I am doing wrong.
I also found some working code here: https://github.com/yingDev/WGestures/blob/master/WGestures.Core/Impl/Windows/TouchHook.cs
Is there some way to globally hook touch events and know the X/Y position when a touch happens?
Should I use RegisterPointerInputTarget?
I would appreciate any info and/or a code example.
using System;
using System.Windows;
using System.Runtime.InteropServices;
namespace WpfApp17
{
public partial class MainWindow : Window
{
    /// <summary>
    /// Pointer input types accepted by RegisterPointerInputTarget
    /// (mirrors the native POINTER_INPUT_TYPE enumeration).
    /// </summary>
    public enum PointerInputType
    {
        POINTER = 0x00000001,
        TOUCH = 0x00000002,
        PEN = 0x00000003,
        MOUSE = 0x00000004,
    }

    // SetLastError = true lets us retrieve the real Win32 failure code via
    // Marshal.GetLastWin32Error() instead of showing a bare "Error!".
    // NOTE(review): per the Win32 docs, this API requires the calling process
    // to have UI Access (uiAccess="true" in a signed manifest, launched from a
    // secure path); without it the call fails with ERROR_ACCESS_DENIED.
    [DllImport("User32", SetLastError = true)]
    static extern bool RegisterPointerInputTarget(IntPtr hwnd, PointerInputType pointerType);

    public MainWindow()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Registers this window as the global touch-input target.
    /// The registration is done here rather than in the constructor because
    /// the native HWND does not exist yet at construction time:
    /// WindowInteropHelper.Handle returns IntPtr.Zero there, so the original
    /// code was passing a null window handle to RegisterPointerInputTarget.
    /// </summary>
    protected override void OnSourceInitialized(EventArgs e)
    {
        base.OnSourceInitialized(e);

        // EnsureHandle() guarantees a valid HWND even if called early.
        IntPtr hWnd = new System.Windows.Interop.WindowInteropHelper(this).EnsureHandle();
        bool ok = RegisterPointerInputTarget(hWnd, PointerInputType.TOUCH);
        if (!ok)
        {
            // Surface the native error code so the failure is diagnosable
            // (e.g. ERROR_ACCESS_DENIED when UI Access is missing).
            int error = Marshal.GetLastWin32Error();
            MessageBox.Show("RegisterPointerInputTarget failed. Win32 error: " + error);
        }
        else
        {
            MessageBox.Show("TouchHook Redirected.");
        }
    }
}
}
User contributions licensed under CC BY-SA 3.0