commit 81da026de368fb28825cd27cb1103b8644ad1041 Author: zhanli <719901725@qq.com> Date: Wed Nov 29 13:21:56 2023 +0800 upload initial version. diff --git a/Code/triad_openvr/.gitignore b/Code/triad_openvr/.gitignore new file mode 100644 index 0000000..7a60b85 --- /dev/null +++ b/Code/triad_openvr/.gitignore @@ -0,0 +1,2 @@ +__pycache__/ +*.pyc diff --git a/Code/triad_openvr/README.md b/Code/triad_openvr/README.md new file mode 100644 index 0000000..f346a27 --- /dev/null +++ b/Code/triad_openvr/README.md @@ -0,0 +1,49 @@ +# Triad OpenVR Python Wrapper + +This is an enhanced wrapper for the already excellent [pyopenvr library](https://github.com/cmbruns/pyopenvr) by [cmbruns](https://github.com/cmbruns). The goal of this library is to create easy to use python functions for any SteamVR tracked system. + +# Getting Started + +```python +import triad_openvr as vr +import pylab as plt +v = vr.triad_openvr() +data = v.devices["controller_1"].sample(1000,250) +plt.plot(data.time,data.x) +plt.title('Controller X Coordinate') +plt.xlabel('Time (seconds)') +plt.ylabel('X Coordinate (meters)') +``` + +![Example plot of captured data](images/simple_xcoord_plot.png "Example Plot") + +# Configuration file + +The goal is to identify devices by serial, in order to keep the same name for the same physical device. To make it work, you just have to change the serials and names in the 'config.json' file. 
Here is an example of config file : + +``` +{ + "devices":[ + { + "name": "hmd", + "type": "HMD", + "serial":"XXX-XXXXXXXX" + }, + { + "name": "tracking_reference_1", + "type": "Tracking Reference", + "serial":"LHB-XXXXXXXX" + }, + { + "name": "controller_1", + "type": "Controller", + "serial":"XXX-XXXXXXXX" + }, + { + "name": "tracker_1", + "type": "Tracker", + "serial":"LHR-XXXXXXXX" + } + ] +} +``` diff --git a/Code/triad_openvr/config.json b/Code/triad_openvr/config.json new file mode 100644 index 0000000..9fadd0a --- /dev/null +++ b/Code/triad_openvr/config.json @@ -0,0 +1,29 @@ +{ + "devices":[ + { + "name": "tracking_reference_0", + "type": "Tracking Reference", + "serial":"LHB-02F97E98" + }, + { + "name": "tracking_reference_1", + "type": "Tracking Reference", + "serial":"LHB-431A55FD" + }, + { + "name": "tracker_0", + "type": "Tracker", + "serial":"LHR-3CD1A9DA" + }, + { + "name": "tracker_1", + "type": "Tracker", + "serial":"LHR-25865D81" + }, + { + "name": "tracker_2", + "type": "Tracker", + "serial":"LHR-4359D2B6" + } + ] +} diff --git a/Code/triad_openvr/controller_test.py b/Code/triad_openvr/controller_test.py new file mode 100644 index 0000000..e4ab441 --- /dev/null +++ b/Code/triad_openvr/controller_test.py @@ -0,0 +1,26 @@ +import triad_openvr +import time +import sys + +v = triad_openvr.triad_openvr() +v.print_discovered_objects() + +if len(sys.argv) == 1: + interval = 1/250 +elif len(sys.argv) == 2: + interval = 1/float(sys.argv[1]) +else: + print("Invalid number of arguments") + interval = False + +if interval: + while(True): + start = time.time() + txt = "" + for each in v.devices["controller_1"].get_pose_euler(): + txt += "%.4f" % each + txt += " " + print("\r" + txt, end="") + sleep_time = interval-(time.time()-start) + if sleep_time>0: + time.sleep(sleep_time) \ No newline at end of file diff --git a/Code/triad_openvr/example_default.vrsettings b/Code/triad_openvr/example_default.vrsettings new file mode 100644 index 0000000..405fe59 --- 
/dev/null +++ b/Code/triad_openvr/example_default.vrsettings @@ -0,0 +1,118 @@ +{ + "steamvr" : { + "requireHmd" : false, + "forcedDriver" : "null", + "forcedHmd" : "", + "displayDebug" : false, + "debugProcessPipe" : "", + "enableDistortion" : true, + "displayDebugX" : 0, + "displayDebugY" : 0, + "sendSystemButtonToAllApps" : false, + "loglevel" : 3, + "ipd" : 0.063, + "ipdOffset" : 0.0, + "background" : "", + "backgroundUseDomeProjection" : false, + "backgroundCameraHeight" : 1.6, + "backgroundDomeRadius" : 0.0, + "environment" : "", + "gridColor" : "", + "playAreaColor" : "", + "showStage" : false, + "activateMultipleDrivers" : true, + "directMode" : true, + "usingSpeakers" : false, + "speakersForwardYawOffsetDegrees" : 0.0, + "basestationPowerManagement" : false, + "neverKillProcesses" : false, + "renderTargetMultiplier" : 1.0, + "allowAsyncReprojection" : true, + "allowInterleavedReprojection" : true, + "forceReprojection" : false, + "forceFadeOnBadTracking" : true, + "defaultMirrorView" : 0, + "showMirrorView" : false, + "startMonitorFromAppLaunch" : true, + "startCompositorFromAppLaunch" : true, + "startDashboardFromAppLaunch" : true, + "startOverlayAppsFromDashboard" : true, + "enableHomeApp" : false, + "setInitialDefaultHomeApp" : false, + "CycleBackgroundImageTimeSec" : -1, + "retailDemo" : false, + "panelMask" : true, + "panelMaskVignette" : true, + "panelMaskVignetteWidth" : 2.0 + }, + "userinterface" : { + "StatusAlwaysOnTop" : true, + "screenshots" : true, + "screenshotType" : 2 + }, + "notifications" : { + "DoNotDisturb" : false + }, + "keyboard" : { + "TutorialCompletions" : 0, + "ScaleX" : 1.1, + "ScaleY" : 1.1, + "OffsetLeftX" : 0.0, + "OffsetRightX" : 0.0, + "OffsetY" : 0.0, + "Smoothing" : false + }, + "perfcheck" : { + "heuristicActive" : true, + "warnInHMD" : false, + "warnOnlyOnce" : false, + "allowTimingStore" : false, + "saveTimingsOnExit" : false, + "perfTestData" : 0.0 + }, + "collisionBounds" : { + "CollisionBoundsStyle" : 0, + 
"CollisionBoundsGroundPerimeterOn" : false, + "CollisionBoundsCenterMarkerOn" : false, + "CollisionBoundsPlaySpaceOn" : false, + "CollisionBoundsFadeDistance" : 0.7, + "CollisionBoundsColorGammaR" : 0, + "CollisionBoundsColorGammaG" : 255, + "CollisionBoundsColorGammaB" : 255, + "CollisionBoundsColorGammaA" : 153 + }, + "camera" : { + "enableCamera" : false, + "enableCameraInDashboard" : false, + "enableCameraForCollisionBounds" : false, + "enableCameraForRoomView" : false, + "cameraBoundsColorGammaR" : 0, + "cameraBoundsColorGammaG" : 255, + "cameraBoundsColorGammaB" : 255, + "cameraBoundsColorGammaA" : 153, + "cameraBoundsStrength" : 78 + }, + "audio" : { + "onPlaybackDevice" : "", + "onRecordDevice" : "", + "onPlaybackMirrorDevice" : "", + "offPlaybackDevice" : "", + "offRecordDevice" : "", + "viveHDMIGain" : true + }, + "power" : { + "powerOffOnExit" : true, + "turnOffScreensTimeout" : 5.0, + "turnOffControllersTimeout" : 300.0, + "returnToWatchdogTimeout" : 0.0, + "autoLaunchSteamVROnButtonPress" : true + }, + "modelskins" : { + }, + "dashboard" : { + "enableDashboard" : true, + "arcadeMode" : false + }, + "version" : "1", + "jsonid" : "vrsettings" +} diff --git a/Code/triad_openvr/images/simple_xcoord_plot.png b/Code/triad_openvr/images/simple_xcoord_plot.png new file mode 100644 index 0000000..9947996 Binary files /dev/null and b/Code/triad_openvr/images/simple_xcoord_plot.png differ diff --git a/Code/triad_openvr/tracker_test.py b/Code/triad_openvr/tracker_test.py new file mode 100644 index 0000000..83ccbb4 --- /dev/null +++ b/Code/triad_openvr/tracker_test.py @@ -0,0 +1,26 @@ +import triad_openvr +import time +import sys + +v = triad_openvr.triad_openvr() +v.print_discovered_objects() + +if len(sys.argv) == 1: + interval = 1/250 +elif len(sys.argv) == 2: + interval = 1/float(sys.argv[1]) +else: + print("Invalid number of arguments") + interval = False + +if interval: + while(True): + start = time.time() + txt = "" + for each in 
v.devices["tracker_1"].get_pose_euler(): + txt += "%.4f" % each + txt += " " + print("\r" + txt, end="") + sleep_time = interval-(time.time()-start) + if sleep_time>0: + time.sleep(sleep_time) \ No newline at end of file diff --git a/Code/triad_openvr/triad_openvr.py b/Code/triad_openvr/triad_openvr.py new file mode 100644 index 0000000..0800b53 --- /dev/null +++ b/Code/triad_openvr/triad_openvr.py @@ -0,0 +1,299 @@ +import time +import sys +import openvr +import math +import json + +from functools import lru_cache + +# Function to print out text but instead of starting a new line it will overwrite the existing line +def update_text(txt): + sys.stdout.write('\r'+txt) + sys.stdout.flush() + +#Convert the standard 3x4 position/rotation matrix to a x,y,z location and the appropriate Euler angles (in degrees) +def convert_to_euler(pose_mat): + yaw = 180 / math.pi * math.atan2(pose_mat[1][0], pose_mat[0][0]) + pitch = 180 / math.pi * math.atan2(pose_mat[2][0], pose_mat[0][0]) + roll = 180 / math.pi * math.atan2(pose_mat[2][1], pose_mat[2][2]) + x = pose_mat[0][3] + y = pose_mat[1][3] + z = pose_mat[2][3] + return [x,y,z,yaw,pitch,roll] + +#Convert the standard 3x4 position/rotation matrix to a x,y,z location and the appropriate Quaternion +def convert_to_quaternion(pose_mat): + # Per issue #2, adding a abs() so that sqrt only results in real numbers + r_w = math.sqrt(abs(1+pose_mat[0][0]+pose_mat[1][1]+pose_mat[2][2]))/2 + r_x = (pose_mat[2][1]-pose_mat[1][2])/(4*r_w) + r_y = (pose_mat[0][2]-pose_mat[2][0])/(4*r_w) + r_z = (pose_mat[1][0]-pose_mat[0][1])/(4*r_w) + + x = pose_mat[0][3] + y = pose_mat[1][3] + z = pose_mat[2][3] + return [x,y,z,r_w,r_x,r_y,r_z] + +#Define a class to make it easy to append pose matricies and convert to both Euler and Quaternion for plotting +class pose_sample_buffer(): + def __init__(self): + self.i = 0 + self.index = [] + self.time = [] + self.x = [] + self.y = [] + self.z = [] + self.yaw = [] + self.pitch = [] + self.roll = [] + self.r_w 
= [] + self.r_x = [] + self.r_y = [] + self.r_z = [] + + def append(self,pose_mat,t): + self.time.append(t) + self.x.append(pose_mat[0][3]) + self.y.append(pose_mat[1][3]) + self.z.append(pose_mat[2][3]) + self.yaw.append(180 / math.pi * math.atan(pose_mat[1][0] /pose_mat[0][0])) + self.pitch.append(180 / math.pi * math.atan(-1 * pose_mat[2][0] / math.sqrt(pow(pose_mat[2][1], 2) + math.pow(pose_mat[2][2], 2)))) + self.roll.append(180 / math.pi * math.atan(pose_mat[2][1] /pose_mat[2][2])) + r_w = math.sqrt(abs(1+pose_mat[0][0]+pose_mat[1][1]+pose_mat[2][2]))/2 + self.r_w.append(r_w) + self.r_x.append((pose_mat[2][1]-pose_mat[1][2])/(4*r_w)) + self.r_y.append((pose_mat[0][2]-pose_mat[2][0])/(4*r_w)) + self.r_z.append((pose_mat[1][0]-pose_mat[0][1])/(4*r_w)) + +def get_pose(vr_obj): + return vr_obj.getDeviceToAbsoluteTrackingPose(openvr.TrackingUniverseStanding, 0, openvr.k_unMaxTrackedDeviceCount) + + +class vr_tracked_device(): + def __init__(self,vr_obj,index,device_class): + self.device_class = device_class + self.index = index + self.vr = vr_obj + + @lru_cache(maxsize=None) + def get_serial(self): + return self.vr.getStringTrackedDeviceProperty(self.index, openvr.Prop_SerialNumber_String) + + def get_model(self): + return self.vr.getStringTrackedDeviceProperty(self.index, openvr.Prop_ModelNumber_String) + + def get_battery_percent(self): + return self.vr.getFloatTrackedDeviceProperty(self.index, openvr.Prop_DeviceBatteryPercentage_Float) + + def is_charging(self): + return self.vr.getBoolTrackedDeviceProperty(self.index, openvr.Prop_DeviceIsCharging_Bool) + + + def sample(self,num_samples,sample_rate): + interval = 1/sample_rate + rtn = pose_sample_buffer() + sample_start = time.time() + for i in range(num_samples): + start = time.time() + pose = get_pose(self.vr) + rtn.append(pose[self.index].mDeviceToAbsoluteTracking,time.time()-sample_start) + sleep_time = interval- (time.time()-start) + if sleep_time>0: + time.sleep(sleep_time) + return rtn + + def 
get_pose_euler(self, pose=None): + if pose == None: + pose = get_pose(self.vr) + if pose[self.index].bPoseIsValid: + return convert_to_euler(pose[self.index].mDeviceToAbsoluteTracking) + else: + return None + + def get_pose_matrix(self, pose=None): + if pose == None: + pose = get_pose(self.vr) + if pose[self.index].bPoseIsValid: + return pose[self.index].mDeviceToAbsoluteTracking + else: + return None + + def get_velocity(self, pose=None): + if pose == None: + pose = get_pose(self.vr) + if pose[self.index].bPoseIsValid: + return pose[self.index].vVelocity + else: + return None + + def get_angular_velocity(self, pose=None): + if pose == None: + pose = get_pose(self.vr) + if pose[self.index].bPoseIsValid: + return pose[self.index].vAngularVelocity + else: + return None + + def get_pose_quaternion(self, pose=None): + if pose == None: + pose = get_pose(self.vr) + if pose[self.index].bPoseIsValid: + return convert_to_quaternion(pose[self.index].mDeviceToAbsoluteTracking) + else: + return None + + def controller_state_to_dict(self, pControllerState): + # This function is graciously borrowed from https://gist.github.com/awesomebytes/75daab3adb62b331f21ecf3a03b3ab46 + # docs: https://github.com/ValveSoftware/openvr/wiki/IVRSystem::GetControllerState + d = {} + d['unPacketNum'] = pControllerState.unPacketNum + # on trigger .y is always 0.0 says the docs + d['trigger'] = pControllerState.rAxis[1].x + # 0.0 on trigger is fully released + # -1.0 to 1.0 on joystick and trackpads + d['trackpad_x'] = pControllerState.rAxis[0].x + d['trackpad_y'] = pControllerState.rAxis[0].y + # These are published and always 0.0 + # for i in range(2, 5): + # d['unknowns_' + str(i) + '_x'] = pControllerState.rAxis[i].x + # d['unknowns_' + str(i) + '_y'] = pControllerState.rAxis[i].y + d['ulButtonPressed'] = pControllerState.ulButtonPressed + d['ulButtonTouched'] = pControllerState.ulButtonTouched + # To make easier to understand what is going on + # Second bit marks menu button + d['menu_button'] 
= bool(pControllerState.ulButtonPressed >> 1 & 1) + # 32 bit marks trackpad + d['trackpad_pressed'] = bool(pControllerState.ulButtonPressed >> 32 & 1) + d['trackpad_touched'] = bool(pControllerState.ulButtonTouched >> 32 & 1) + # third bit marks grip button + d['grip_button'] = bool(pControllerState.ulButtonPressed >> 2 & 1) + # System button can't be read, if you press it + # the controllers stop reporting + return d + + def get_controller_inputs(self): + result, state = self.vr.getControllerState(self.index) + return self.controller_state_to_dict(state) + + def trigger_haptic_pulse(self, duration_micros=1000, axis_id=0): + """ + Causes devices with haptic feedback to vibrate for a short time. + """ + self.vr.triggerHapticPulse(self.index ,axis_id, duration_micros) + +class vr_tracking_reference(vr_tracked_device): + def get_mode(self): + return self.vr.getStringTrackedDeviceProperty(self.index,openvr.Prop_ModeLabel_String).decode('utf-8').upper() + def sample(self,num_samples,sample_rate): + print("Warning: Tracking References do not move, sample isn't much use...") + +class triad_openvr(): + def __init__(self, configfile_path=None): + # Initialize OpenVR in the + self.vr = openvr.init(openvr.VRApplication_Other) + self.vrsystem = openvr.VRSystem() + + # Initializing object to hold indexes for various tracked objects + self.object_names = {"Tracking Reference":[],"HMD":[],"Controller":[],"Tracker":[]} + self.devices = {} + self.device_index_map = {} + poses = self.vr.getDeviceToAbsoluteTrackingPose(openvr.TrackingUniverseStanding, 0, + openvr.k_unMaxTrackedDeviceCount) + + # Loading config file + if configfile_path: + try: + with open(configfile_path, 'r') as json_data: + config = json.load(json_data) + except EnvironmentError: # parent of IOError, OSError *and* WindowsError where available + print('config.json not found.') + exit(1) + + # Iterate through the pose list to find the active devices and determine their type + for i in 
range(openvr.k_unMaxTrackedDeviceCount): + if poses[i].bDeviceIsConnected: + device_serial = self.vr.getStringTrackedDeviceProperty(i,openvr.Prop_SerialNumber_String).decode('utf-8') + for device in config['devices']: + if device_serial == device['serial']: + device_name = device['name'] + self.object_names[device['type']].append(device_name) + self.devices[device_name] = vr_tracked_device(self.vr,i,device['type']) + else: + # Iterate through the pose list to find the active devices and determine their type + for i in range(openvr.k_unMaxTrackedDeviceCount): + if poses[i].bDeviceIsConnected: + self.add_tracked_device(i) + + def __del__(self): + openvr.shutdown() + + def get_pose(self): + return get_pose(self.vr) + + def poll_vr_events(self): + """ + Used to poll VR events and find any new tracked devices or ones that are no longer tracked. + """ + event = openvr.VREvent_t() + while self.vrsystem.pollNextEvent(event): + if event.eventType == openvr.VREvent_TrackedDeviceActivated: + self.add_tracked_device(event.trackedDeviceIndex) + elif event.eventType == openvr.VREvent_TrackedDeviceDeactivated: + #If we were already tracking this device, quit tracking it. 
+ if event.trackedDeviceIndex in self.device_index_map: + self.remove_tracked_device(event.trackedDeviceIndex) + + def add_tracked_device(self, tracked_device_index): + i = tracked_device_index + device_class = self.vr.getTrackedDeviceClass(i) + if (device_class == openvr.TrackedDeviceClass_Controller): + device_name = "controller_"+str(len(self.object_names["Controller"])+1) + self.object_names["Controller"].append(device_name) + self.devices[device_name] = vr_tracked_device(self.vr,i,"Controller") + self.device_index_map[i] = device_name + elif (device_class == openvr.TrackedDeviceClass_HMD): + device_name = "hmd_"+str(len(self.object_names["HMD"])+1) + self.object_names["HMD"].append(device_name) + self.devices[device_name] = vr_tracked_device(self.vr,i,"HMD") + self.device_index_map[i] = device_name + elif (device_class == openvr.TrackedDeviceClass_GenericTracker): + device_name = "tracker_"+str(len(self.object_names["Tracker"])+1) + self.object_names["Tracker"].append(device_name) + self.devices[device_name] = vr_tracked_device(self.vr,i,"Tracker") + self.device_index_map[i] = device_name + elif (device_class == openvr.TrackedDeviceClass_TrackingReference): + device_name = "tracking_reference_"+str(len(self.object_names["Tracking Reference"])+1) + self.object_names["Tracking Reference"].append(device_name) + self.devices[device_name] = vr_tracking_reference(self.vr,i,"Tracking Reference") + self.device_index_map[i] = device_name + + def remove_tracked_device(self, tracked_device_index): + if tracked_device_index in self.device_index_map: + device_name = self.device_index_map[tracked_device_index] + self.object_names[self.devices[device_name].device_class].remove(device_name) + del self.device_index_map[tracked_device_index] + del self.devices[device_name] + else: + raise Exception("Tracked device index {} not valid. 
Not removing.".format(tracked_device_index)) + + def rename_device(self,old_device_name,new_device_name): + self.devices[new_device_name] = self.devices.pop(old_device_name) + for i in range(len(self.object_names[self.devices[new_device_name].device_class])): + if self.object_names[self.devices[new_device_name].device_class][i] == old_device_name: + self.object_names[self.devices[new_device_name].device_class][i] = new_device_name + + def print_discovered_objects(self): + for device_type in self.object_names: + plural = device_type + if len(self.object_names[device_type])!=1: + plural+="s" + print("Found "+str(len(self.object_names[device_type]))+" "+plural) + for device in self.object_names[device_type]: + if device_type == "Tracking Reference": + print(" "+device+" ("+self.devices[device].get_serial()+ + ", Mode "+self.devices[device].get_model()+ + ", "+self.devices[device].get_model()+ + ")") + else: + print(" "+device+" ("+self.devices[device].get_serial()+ + ", "+self.devices[device].get_model()+")") diff --git a/Code/triad_openvr/udp_emitter.py b/Code/triad_openvr/udp_emitter.py new file mode 100644 index 0000000..d1aa874 --- /dev/null +++ b/Code/triad_openvr/udp_emitter.py @@ -0,0 +1,30 @@ +import triad_openvr +import time +import sys +import struct +import socket + +sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) +server_address = ('10.0.1.48', 8051) + +v = triad_openvr.triad_openvr() +v.print_discovered_objects() + +if len(sys.argv) == 1: + interval = 1/250 +elif len(sys.argv) == 2: + interval = 1/float(sys.argv[1]) +else: + print("Invalid number of arguments") + interval = False + +if interval: + while(True): + start = time.time() + txt = "" + data = v.devices["tracker_1"].get_pose_quaternion() + sent = sock.sendto(struct.pack('d'*len(data), *data), server_address) + print("\r" + txt, end="") + sleep_time = interval-(time.time()-start) + if sleep_time>0: + time.sleep(sleep_time) \ No newline at end of file diff --git 
a/Code/triad_openvr/udp_receiver.cs b/Code/triad_openvr/udp_receiver.cs new file mode 100644 index 0000000..e4338cb --- /dev/null +++ b/Code/triad_openvr/udp_receiver.cs @@ -0,0 +1,58 @@ +//The following code can be used to receive pose data from udp_emitter.py and use it to track an object in unity + +using UnityEngine; +using System; +using System.Net; +using System.Net.Sockets; +using System.Threading; + +public class udp_tracked_object : MonoBehaviour { + Thread receiveThread; + UdpClient client; + private Double[] float_array; + private int port = 8051; + + // Use this for initialization + void Start () { + float_array = new Double[7]; + receiveThread = new Thread(new ThreadStart(ReceiveData)); + receiveThread.IsBackground = true; + receiveThread.Start(); + } + + // Update is called once per frame + void Update () { + transform.position = new Vector3((float) float_array[0], (float)float_array[1], (float)float_array[2]); + transform.rotation = new Quaternion((float)float_array[3], (float)float_array[4], (float)float_array[5], (float)float_array[6]); + } + + void OnApplicationQuit() + { + if (receiveThread != null) + receiveThread.Abort(); + client.Close(); + } + + // receive thread + private void ReceiveData() + { + port = 8051; + client = new UdpClient(port); + print("Starting Server"); + while (true) + { + try + { + IPEndPoint anyIP = new IPEndPoint(IPAddress.Any, 0); + byte[] data = client.Receive(ref anyIP); + for (int i = 0; i < data.Length; i++) + float_array[i] = BitConverter.ToDouble(data, i * 8); + } + catch (Exception err) + { + print(err.ToString()); + } + } + } + +} diff --git a/Docs/Image/15灯基站.png b/Docs/Image/15灯基站.png new file mode 100644 index 0000000..663d0b2 Binary files /dev/null and b/Docs/Image/15灯基站.png differ diff --git a/Docs/Image/9灯基站.png b/Docs/Image/9灯基站.png new file mode 100644 index 0000000..bd9bd15 Binary files /dev/null and b/Docs/Image/9灯基站.png differ diff --git a/Docs/Image/Lighthouse扫描过程.webp b/Docs/Image/Lighthouse扫描过程.webp 
new file mode 100644 index 0000000..cc5f8f9 Binary files /dev/null and b/Docs/Image/Lighthouse扫描过程.webp differ diff --git a/Docs/Image/Tracker1.0 Mainboard.png b/Docs/Image/Tracker1.0 Mainboard.png new file mode 100644 index 0000000..7a45698 Binary files /dev/null and b/Docs/Image/Tracker1.0 Mainboard.png differ diff --git a/Docs/Image/Tracker1.0主板2.png b/Docs/Image/Tracker1.0主板2.png new file mode 100644 index 0000000..38aab0a Binary files /dev/null and b/Docs/Image/Tracker1.0主板2.png differ diff --git a/Docs/Image/Tracker2.0主板.png b/Docs/Image/Tracker2.0主板.png new file mode 100644 index 0000000..95ee361 Binary files /dev/null and b/Docs/Image/Tracker2.0主板.png differ diff --git a/Docs/Image/Tracker2.0主板1.png b/Docs/Image/Tracker2.0主板1.png new file mode 100644 index 0000000..34c04bd Binary files /dev/null and b/Docs/Image/Tracker2.0主板1.png differ diff --git a/Docs/Image/Tracker2.0主板2.png b/Docs/Image/Tracker2.0主板2.png new file mode 100644 index 0000000..dbba2a6 Binary files /dev/null and b/Docs/Image/Tracker2.0主板2.png differ diff --git a/Docs/Image/Tracker2.0主板3.png b/Docs/Image/Tracker2.0主板3.png new file mode 100644 index 0000000..c21186b Binary files /dev/null and b/Docs/Image/Tracker2.0主板3.png differ diff --git a/Docs/Image/Tracker3.0-主板.png b/Docs/Image/Tracker3.0-主板.png new file mode 100644 index 0000000..474c273 Binary files /dev/null and b/Docs/Image/Tracker3.0-主板.png differ diff --git a/Docs/Image/Tracker3.0-主板2.png b/Docs/Image/Tracker3.0-主板2.png new file mode 100644 index 0000000..2928bc8 Binary files /dev/null and b/Docs/Image/Tracker3.0-主板2.png differ diff --git a/Docs/Image/Tracker3.0-光敏模组.png b/Docs/Image/Tracker3.0-光敏模组.png new file mode 100644 index 0000000..db04f5f Binary files /dev/null and b/Docs/Image/Tracker3.0-光敏模组.png differ diff --git a/Docs/Image/Tracker3.0-手柄TS4631.png b/Docs/Image/Tracker3.0-手柄TS4631.png new file mode 100644 index 0000000..af49f0a Binary files /dev/null and b/Docs/Image/Tracker3.0-手柄TS4631.png differ diff --git 
a/Docs/Image/Tracker3.0.png b/Docs/Image/Tracker3.0.png new file mode 100644 index 0000000..ab818a9 Binary files /dev/null and b/Docs/Image/Tracker3.0.png differ diff --git a/Docs/Image/Tracker拆解图.png b/Docs/Image/Tracker拆解图.png new file mode 100644 index 0000000..5b80bd3 Binary files /dev/null and b/Docs/Image/Tracker拆解图.png differ diff --git a/Docs/Image/TundraTracker-TS4631.png b/Docs/Image/TundraTracker-TS4631.png new file mode 100644 index 0000000..e9f5545 Binary files /dev/null and b/Docs/Image/TundraTracker-TS4631.png differ diff --git a/Docs/Image/positioning-principle.png b/Docs/Image/positioning-principle.png new file mode 100644 index 0000000..04e1ef4 Binary files /dev/null and b/Docs/Image/positioning-principle.png differ diff --git a/Docs/Image/基站2.0原理1.png b/Docs/Image/基站2.0原理1.png new file mode 100644 index 0000000..c8685c3 Binary files /dev/null and b/Docs/Image/基站2.0原理1.png differ diff --git a/Docs/Image/基站2.0原理拆解.png b/Docs/Image/基站2.0原理拆解.png new file mode 100644 index 0000000..44abc32 Binary files /dev/null and b/Docs/Image/基站2.0原理拆解.png differ diff --git a/Docs/Image/基站2.0扫描过程.webp b/Docs/Image/基站2.0扫描过程.webp new file mode 100644 index 0000000..3361cf5 Binary files /dev/null and b/Docs/Image/基站2.0扫描过程.webp differ diff --git a/Docs/Image/基站包装盒.png b/Docs/Image/基站包装盒.png new file mode 100644 index 0000000..90f4574 Binary files /dev/null and b/Docs/Image/基站包装盒.png differ diff --git a/Docs/Image/基站拆解图.png b/Docs/Image/基站拆解图.png new file mode 100644 index 0000000..4f578dc Binary files /dev/null and b/Docs/Image/基站拆解图.png differ diff --git a/Docs/Image/基站背面-信息.png b/Docs/Image/基站背面-信息.png new file mode 100644 index 0000000..e7a4197 Binary files /dev/null and b/Docs/Image/基站背面-信息.png differ diff --git a/Docs/Image/基站背面-样机.png b/Docs/Image/基站背面-样机.png new file mode 100644 index 0000000..82d45b4 Binary files /dev/null and b/Docs/Image/基站背面-样机.png differ diff --git a/Docs/Lighthouse工作原理.md b/Docs/Lighthouse工作原理.md new file mode 100644 index 
0000000..d1ed8f3 --- /dev/null +++ b/Docs/Lighthouse工作原理.md @@ -0,0 +1,120 @@ +# Lighthouse定位原理 + + 2016年,Valve携手HTC一同发布了跨时代般的VR头显:HTC VIVE,随之而来的是一套全新的空间定位技术:命名为Lighthouse的定位系统,具备高精度、高响应、轻便易用等特点,至今仍是VR全身追踪玩家首选的追踪设备。本文章详细解析Lighthouse初代系统(1.0基站系统)与不完全解析第二代系统(2017年6月发布的2.0基站系统)基站原理与追踪器定位原理,为方便理解将以1.0基站指代初代系统,2.0基站指代第二代系统。 + +## 一、Lighthouse定位原理: + +### 1.1 Lighthouse定位架构 + + 每个1.0基站包含一个称为同步闪光灯的红外灯阵列和俩组马达(集成激光发射器)。系统内20ms为一个扫描周期。基站的定位原理如下图所示: + + + + 一个周期的定位过程:1、首先基站的红外灯阵列闪烁一次,发射一组红外光同步光覆盖整个扫描范围,当追踪器*1 上一组光敏(每个追踪器含有多组光敏)传感器组件搭载的光电转换芯片接收到该端光敏传感器捕捉到的来自基站的红外同步光信息后,启动全部传感器与惯性系统(IMU)并将数据清零。2、基站的X轴马达横向发射激光(旋转10ms闪烁一次后停止)3、基站的红外灯阵列再闪烁一次,追踪器捕捉到第二次红外光同步信息后将准备捕捉Y轴光数据 4、Y轴马达纵向发射激光(旋转10ms闪烁一次后停止),至基站下次红外灯阵列闪烁后代表一个扫描周期结束。如下图所示: + +![](./Image/Lighthouse扫描过程.webp) + +### 1.2 Lighthouse基站运行模式 + + 1.0基站共有三种运行模式(通过按钮切换):单独A模式,有线A、B模式,无线B、C模式。一个空间内只运行一个基站时,设置为A模式或B模式均可单独运行。一个空间内运行双基站,当俩个基站设置为A模式与B模式(有线模式)时,由A模式的基站作为主基站。将线缆连接俩个基站后,B模式的基站将与A模式基站通过线缆同步扫描周期(同步红外光阵列点亮时间与马达转速、激光发射器点亮间)当基站处于B、C模式(无线模式)时,由B模式的基站作为主基站,C模式的基站上的一组光敏捕捉到B模式基站的红外同步光后,通过分析一段时间内捕捉到的红外同步光的频率便可与B基站同步扫描周期(同步红外光阵列点亮时间与马达转速、激光发射器点亮时间)当一个空间内运行双基站时,由追踪器判定使用哪一个基站的激光:当多组光敏同时捕捉到某一个基站的光信息最多时,优先使用该组基站的激光参与融合计算。 + +## 二、 Lighthouse基站拆解 + +### 2.1 基站的面板拆解 + + 1.0基站有俩种型号,分别为9灯与15灯基站(最早生产15灯基站,2017年后生产的基站基本为9灯基站),9灯基站(红外光同步阵列含9颗灯珠)、15灯基站(红外光同步阵列含15颗灯珠)。9灯基站与15灯基站(区别为红外同步阵列灯珠数量)定位效果与原理相同,9灯阵列灯珠亮度更高。 + + + +