# localize.py
# (the original paste carried a viewer header "localize.py 91 KB" and a dump of
# concatenated line numbers here; removed so the module parses)
  1. # -*- coding: utf-8 -*-
  2. import sys
  3. import time
  4. from collections import namedtuple, defaultdict
  5. import logging;
  6. import math
  7. import datetime
  8. import Queue
  9. import tempfile
  10. import threading, thread
  11. import hashlib
  12. from zipfile import ZipFile
  13. import urllib
  14. import pylab
  15. from matplotlib import pyplot as plt
  16. import numpy as np
  17. # from scipy.misc import pilutil
  18. import scipy.misc as pilutil
  19. from PIL import Image, ImageDraw
  20. from scipy import ndimage
  21. from concurrent import futures
  22. from utils import normalizeLogarithmic, normalizeDeviceAdaption, loadLocations
  23. import utils.accelerated as speed
  24. from utils import daemonthread
  25. from utils.path import path
  26. from utils.volumeimage import VolumeImage
  27. from utils.objparser import Mesh
  28. from utils.xml import StringToTree, subelement, TreeToFile, TreeToString
  29. from utils.materials import materialsAsXml
  30. from utils.url import getOpener
  31. from intersect import Mesh2D
  32. # threaded access from volview gui
  33. try:
  34. from pyface.api import GUI
  35. except ImportError:
  36. GUI = None
# module-wide lock serializing lazy creation of cached 2d-cut files
# (see Environment.getCutFile)
lock = threading.Lock()
log = logging.getLogger('lws')
# HTTP openers for the raytracer server; the multipart variant is used for
# uploading .obj files in Environment.buildSimulations
mpart_opener = getOpener(enableMultipart=True)
opener = getOpener()
  41. #~ davalues = [(-90.0, 0.1), (-80.0, -4.8), (-70.0, -1.1), (-60.0, -1.4), (-50.0, 4.6), (-40.0, 8.6), (-30.0, 4.8), (0.0, -0.7)]
  42. #~ davalues = [(-90.0, 7.7), (-85.0, -6.2), (-80.0, -3.7), (-75.0, -4.8), (-70.0, -3.3), (-65.0, -8.4), (-60.0, -4.2), (-55.0, -4.8), (-50.0, -4.5), (-45.0, 2.0), (-40.0, -0.2), (-30.0, -4.4), (-20.0, -6.1), (0.0, 1.0)]
  43. def errorsFromLists(p1, p2, mode='3d'):
  44. errors = []
  45. if mode == '3d':
  46. for (x1, y1, z1), (x2, y2, z2) in zip(p1, p2):
  47. error = ((x2-x1)**2 + (y2-y1)**2 + (z2-z1)**2)**0.5
  48. errors.append(error)
  49. else:
  50. for (x1, y1, z1), (x2, y2, z2) in zip(p1, p2):
  51. error = ((x2-x1)**2 + (y2-y1)**2)**0.5
  52. errors.append(error)
  53. return errors
  54. class AccessPoint(object):
  55. def __init__(self, name, x, y, z, volumeImagePath, davalues):
  56. t = time.time()
  57. self.name = name
  58. self.x = x
  59. self.y = y
  60. self.z = z
  61. self.davalues = list(davalues)
  62. self.vi = None
  63. self.dbdata = None
  64. self.loaded = False
  65. # interpolate name or use plain
  66. self.datfile = path((volumeImagePath % name) if '%s' in volumeImagePath else volumeImagePath)
  67. if not self.datfile.exists():
  68. log.warn('%s does not exists' % self.datfile.abspath())
  69. else:
  70. try:
  71. self.vi = VolumeImage(self.datfile)
  72. # volume image data in decibel values
  73. #~ self.dbdata = self.vi.asDecibelData(-130)
  74. self.dbdata = speed.toNormalizedDecibelwithDeviceAdaption(self.vi.data, davalues)
  75. #~ log.debug('loaded ap %s from %s in %.2f sec' % (name, self.datfile.abspath(), time.time() - t))
  76. self.loaded = True
  77. except Exception:
  78. log.exception('error during loading AP %s from %s' % (name, self.datfile.abspath()))
  79. raise
# scene voxel-grid description: t* = translation (origin offset, meters),
# s* = scaling (meters per voxel) for the x/y/z axes
Dimensions = namedtuple('Dimensions', 'tx ty tz sx sy sz')
  81. class Environment(object):
  82. def __init__(self, objfile=None, di=None, aps=(), davalues=None, locationfile=None, tmpdir=None, vi_path=None, bbox2d=None):
  83. ''' setup localization environment
  84. @param objfile: path to .obj file that hold the geometry
  85. @type objfile: str or path instance
  86. @param di: dimensions of scene - will be retrieved from APs if omitted
  87. @type di: Dimensions()
  88. @param aps: list of aps
  89. @type aps: [(apid, x, y, z), ] or [AccessPoint()]
  90. @param locationfile: path to configured locations
  91. @type locationfile: str or path instance
  92. @param vi_path: path to volumeimages - if path contains a "%s" the apid will be interpolated
  93. @param vi_path: str
  94. '''
  95. if tmpdir is None:
  96. self.tmpdir = path(tempfile.gettempdir())
  97. else:
  98. self.tmpdir = path(tmpdir)
  99. self.vi_path = vi_path
  100. log.debug('using vis from %s' % vi_path)
  101. t = time.time()
  102. self.objfile = path(objfile)
  103. if objfile:
  104. self.mesh = Mesh(objfile)
  105. log.debug('parsed objfile in %.1f secs' % (time.time() - t))
  106. # HARDCODE
  107. if bbox2d is None:
  108. # 2d mesh cut
  109. mesh_xlim = (-1, 58)
  110. mesh_ylim = (-1, 18)
  111. else:
  112. mesh_xlim = int(bbox2d[0]), int(bbox2d[1])
  113. mesh_ylim = int(bbox2d[2]), int(bbox2d[3])
  114. pixel_per_meter = 16
  115. self.mesh2d = Mesh2D(self.mesh, mesh_xlim, mesh_ylim, pixel_per_meter)
  116. # dimensions values of environment
  117. # will be autoloaded from volumeimages of APs if not given explicitly
  118. self.di = di
  119. self.aps = {}
  120. if len(aps) > 0:
  121. if not isinstance(aps[0], tuple):
  122. # already constructed APs
  123. for ap in aps:
  124. self.addAP(ap)
  125. else: # tuple-form (apid, x, y, z)
  126. assert vi_path is not None, 'vi_path must be given if Accesspoints should be constructed'
  127. assert davalues is not None, 'davalues must be given if Accesspoints should be constructed'
  128. t = time.time()
  129. #~ for apid, x, y, z in aps:
  130. #~ try:
  131. #~ self.addAP(AccessPoint(apid, x, y, z, vi_path, davalues))
  132. #~ except Exception:
  133. #~ pass
  134. with futures.ThreadPoolExecutor(max_workers=4) as executor:
  135. for apid, x, y, z in aps:
  136. f = executor.submit(AccessPoint, apid, x, y, z, vi_path, davalues)
  137. f.add_done_callback(lambda f: self.addAP(f.result()))
  138. log.debug('loaded %s [of %s] aps in %.3f secs' % (len(self.aps), len(aps) , time.time() - t))
  139. self.locationfile = locationfile
  140. if locationfile is not None:
  141. self.locid2location = loadLocations(locationfile)
  142. else:
  143. self.locid2location = None
  144. def getCutFile(self, z):
  145. lcf = self.tmpdir.joinpath(path('cut_%.0f.png' % z))
  146. with lock:
  147. if not lcf.exists():
  148. log.debug('building 2d cut for z: %.0f' % z)
  149. to_be_combined = []
  150. for objname in self.mesh2d.objnames:
  151. img = self.mesh2d.cut(objname, z)
  152. to_be_combined.append(pilutil.fromimage(img))
  153. all_objects = np.add.reduce(np.array(to_be_combined))
  154. log.debug('cache file to %s' % lcf.abspath())
  155. pilutil.imsave(lcf, all_objects)
  156. return lcf
  157. def addAP(self, ap):
  158. ''' adds AccessPoint volumeimage data by .dat file'''
  159. self.aps[ap.name] = ap
  160. # use dimensions of first added AP and assert all following dimensions are the same
  161. if self.di is not None and ap.vi is None:
  162. # probably aps without data
  163. return
  164. di = ap.vi.di
  165. di = Dimensions(sx=di.sx, sy=di.sy, sz=di.sz, tx=di.tx, ty=di.ty, tz=di.tz)
  166. if self.di is None:
  167. self.di = di
  168. else:
  169. assert self.di == di
  170. def replaceAPs(self, newVolumeImagePath):
  171. for name, ap in self.aps.items():
  172. self.aps[name] = AccessPoint(name, ap.x, ap.y, ap.z, volumeImagePath=newVolumeImagePath, davalues=ap.davalues)
  173. def translateToVoxel(self, x, y, z, cube_width=1):
  174. '''translate float real world coordinates into int coordinates
  175. for space (self.di.x, self.di.y, self.di.z)'''
  176. return (int(round((x - self.di.tx) / self.di.sx)) / cube_width,
  177. int(round((y - self.di.ty) / self.di.sy)) / cube_width,
  178. int(round((z - self.di.tz) / self.di.sz)) / cube_width)
  179. def translateToMeter(self, x, y, z):
  180. '''translate int coordinates into float real world coordinates
  181. for space (self.di.x, self.di.y, self.di.z)'''
  182. return (x * self.di.sx + self.di.tx,
  183. y * self.di.sy + self.di.ty,
  184. z * self.di.sz + self.di.tz)
  185. def cube2meter(self, x, y, z, cubewidth):
  186. #~ print x,y,z
  187. x = x * cubewidth# + cubewidth / 2.0
  188. y = y * cubewidth# + cubewidth / 2.0
  189. z = z * cubewidth# + cubewidth / 2.0
  190. #~ print x,y,z
  191. # estimated x, y, z in real world coords
  192. return self.translateToMeter(x, y, z)
  193. def translatePixelToMesh2dPixel(self, x, y):
  194. ''' translate self.vi int coordinates into mesh2d pixel coordinate space '''
  195. in_m_y, in_m_x, _ = self.translateToMeter(x, y, 0)
  196. return self.mesh2d.meter2pixel(in_m_x, in_m_y)
  197. def translateMeterToMesh2dPixel(self, x, y):
  198. ''' translate self.vi meter coordinates into mesh2d pixel coordinate space '''
  199. return self.mesh2d.meter2pixel(x, y)
  200. def translateMeterToMesh2dPixel(self, x, y):
  201. ''' translate self.vi meter coordinates into mesh2d pixel coordinate space '''
  202. return self.mesh2d.meter2pixel(x, y)
  203. def createMeasurement(self, x, y, z, ts, noise, sample_environment=0):
  204. values = {}
  205. for ap in self.aps.values():
  206. v = ap.dbdata[self.translateToVoxel(x, y, z)]
  207. if v <= -100:
  208. continue # ignore ap
  209. # add some samples from surrounding
  210. if sample_environment > 0:
  211. se = sample_environment
  212. additional = [(x+se,y,z),(x-se,y,z),(x,y+se,z),(x,y-se,z),(x,y,z+se),(x,y,z-se)]
  213. vs = []
  214. for _x, _y, _z in additional:
  215. _v = ap.dbdata[self.translateToVoxel(_x, _y, _z)]
  216. vs.append(_v)
  217. v = sum(vs + [v]) / (len(vs)+1)
  218. #~ if v < -80:
  219. #~ continue
  220. # add noise with variance = noise
  221. if noise > 0:
  222. v += np.random.normal(scale=noise)
  223. values[ap.name] = v
  224. return Measurement(ts, values, Position(x, y, z))
  225. def generateSyntheticMeasurementsAtPositions(self, positions, speed=0.6, noise=5, sample_environment=0):
  226. measurements = Measurements()
  227. curr_ts = 0
  228. _x, _y, _z = self.cube2meter(*self.translateToVoxel(*positions[0], cube_width=1), cubewidth=1)
  229. measurements.add(self.createMeasurement( _x, _y, _z, ts=curr_ts, noise=noise, sample_environment=sample_environment))
  230. for (x1, y1, z1), (x2, y2, z2) in zip(positions, positions[1:]):
  231. delta_v = (x2-x1, y2-y1, z2-z1) # delta vector
  232. distance = (delta_v[0]**2 + delta_v[1]**2 + delta_v[2]**2)**0.5 # euclidian distance
  233. curr_ts += distance / speed
  234. _x, _y, _z = self.cube2meter(*self.translateToVoxel(x2, y2, z2, cube_width=1), cubewidth=1)
  235. measurements.add(self.createMeasurement(_x, _y, _z, curr_ts, noise, sample_environment))
  236. return measurements
  237. def generateSyntheticMeasurementsFromCorners(self, corners, speed=0.6, granularity=0.5, noise=5):
  238. ''' Generate a synthetic series of measurements at coords
  239. by simulating a walk with speed in m/s.
  240. Granularity (in m) is used to interpolate the points between two coords
  241. '''
  242. measurements = Measurements()
  243. curr_ts = 0 # initialize time with 0
  244. for (x1, y1, z1), (x2, y2, z2) in zip(corners, corners[1:]):
  245. _x, _y, _z = self.cube2meter(*self.translateToVoxel(x1, y1, z1, cube_width=1), cubewidth=1)
  246. measurements.add(self.createMeasurement(_x, _y, _z, curr_ts, noise))
  247. delta_v = (x2-x1, y2-y1, z2-z1) # delta vector
  248. distance = (delta_v[0]**2 + delta_v[1]**2 + delta_v[2]**2)**0.5 # euclidian distance
  249. if distance > 0:
  250. num_intermediate = int(distance / granularity)
  251. for i in range(1, num_intermediate):
  252. _x, _y, _z = (
  253. x1 + (delta_v[0] * i / float(num_intermediate)),
  254. y1 + (delta_v[1] * i / float(num_intermediate)),
  255. z1 + (delta_v[2] * i / float(num_intermediate)))
  256. curr_distance = i / float(num_intermediate) * distance
  257. _curr_ts = curr_ts + curr_distance * speed**-1
  258. _x, _y, _z = self.cube2meter(*self.translateToVoxel(_x, _y, _z, cube_width=1), cubewidth=1)
  259. #~ _x, _y, _z = self.cube2meter(_x, _y, _z, cubewidth=1)
  260. measurements.add(self.createMeasurement(_x, _y, _z, _curr_ts, noise))
  261. curr_ts += distance * speed**-1
  262. else:
  263. # add 3 measurements if no movements
  264. measurements.add(self.createMeasurement(x1, y1, z1, curr_ts + 1, noise))
  265. measurements.add(self.createMeasurement(x1, y1, z1, curr_ts + 2, noise))
  266. measurements.add(self.createMeasurement(x1, y1, z1, curr_ts + 3, noise))
  267. curr_ts = curr_ts + 3
  268. return measurements
  269. def buildSimulations(self, scene_name, materials, ap2power, bbox, resolution,
  270. density, numphotons, disabledObjects, baseUrl, onResult=None):
  271. ''' build AP simulations using a set of raytracer params over the server HTTP interface
  272. this will refresh the current ap data
  273. '''
  274. oh = hashlib.sha1(self.objfile.bytes()).hexdigest()
  275. def queue():
  276. log.info('using %s materials' % len(materials.items()))
  277. for matname, mat in materials.items():
  278. log.info('%s: %s, %s' % (matname, mat.reflect, mat.alpha))
  279. log.info('using objectfile %s [hash: %s]' % (self.objfile.abspath(), oh))
  280. runids = {}
  281. for ap in self.aps.values():
  282. log.info('AP params: id: %s, power: %s x,y,z: %s, %s, %s' % (ap.name, ap2power[ap.name], ap.x, ap.y, ap.z))
  283. outname = '%s_%s' % (scene_name, ap.name)
  284. b = bbox
  285. qparams = urllib.urlencode(dict(
  286. scenename = scene_name,
  287. objhash = oh,
  288. materials = TreeToString(materialsAsXml(materials)),
  289. ap = str({'x': ap.x, 'y': ap.y, 'z': ap.z, 'power': ap2power[ap.name], 'id': ap.name, 'powerid': 'all'}),
  290. bbox = str({'x1': b.x1, 'y1': b.y1, 'z1': b.z1, 'x2': b.x2, 'y2': b.y2, 'z2': b.z2}),
  291. resolution = str({'x': resolution.x, 'y': resolution.y, 'z': resolution.z}),
  292. density = str(density),
  293. numphotons = str(numphotons),
  294. disabledObjects = ','.join(disabledObjects)
  295. ))
  296. log.info('queueing %s... ' % ap.name)
  297. info = opener.open(baseUrl + '/queueJob', data=qparams).read()
  298. if 'unknown objfile' in info:
  299. log.info('transmitting objfile... [%s]' % self.objfile.abspath())
  300. # transmit new version of objfile
  301. params = {'objfile': open(self.objfile), 'expectedhash': oh}
  302. mpart_opener.open(baseUrl + '/uploadobjfile', data=params)
  303. log.info('queueing %s ... ' % ap.name)
  304. # queue again
  305. info = opener.open(baseUrl + '/queueJob', data=qparams).read()
  306. try:
  307. infotree = StringToTree(info)
  308. except Exception:
  309. log.error('cannot parse:\n' + info)
  310. else:
  311. assert infotree.getroot().tag == 'success'
  312. runid = infotree.getroot().attrib['id']
  313. runids[runid] = ap
  314. log.debug('queueing sucessful [%s runs]' % len(runids))
  315. return runids
  316. def _retrieve(runid, apid):
  317. url = baseUrl + '/getJobZipData?runid=%s' % runid
  318. log.info('retrieving zipdata from %s...' % url)
  319. try:
  320. t = time.time()
  321. res = opener.open(url).read()
  322. log.info('got %.1f kb [%.1f kb/sec]' % (len(res) / 1024.0, (len(res) /1024.0) / (time.time() - t)))
  323. except Exception:
  324. log.exception('error during retrieving')
  325. return
  326. try:
  327. datfile = path(self.vi_path % apid)
  328. d = datfile.parent
  329. if not d.exists():
  330. d.mkdir()
  331. tempf = d.joinpath('%s_%s.zip' % (apid, runid))
  332. tempf.write_bytes(res)
  333. try:
  334. zf = ZipFile(tempf)
  335. except Exception:
  336. log.error('cannot open zipfile with contents:\n\n%s' % res[:1000])
  337. raise
  338. for n in zf.namelist():
  339. log.debug('extracting %s' % n)
  340. s = zf.read(n)
  341. f = d.joinpath(n)
  342. log.debug('storing %.1f kb to %s' % (len(s) / 1024.0, f.abspath()))
  343. f.write_bytes(s)
  344. assert datfile.exists(), '%s does not exist' % datfile.abspath()
  345. except Exception:
  346. log.exception('error during unzipping')
  347. def _check(runids):
  348. _getids = lambda s: {e for e in s.split(',') if e.strip()}
  349. startts = time.time()
  350. c = len(runids)
  351. while len(runids) > 0:
  352. _runids = ','.join(runids.keys())
  353. try:
  354. url = baseUrl + '/queryJobs?runids=%s' % _runids
  355. res = opener.open(url).read()
  356. tree = StringToTree(res)
  357. if onResult is not None:
  358. onResult(runids, tree)
  359. finished = _getids(tree.getroot().attrib['finished'])
  360. for runid in finished:
  361. apid = runids[runid].name
  362. log.info('run %s for ap %s is finished' % (runid, apid))
  363. runids.pop(runid)
  364. thread.start_new_thread(_retrieve, (runid, apid))
  365. except Exception:
  366. log.exception('error during querying jobs')
  367. time.sleep(1.0)
  368. log.info('quering finished, %s sims %.1f sec per sim' % (c, (time.time() - startts) / c))
  369. if self.vi_path is not None:
  370. self.replaceAPs(self.vi_path)
  371. runids = queue()
  372. thread.start_new_thread(_check, (runids, ))
# immutable 3d real-world coordinate (in meters)
Position = namedtuple('Position', 'x y z')
  374. class Measurement(object):
  375. ''' holds rssi values for each AP at a given position '''
  376. def __init__(self, timestamp, apdata, pos=(0,0,0), speed=None, interpolated=True):
  377. self.pos = Position(*pos)
  378. self.apdata = apdata # key AP().name, value rssi
  379. self.timestamp = timestamp
  380. self.speed = speed
  381. self.interpolated = interpolated # to differentiate between interpolated/spawned and load measurements
  382. def __repr__(self):
  383. return str(self)
  384. def __str__(self):
  385. s = ' '.join('%s:%.3f' % (k, v) for k, v in sorted(self.apdata.items()))
  386. ts = ('%.1f' % self.timestamp) if isinstance(self.timestamp, (float, int)) else self.timestamp.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]
  387. pos = ('%.3f,%.3f,%.3f' % self.pos) if (self.pos.x, self.pos.y, self.pos.z) != (0,0,0) else 'n/a'
  388. speed = ' speed:%.3f' % self.speed if self.speed is not None else ''
  389. return 'ts:%s%s pos:%s %s' % (ts, speed, pos, s)
  390. class Measurements(list):
  391. ''' a series of measurements'''
  392. def __init__(self, measurements=None, fname=None):
  393. if measurements is not None:
  394. self[:] = measurements
  395. elif fname is not None:
  396. self.load(fname)
  397. def add(self, m):
  398. self.append(m)
    def plot(self, fname=None, interpolate=False):
        ''' plot the rssi time series of every AP seen in this series

        @param fname: save the figure there; if None, show it interactively
        @param interpolate: if True, draw a stineman-interpolated curve (200
            points) instead of the raw polyline + scatter markers
        '''
        aspectRatio = 0.5
        dpi = 60
        width = 1000  # figure width in pixels; figsize is derived from it
        figsize = width / float(dpi), width / float(dpi) * aspectRatio
        fig = plt.figure(figsize=figsize)
        ax = plt.axes([0.08, 0.07, 0.90, 0.91], xlabel='Timestamp', ylabel=r'RSS')
        # collect every AP id that occurs anywhere in the series
        apids = set()
        for m in self:
            apids.update(m.apdata.keys())
        apids = list(apids)#[:3]
        for apid in apids:
            # one line per AP; measurements without this AP are skipped
            xs = []
            ys = []
            for m in self:
                if apid in m.apdata:
                    xs.append(m.timestamp)
                    ys.append(m.apdata[apid])
            # NOTE(review): date2num assumes datetime timestamps - float
            # timestamps would fail here; confirm against callers
            xs = pylab.date2num(xs)
            ax.xaxis_date()
            if interpolate:
                xsi = pylab.linspace(xs[0], xs[-1], 200);
                ysi = pylab.stineman_interp(xsi, xs, ys, yp=None)
                ax.plot(xsi, ysi)
            else:
                ax.plot(xs, ys)
                ax.scatter(xs, ys,)
        #~ for m in self:
        #~ xs.append(m.timestamp)
        #~ ys.append(m.apdata.values())
        #~ plot(xs, self.asArray(self.apdata.keys()))
        leg = plt.legend(apids)
        ltext = leg.get_texts()
        llines = leg.get_lines()
        plt.setp(ltext, fontsize='x-small')
        plt.setp(llines, linewidth=0.7)
        leg.draw_frame(False)
        if fname is None:
            plt.show()
        else:
            fig.savefig(fname)
  440. def save(self, fname):
  441. with open(fname, 'w') as f:
  442. f.write(str(self))
  443. def loadFromString(self, s):
  444. self[:] = []
  445. lines = [l for l in s.strip().split('\n') if l]
  446. for l in lines:
  447. apdata = {}
  448. ts = None
  449. pos = Position(0, 0, 0)
  450. speed = None
  451. for s in l.split():
  452. if s.startswith('ts:'):
  453. if 'T' in s:
  454. if '.' in s[3:]:
  455. ts = datetime.datetime.strptime(s[3:], '%Y-%m-%dT%H:%M:%S.%f')
  456. else:
  457. ts = datetime.datetime.strptime(s[3:], '%Y-%m-%dT%H:%M:%S')
  458. else:
  459. ts = datetime.datetime.fromtimestamp(float(s[3:]))
  460. elif s.startswith('pos:'):
  461. pos = Position(*[float(e) for e in s[4:].split(',')])
  462. elif s.startswith('speed:'):
  463. speed = float(s[6:])
  464. else:
  465. name, value = s.split(':')
  466. #~ if float(value) > -80:
  467. apdata[name] = float(value)
  468. self.add(Measurement(ts, apdata, pos, speed=speed, interpolated=False))
  469. def load(self, fname):
  470. try:
  471. self.loadFromString(open(fname, 'r').read())
  472. except IOError:
  473. log.exception('cannot load %s' % path(fname).abspath())
    def spawn(self, cube_distance, walking_speed=1.5, interpolate_positions=True):
        ''' spawn a number new measurements between 2 original measurements

        linearly interpolates timestamps, rssi values and (optionally)
        positions between each consecutive pair so that adjacent states are at
        most cube_distance meters apart, given the walking speed.

        @param cube_distance: max spatial gap (meters) between spawned states
        @param walking_speed: assumed speed (m/s) when a measurement carries
            no speed of its own; note it is additionally multiplied by 1.5 below
        @param interpolate_positions: if False, spawned measurements get pos (0,0,0)
        @return: a new Measurements instance (self is left untouched)
        '''
        assert len(self) > 1, len(self)
        newData = []
        for m1, m2 in zip(self, self[1:]):
            #~ assert m2.timestamp > m1.timestamp, '%s > %s' % (m2.timestamp, m1.timestamp)
            if m2.speed is not None:
                speed = m2.speed * 1.0
            else:
                # NOTE(review): the extra *1.5 on top of the default looks like a
                # deliberate overestimate - confirm the intent
                speed = walking_speed * 1.5
            # requires datetime timestamps (total_seconds)
            duration = (m2.timestamp - m1.timestamp).total_seconds()
            if interpolate_positions:
                delta_x = m2.pos.x - m1.pos.x
                delta_y = m2.pos.y - m1.pos.y
                delta_z = m2.pos.z - m1.pos.z
            max_distance_walked = duration * speed
            min_states_needed = int(max_distance_walked / cube_distance)
            if min_states_needed < 2:
                # pair is close enough already; keep m1 as-is
                newData.append(m1)
                continue
            interval = duration / min_states_needed
            # get apids, that are in both measurements available
            apids = set(m1.apdata.keys()).intersection(m2.apdata.keys())
            newData.append(m1)
            for i in range(1, min_states_needed):
                # split the interval into whole seconds + microseconds
                seconds = int(interval * i)
                microseconds = 10**6 * ((interval * i) - seconds)
                ts = m1.timestamp + datetime.timedelta(seconds=seconds, microseconds=microseconds)
                apdata = {}
                for apid in apids:
                    # linear rssi interpolation between the two endpoints
                    apdata[apid] = m1.apdata[apid] + (m2.apdata[apid] - m1.apdata[apid]) * (float(i) / min_states_needed)
                if interpolate_positions:
                    pos = Position(m1.pos.x + i * delta_x / min_states_needed,
                                   m1.pos.y + i * delta_y / min_states_needed,
                                   m1.pos.z + i * delta_z / min_states_needed)
                else:
                    pos = (0, 0, 0)
                newData.append(Measurement(ts, apdata, pos=pos))
        # the last has always to be added
        newData.append(m2)
        if len(newData) > len(self):
            #~ log.info('SPAWNING: increased measurements from %s to %s' % (len(self), len(newData)))
            return Measurements(newData)
        else:
            log.info('SPAWNING: not needed (%s < %s)' % (len(newData), len(self)))
            return Measurements(self)
  521. def __str__(self):
  522. res = []
  523. for m in self:
  524. res.append(str(m))
  525. return '\n'.join(res)
  526. def __getitem__(self, val):
  527. if isinstance(val, int):
  528. return super(Measurements, self).__getitem__(val)
  529. else: # slice
  530. return Measurements(super(Measurements, self).__getitem__(val))
  531. def asArray(self, apids, no_signal=1, unavailable=2):
  532. available_aps = set()
  533. for m in self:
  534. available_aps.update(m.apdata.keys())
  535. res = np.zeros((len(self), len(apids)), dtype=np.double) + no_signal
  536. #~ lastApData = defaultdict(lambda: -100)
  537. for i, m in enumerate(self):
  538. for j, apid in enumerate(apids):
  539. if not apid in available_aps:
  540. res[i, j] = unavailable
  541. elif apid in m.apdata:
  542. res[i, j] = m.apdata[apid]
  543. # else no_signal
  544. return res
    def interpolateLocations(self, locations, timestamps):
        ''' interpolate positions of measurements by evaluating "checkpoints" given in locations

        @param locations: list of (x, y, z) checkpoint coordinates
        @param timestamps: matching checkpoint times, either datetime objects
            or ISO-formatted strings (parsed below)

        mutates m.pos (and m.speed for interpolated points) in place.
        '''
        if len(timestamps) == 0 or len(self) == 0:
            return
        assert not isinstance(self[0].timestamp, float), 'does only work with datetime based timestamps'
        def _get(ts):
            # parse ISO timestamps with or without fractional seconds
            try:
                return datetime.datetime.strptime(ts, '%Y-%m-%dT%H:%M:%S.%f')
            except ValueError:
                return datetime.datetime.strptime(ts, '%Y-%m-%dT%H:%M:%S')
        # NOTE(review): basestring is Python 2 only
        if isinstance(timestamps[0], basestring):
            timestamps = [_get(ts) for ts in timestamps]
        for m in self:
            if m.timestamp < timestamps[0]:
                # everything before first checkpoint timestamp is assumed to be at this checkpoint
                m.pos = Position(*locations[0])
            elif m.timestamp > timestamps[-1]:
                # everything after last checkpoint timestamp is assumed to be at this checkpoint
                m.pos = Position(*locations[-1])
            else:
                # find the surrounding checkpoint pair and interpolate linearly
                for loc1, ts1, loc2, ts2 in zip(locations, timestamps, locations[1:], timestamps[1:]):
                    # NOTE(review): strictly-between comparison - a measurement
                    # whose timestamp equals a checkpoint's gets no position and
                    # may trip the assert below; confirm whether that can occur
                    if ts1 < m.timestamp < ts2:
                        # much time has passed between two location checkpoints
                        loc1_loc2_duration = (ts2 - ts1).total_seconds()
                        loc1_loc2_distance = ((loc2[0] - loc1[0])**2 + (loc2[1] - loc1[1])**2 + (loc2[2] - loc1[2])**2)**0.5
                        # difference of measurement time to last checkpoint time
                        delta_ts = (m.timestamp - ts1).total_seconds()
                        # how much relative distance do we have walked between two checkpoints
                        relative_distance = delta_ts / loc1_loc2_duration
                        x = loc1[0] + (loc2[0] - loc1[0]) * relative_distance
                        y = loc1[1] + (loc2[1] - loc1[1]) * relative_distance
                        z = loc1[2] + (loc2[2] - loc1[2]) * relative_distance
                        m.pos = Position(x, y, z)
                        m.speed = loc1_loc2_distance / loc1_loc2_duration
                        break
                #~ print loc, ts
                # sanity check: interpolation must have assigned a position
                assert m.pos.x != 0
  582. def interpolateFromLocationString(self, s, locid2location):
  583. locids, timestamps = zip(*[l.strip().split() for l in s.split('\n') if l.strip()])
  584. locs = [locid2location[int(locid[6:])] for locid in locids]
  585. locations = [(l.x, l.y, l.z) for l in locs]
  586. timestamps = [ts[3:] for ts in timestamps]
  587. self.interpolateLocations(locations, timestamps)
  588. def interpolateSignals(self):
  589. ''' interpolate missing ap values from neighboring measurements (via forwardscan)'''
  590. last_apdata = self[0].apdata
  591. last_ts = self[0].timestamp
  592. for i, m in enumerate(self[1:]):
  593. for apid in last_apdata:
  594. if apid in m.apdata:
  595. continue
  596. # forward scan
  597. duration = delta = None
  598. for j in range(i+1, len(self)):
  599. if apid in self[j].apdata:
  600. # found next signal
  601. duration = (self[j].timestamp - last_ts).total_seconds()
  602. delta = self[j].apdata[apid] - last_apdata[apid]
  603. break
  604. # interpolate signal from i+1 to j
  605. if duration is not None:
  606. for j in range(i+1, j):
  607. ratio = (self[j].timestamp - last_ts).total_seconds() / duration
  608. self[j].apdata[apid] = last_apdata[apid] + delta * ratio
  609. last_apdata = m.apdata
  610. last_ts = m.timestamp
  611. @staticmethod
  612. def fromTrackedMeasurements(locid2location, basePath, deviceid, pathid, runid):
  613. d = path(basePath).joinpath(deviceid, pathid)
  614. measurements = Measurements()
  615. measurements.load(d.joinpath('measurements_%s.txt' % runid))
  616. #~ for m in measurements:
  617. #~ print m.timestamp
  618. if measurements[0].pos == (0, 0, 0):
  619. locstring = d.joinpath('locations_%s.txt' % runid).text()
  620. measurements.interpolateFromLocationString(locstring, locid2location)
  621. else:
  622. pass # assume positions are stored in measurements
  623. return measurements
  624. def updateScalarField(env, gui, data, inverted=False, data_range=None):
  625. gui.scalar_field.mlab_source.scalars = data
  626. #~ _, _, z = env.translateToVoxel(0, 0, 1.5)
  627. #~ gui.ipw_z.ipw.slice_index = z
  628. slm = gui.ipw_z.module_manager.scalar_lut_manager
  629. #~ print dir(slm)
  630. if data_range is None:
  631. _min, _max = data.min(), data.max()
  632. print (_min, _max)
  633. slm.data_range = (_min, _max)
  634. else:
  635. slm.data_range = data_range
  636. #~ slm.data_range = (10, 35)
  637. slm.reverse_lut = inverted
  638. def calculateDistance(env, gui, _log, pos=2):
  639. '''build distance heatmap in gui for a position in a measurement path'''
  640. p = Measurements()
  641. p.load(r'N:\lws\tmp\path_iconia_0.txt')
  642. measurement = p[pos]
  643. all_deltas = []
  644. for name, rssi in measurement.apdata.items():
  645. if not name in ['107', '108', '109p2', 'gaia']:
  646. print 'skipping %s' % name
  647. continue
  648. ap = env.aps[name]
  649. print name, rssi
  650. deltas = ap.dbdata - rssi
  651. all_deltas.append(deltas)
  652. if len(all_deltas) == 0:
  653. print 'nothing measured'
  654. return
  655. print 'got all deltas'
  656. all_squared_deltas = [np.square(deltas) for deltas in all_deltas]
  657. print 'squared deltas'
  658. distances = np.sqrt(np.add.reduce(np.array(all_squared_deltas)))
  659. print 'got distances'
  660. min_dist = distances.min()
  661. max_dist = distances.max()
  662. updateScalarField(env, gui, distances, inverted=True, data_range=(min_dist+1, min_dist+25))
  663. return distances
  664. def gauss_nd_in_logspace(means, sq_sigmas, values, reduced=True):
  665. ''' n-dimensional gaussian with independent components in logspace'''
  666. s = 0
  667. num_emitted = 0
  668. for mu, sq_sigma, x in zip(means, sq_sigmas, values):
  669. if x == -2:
  670. continue
  671. elif x == -1:
  672. if mu > -100:
  673. s = s + 5
  674. num_emitted = num_emitted + 1
  675. continue
  676. #~ if sq_sigma > 5:
  677. #~ sq_sigma = 5
  678. #~ assert sq_sigma > 0, sq_sigma
  679. if not reduced:
  680. log_p = -1 * (-(x - mu)**2 / (2 * sq_sigma) - math.log(math.sqrt(2 * math.pi * sq_sigma)))
  681. #~ log_p = -(x - mu)**2 / (2 * sq_sigma) - math.log(sq_sigma)
  682. else:
  683. # pooled variance
  684. log_p = x - mu if x > mu else mu - x
  685. num_emitted += 1
  686. s += log_p
  687. #~ if num_emitted > 0:
  688. #~ s = s / num_emitted * 10
  689. return s
def debuggable_viterbi_decode(localizer, path):
    ''' pure python version of HMM-decoder, only used for learning/debugging

    localizer -- provides env, emission_probs, transition_probs, cubewidth
                 and cubed_data_shape
    path -- sequence of RSS value vectors, one per time step

    Runs a min-cost Viterbi over the cube state space, dumping per-column
    debug files to r:/out_<i>.txt, and returns the back-tracked best path as
    a list of ((sx, sy, sz), cost) tuples in forward order.
    '''
    def _fmt(a):
        # compact comma-joined float formatting for the debug dumps
        return ','.join('%.1f' % e for e in a)
    env = localizer.env
    transition_probs = localizer.transition_probs
    emission_probs = localizer.emission_probs
    s_shape_x, s_shape_y, s_shape_z = localizer.cubed_data_shape
    t_shape_x, t_shape_y, t_shape_z = transition_probs.shape[3:]
    numstates = s_shape_x * s_shape_y * s_shape_z
    # pure python decoding is extremely slow; refuse big problems
    assert numstates * len(path) < 100000, 'disable me, if you want to wait... [%sk states]' % (numstates * len(path) / 1000)
    # cost table per (timestep, state); initialized far below any real cost
    all_probs = np.zeros((len(path), s_shape_x, s_shape_y, s_shape_z)) - 10**6
    # best predecessor state per (timestep, state)
    # NOTE(review): np.int is removed in NumPy >= 1.24; plain int is the drop-in
    back_track = np.zeros((len(path), s_shape_x, s_shape_y, s_shape_z, 3), dtype=np.int)
    f = open('r:/out_%s.txt' % 0, 'w')
    t = time.time()
    log.debug('calc initial prob for %sk states in col 0 [of %.2f mio states]' % (numstates / 1000, numstates * len(path) / 10.0**6))
    # column 0: pure emission cost, no transitions yet
    for sx in range(s_shape_x):
        for sy in range(s_shape_y):
            for sz in range(s_shape_z):
                means, sq_sigmas, rss_values = emission_probs[sx, sy, sz, 0], emission_probs[sx, sy, sz, 1], path[0]
                #~ print sx, sy, sz, sq_sigmas
                p_x_s = gauss_nd_in_logspace(means, sq_sigmas, rss_values)
                assert p_x_s > 0, '%s %s %s' % (means, sq_sigmas, rss_values)
                #~ print sx, sy, sz, em_prob
                in_m = '(%.1f, %.1f, %.1f)' % env.translateToMeter(sx * localizer.cubewidth, sy * localizer.cubewidth, sz * localizer.cubewidth)
                f.write('%s %s| p(x|s) %.1f mus: %s sigmas: %s xs: %s \n' % ( (sx, sy, sz), in_m, p_x_s, _fmt(means), _fmt(sq_sigmas), _fmt(rss_values)))
                all_probs[0, sx, sy, sz] = p_x_s
    #~ return []
    # we have to locate the (previous) state referenced by the transition index
    # if the transition_shape is (5, 5, 3) then (tx=2, ty2, tz=1) then previous state is the current state
    # (python2 integer division yields the center offset)
    offset_x = t_shape_x / 2
    offset_y = t_shape_y / 2
    offset_z = t_shape_z / 2
    # dump the top-20 initial hypotheses
    best = []
    for sx in range(s_shape_x):
        for sy in range(s_shape_y):
            for sz in range(s_shape_z):
                best.append((all_probs[0, sx, sy, sz], (sx, sy, sz)))
    f.write('\n')
    for p, coords in list(reversed(sorted(best)))[:20]:
        in_m = '(%.1f, %.1f, %.1f)' % env.translateToMeter(coords[0] * localizer.cubewidth, coords[1] * localizer.cubewidth, coords[2] * localizer.cubewidth)
        f.write('%s %s %.1f\n' % (coords, in_m, p))
    # xs -> emitted vector
    # remaining columns: min over reachable predecessors + emission cost
    for i, rss_values in enumerate(path[1:][:], 1):
        f = open('r:/out_%s.txt' % i, 'w')
        log.debug('calc probs col %s of %s [%.1fk states/sec]' % (i, len(path), (numstates * i / 1000) / (time.time() - t + 0.000001)))
        for sx in range(s_shape_x):
            for sy in range(s_shape_y):
                for sz in range(s_shape_z):
                    transitions = transition_probs[sx, sy, sz]
                    means, sq_sigmas = emission_probs[sx, sy, sz, 0], emission_probs[sx, sy, sz, 1]
                    # calc emission prob for vector with rss values
                    p_x_s = gauss_nd_in_logspace(means, sq_sigmas, rss_values)
                    min_cost = 10**99
                    best_prev_state = (None, None, None)
                    # scan the transition neighborhood for the cheapest predecessor
                    for tx in range(t_shape_x):
                        prev_idx_x = sx + tx - offset_x
                        if not 0 <= prev_idx_x < s_shape_x:
                            continue
                        for ty in range(t_shape_y):
                            prev_idx_y = sy + ty - offset_y
                            if not 0 <= prev_idx_y < s_shape_y:
                                continue
                            for tz in range(t_shape_z):
                                prev_idx_z = sz + tz - offset_z
                                if not 0 <= prev_idx_z < s_shape_z:
                                    continue
                                prev_cost = all_probs[i-1, prev_idx_x, prev_idx_y, prev_idx_z]
                                cost = prev_cost + transitions[tx, ty, tz]
                                if cost < min_cost:
                                    min_cost = cost
                                    best_prev_state = (prev_idx_x, prev_idx_y, prev_idx_z)
                    f.write('%s -> %s with p %.1f | p(x|s) %1f\n' % ( (sx, sy, sz), best_prev_state, min_cost, p_x_s ))
                    all_probs[i, sx, sy, sz] = min_cost + p_x_s
                    back_track[i, sx, sy, sz] = best_prev_state
        # dump the top-5 hypotheses of this column
        best = []
        for sx in range(s_shape_x):
            for sy in range(s_shape_y):
                for sz in range(s_shape_z):
                    best.append((all_probs[i, sx, sy, sz], (sx, sy, sz)))
        f.write('\n')
        for p, coords in list(reversed(sorted(best)))[:5]:
            f.write('%s %.1f\n' % (coords, p))
    print '----'
    # NOTE(review): relies on the loop variable i surviving the for loop;
    # with a single-element path the loop never runs and this raises NameError
    lastCol = i
    print 'lastcol_idx: %s' % lastCol
    print '----'
    # find best path
    min_cost = 10**6
    best_last_state = (None, None, None)
    for sx in range(s_shape_x):
        for sy in range(s_shape_y):
            for sz in range(s_shape_z):
                cost = all_probs[lastCol, sx, sy, sz]
                if cost < min_cost:
                    best_last_state = sx, sy, sz
                    min_cost = cost
    print 'best last state: (%s, %s, %s)' % best_last_state
    # back-track from the best last state to column 0
    estimated_path = []
    curr_state = best_last_state
    curr_cost = min_cost
    for j in range(lastCol, -1, -1):
        estimated_path.append((curr_state, curr_cost))
        #~ print curr_state
        print j, curr_state[0], curr_state[1], curr_state[2]
        curr_cost = all_probs[j, curr_state[0], curr_state[1], curr_state[2]]
        if j > -1:
            curr_state = back_track[j, curr_state[0], curr_state[1], curr_state[2]]
    return list(reversed(estimated_path))
  800. class Localizer(object):
  801. def __init__(self, env, gui=None, num_threads=0, max_aps=None, verbose=True):
  802. self.env = env
  803. self.gui = gui
  804. self.num_threads = num_threads # 0=autodetect
  805. aps = [(apid, ap.dbdata) for apid, ap in self.env.aps.items() if ap.loaded]
  806. self.apdata_list = [ap[1] for ap in aps]
  807. self.apids = [ap[0] for ap in aps]
  808. # autocalced
  809. self.cubed_data_shape = (0, 0, 0)
  810. self.uncubed_data_shape = (0, 0, 0)
  811. self.blockedCubes = None
  812. if self.gui:
  813. gui.localizer = self
  814. self.walking_speed = self.gui.synthetic_measurements_speed if self.gui else 1.5
  815. self.max_aps = max_aps
  816. self.verbose = verbose
  817. self.decoder = None # holds the cython aspect
  818. self.decoder = None # holds the cython aspect for sequential processing
  819. def _setupStates(self):
  820. vi_dimensions = self.env.aps.values()[0].vi.di
  821. self.uncubed_data_shape = (vi_dimensions.x, vi_dimensions.y, vi_dimensions.z)
  822. t = time.time()
  823. #~ self.emission_probs = np.load('r:/eprobs.npy')
  824. self.emission_probs = speed.buildStatesWithEmissionProbs(self.uncubed_data_shape, self.apdata_list, self.cubewidth, 1, pooled_variance=2)
  825. #~ np.save('r:/eprobs.npy', self.emission_probs)
  826. self.cubed_data_shape = self.emission_probs.shape[:3]
  827. log.debug('build emission probs in %.3f sec' % (time.time() - t))
  828. self.blockedCubes = blockedCubes = self._buildTriangleCubeIntersection()
  829. speed.evalUnreachableAirCubes(blockedCubes, cube_height=self.env.di.sz * self.cubewidth, max_reachable_height=2.0)
  830. def analyzeEstimatedPath(self, estimated_path, cube_values=None):
  831. errors = []
  832. data = np.zeros(self.cubed_data_shape)
  833. for i, (x, y, z) in enumerate(estimated_path):
  834. in_m = self.env.translateToMeter(x * self.cubewidth, y * self.cubewidth, z * self.cubewidth)
  835. if self.verbose == 2:
  836. log.debug('%s %s (%.1f, %.1f, %.1f)' % (i, (x, y, z), in_m[0], in_m[1], in_m[2])) #, measurement[i].pos
  837. if i > 0:
  838. x1, y1, z1 = estimated_path[i-1]
  839. # will only work for 5,5,3 model
  840. if abs(x1 - x) == 2 or abs(y1 - y) == 2 or abs(x1 - x) == 2:
  841. if cube_values is not None:
  842. data[(x1 + x) / 2, (y1 + y) / 2, (z1 + z) / 2] = (cube_values[i-1] + cube_values[i]) / 2.0
  843. else:
  844. data[(x1 + x) / 2, (y1 + y) / 2, (z1 + z) / 2] = (len(estimated_path) * 5) + i * 10
  845. #~ print i, cube_values[i]
  846. if cube_values is not None:
  847. if i < len(cube_values):
  848. data[x, y, z] = cube_values[i]
  849. else:
  850. log.warn('index %s execeeds len(cube_values)' % i)
  851. else:
  852. data[x, y, z] = (len(estimated_path) * 5) + i * 10
  853. #~ data[x, y, z] = i * 10
  854. data = speed.uncube(data, self.uncubed_data_shape)
  855. if self.gui:
  856. gui = self.gui
  857. def doStuff():
  858. gui.scalar_field.mlab_source.scalars = data
  859. gui.surface_number_of_contours = 10
  860. gui.surface_view.visible = True
  861. #~ updateScalarField(self.env, gui, data)
  862. #~ _, _, z = self.env.translateToVoxel(0, 0, in_m[2])
  863. #~ gui.ipw_z.ipw.slice_index = z
  864. if gui.gui_thread_id != thread.get_ident():
  865. GUI.invoke_later(doStuff)
  866. else:
  867. doStuff()
  868. return
  869. def viewHistory(self, idx, count=None, store=False):
  870. ''' view the top hypothesis at a signal index
  871. use count for slicing with [count:]
  872. TODO: move since gui-specific'''
  873. gui = self.gui
  874. #~ data = np.zeros((gui.cube_res_x, gui.cube_res_y, gui.cube_res_z))
  875. #~ a = self.decoder.history[idx][count:]
  876. #~ min_p = np.min(a)
  877. #~ for (x, y, z, p) in a:
  878. #~ data[x, y, z] = p - min_p
  879. #~ uncubeddata = speed.uncube(data, (gui.res_x, gui.res_y, gui.res_z))
  880. if idx >= len(self.decoder.history):
  881. log.warn('idx %s >= %s' % (idx, len(self.decoder.history)))
  882. idx = len(self.decoder.history) - 1
  883. elif idx < 0:
  884. idx = 0
  885. count = count if count is not None else 10000
  886. count = min(count, len(self.decoder.history[0]))
  887. log.info('view history %s with %s...' % (idx, count))
  888. for i, (x, y, z, v) in enumerate(self.decoder.history[0][-10:]):
  889. log.info('top: %s xyz: %d,%d,%d, value: %.2f' % (10-i, x, y, z, v))
  890. uncubeddata = speed.viewPruningHistory(self.decoder.history, idx, self.uncubed_data_shape, self.cubewidth,
  891. count, view_values=True, raw_values=False)
  892. def doStuff():
  893. #~ self.gui.scalar_field.mlab_source.scalars = uncubeddata
  894. self.gui.scalar_field.mlab_source.set(scalars=uncubeddata)
  895. if store:
  896. print 'XXX', idx
  897. self.gui.scene.mlab.savefig('r:/out_%03d.png' % idx)
  898. log.info('view history done')
  899. GUI.invoke_later(doStuff)
  900. def _buildTriangleCubeIntersection(self):
  901. #_isblocking = lambda n: any(s in n.lower() for s in {'concrete', 'wall', 'window', 'stairs', 'fascade'})
  902. _isblocking = lambda n: any(s in n.lower() for s in {'walls_green','walls_red','toilet','roof','piles','floor','stairs', 'fascade',})
  903. _isUnpassable = lambda n: any(s in n.lower() for s in {'cupboard', 'table', 'hardware','railing','elevator','boxes'})
  904. _isNotblocking = lambda n: any(s in n.lower() for s in {'glassdoor','irondoor','doors'})
  905. blockingObjectNames = [n for n in self.env.mesh.objects.keys() if _isblocking(n)]
  906. unblockingObjectNames = [n for n in self.env.mesh.objects.keys() if _isNotblocking(n)]
  907. unpassableObjectNames = [n for n in self.env.mesh.objects.keys() if _isUnpassable(n)]
  908. print "blocking materials:" , blockingObjectNames
  909. CACHED = True
  910. CACHE_FILE = self.env.tmpdir.joinpath('blocked_cubes_%s.npy' % self.cubewidth)
  911. MIN_UNBLOCK_WIDTH = 3 # minimum cubewidth that triggers unblocking
  912. if CACHED and CACHE_FILE.exists():# and self.env.objfile.mtime < CACHE_FILE.mtime:
  913. try:
  914. return np.load(CACHE_FILE)
  915. except Exception:
  916. log.exception('error during loading')
  917. log.info('building blocked cubes... [%s]' % CACHE_FILE.abspath())
  918. vi_dimensions = self.env.aps.values()[0].vi.di
  919. #~ assert len(set(blockingObjectNames).intersection(unblockingObjectNames)) == 0
  920. t = time.time()
  921. #HACK for e1 model
  922. ADD_BLOCKING = [((-32.,-11.,0.2),(-32.,14.,0.2),(32.,14.,0.2)),((32.,-11.,0.2),(32.,14.,0.2),(-32.,-11.,0.2))]
  923. blts = list(ADD_BLOCKING)
  924. for name in blockingObjectNames:
  925. blts.extend(self.env.mesh.itertriangles(name))
  926. # HACK for umic model
  927. ADDITIONAL_UNBLOCKING = []
  928. #= [((38.16, 9.8, 0.1), (38.16, 9.8, 1.8), (37.5, 9.8, 0.1)),
  929. # ((37.0, 4.0, 0.1), (38.0, 3.9, 1.8), (38.0, 3.7, 0.1)),
  930. # ]
  931. unblts = list(ADDITIONAL_UNBLOCKING)
  932. if self.cubewidth > MIN_UNBLOCK_WIDTH:
  933. for name in unblockingObjectNames:
  934. unblts.extend(self.env.mesh.itertriangles(name))
  935. unpass = []
  936. for name in unpassableObjectNames:
  937. unpass.extend(self.env.mesh.itertriangles(name))
  938. bl_triangles = np.zeros((len(blts), 3, 3))
  939. unpass_triangles = np.zeros((len(unpass), 3, 3))
  940. unbl_triangles = np.zeros((len(unblts), 3, 3))
  941. all_ts = [(blts, bl_triangles),
  942. (unpass, unpass_triangles),
  943. (unblts, unbl_triangles)]
  944. for ts, np_ts in all_ts:
  945. for i, (p1, p2, p3) in enumerate(ts):
  946. np_ts[i, 0, 0] = p1[0]
  947. np_ts[i, 0, 1] = p1[1]
  948. np_ts[i, 0, 2] = p1[2]
  949. np_ts[i, 1, 0] = p2[0]
  950. np_ts[i, 1, 1] = p2[1]
  951. np_ts[i, 1, 2] = p2[2]
  952. np_ts[i, 2, 0] = p3[0]
  953. np_ts[i, 2, 1] = p3[1]
  954. np_ts[i, 2, 2] = p3[2]
  955. blockedCubes = speed.buildTriangleCubeIntersection(bl_triangles, unpass_triangles, unbl_triangles, self.cubed_data_shape, self.cubewidth, vi_dimensions)
  956. log.debug('evaluated blocked cubes in %.3f sec' % (time.time() - t))
  957. np.save(CACHE_FILE, blockedCubes)
  958. return blockedCubes
  959. def analyzePathSequence(self, tmpdir, path_sequence, path_sequence_seq, measurements, errors_2d, seq_errors_2d, name, errors_3d=None, seq_errors_3d=None):
  960. decoder = self.decoder
  961. c2m = self.env.cube2meter
  962. apids = self.apids
  963. ma = measurements.asArray(apids)
  964. eprobs = self.emission_probs
  965. #~ tprobs = self.transition_probs
  966. #~ ox, oy, oz = self.transition_shape / 2
  967. x, y, z = path_sequence[0]
  968. xs, ys, zs = path_sequence_seq[0]
  969. means, sigmas = eprobs[x, y, z][0], eprobs[x, y, z][1]
  970. ep = 0# decoder.getEmissionCost(eprobs[x, y, z, 0], ma[0])
  971. #~ ep = gauss_nd_in_logspace(means, sigmas, ma[0], reduced=True)
  972. tree = StringToTree('<path/>')
  973. #~ tree = StringToTree('<path tshape_x="%s" tshape_y="%s" tshape_z="%s"/>' % tuple(self.transition_shape))
  974. pel = tree.getroot()
  975. xm, ym, zm = c2m(x, y, z, self.cubewidth)
  976. x_seq, y_seq, z_seq = c2m(xs, ys, zs, self.cubewidth)
  977. interpolated = 'true' if measurements[0].interpolated else 'false'
  978. attribs = {'ep': round(ep, 2), 'tp': 0, 'sx': x, 'sy': y, 'sz': z,
  979. 'x': round(xm, 2), 'y': round(ym, 2), 'z': round(zm, 2),
  980. 'xseq': round(x_seq, 2), 'yseq': round(y_seq, 2), 'zseq': round(z_seq, 2),
  981. 'dx': 0, 'dy': 0, 'dz': 0, 'interpolated': interpolated,
  982. 'err2d': round(errors_2d[0], 3), 'seqerr2d': round(seq_errors_2d[0], 3),
  983. 'err3d': round(errors_3d[0], 3), 'seqerr3d': round(seq_errors_3d[0], 3),
  984. 'ts': 0,
  985. 'truex':measurements[0].pos.x,'truey':measurements[0].pos.y,'truez':measurements[0].pos.z}
  986. e = subelement(pel, 'pos', attribs=attribs)
  987. attribs = {'ap_%s' % apid: '%.1f/%.1f%s' % (eprobs[x, y, z, 0, i], eprobs[xs, ys, zs, 0, i], ma[0][i]) for i, apid in enumerate(apids)}
  988. subelement(e, 'apdata', attribs=attribs)
  989. first_ts = measurements[0].timestamp
  990. zipped = zip(path_sequence, path_sequence[1:], path_sequence_seq[1:], ma[1:], measurements[1:], errors_2d[1:], seq_errors_2d[1:], errors_3d[1:], seq_errors_3d[1:])
  991. for (x1, y1, z1), (x2, y2, z2), (x3, y3, z3), values, m, err2d, seqerr2d, err3d, seqerr3d in zipped:
  992. dx, dy, dz = x1-x2, y1-y2, z1-z2
  993. #~ tx, ty, tz = ox + dx, oy + dy, oz + dz
  994. tp = 0#tprobs[x2, y2, z2, tx, ty, tz]
  995. ep = 0#decoder.getEmissionCost(eprobs[x2, y2, z2, 0], values)
  996. #~ print round(ep, 1), round(tp), '%s,%s,%s' % (dx, dy, dz)
  997. interpolated = 'true' if m.interpolated else 'false'
  998. xm, ym, zm = c2m(x2, y2, z2, self.cubewidth)
  999. x_seq, y_seq, z_seq = c2m(x3, y3, z3, self.cubewidth)
  1000. attribs = {'ep': round(ep, 2), 'tp': round(tp, 2), 'sx': x2, 'sy': y2, 'sz': z2,
  1001. 'x': round(xm, 2), 'y': round(ym, 2), 'z': round(zm, 2),
  1002. 'xseq': round(x_seq, 2), 'yseq': round(y_seq, 2), 'zseq': round(z_seq, 2),
  1003. 'dx': dx, 'dy': dy, 'dz': dz, 'interpolated': interpolated,
  1004. 'err2d': round(err2d, 3), 'seqerr2d': round(seqerr2d, 3),
  1005. 'err3d': round(err3d, 3), 'seqerr3d': round(seqerr3d, 3),
  1006. 'ts': '%.1f' % (m.timestamp - first_ts).total_seconds(),
  1007. 'truex':m.pos.x,'truey':m.pos.y,'truez':m.pos.z}
  1008. e = subelement(pel, 'pos', attribs=attribs)
  1009. attribs = {'ap_%s' % apid: '%.1f/%.1f/%.1f' % (eprobs[x2, y2, z2, 0, i], eprobs[x3, y3, z3, 0, i], values[i]) for i, apid in enumerate(apids)}
  1010. subelement(e, 'apdata', attribs=attribs)
  1011. tree.getroot().attrib['err2d'] = '%.3f' % (sum(errors_2d) / len(errors_2d))
  1012. tree.getroot().attrib['seqerr2d'] = '%.3f' % (sum(seq_errors_2d) / len(seq_errors_2d))
  1013. TreeToFile(tree, tmpdir.joinpath('%s.xml' % name))
  1014. def params(self):
  1015. return 'undefined'
  1016. def toRealWordPaths(self, measurements, *paths):
  1017. real_path = []
  1018. other_paths = [[] for i in range(len(paths))]
  1019. zipped = zip(*paths)
  1020. for j, (m, other) in enumerate(zip(measurements, zipped)):
  1021. if j <= 2:
  1022. print other
  1023. if m.pos == (0,0,0):
  1024. continue
  1025. real_path.append((m.pos.x, m.pos.y, m.pos.z))
  1026. for i, o in enumerate(other):
  1027. xx = self.env.cube2meter(o[0], o[1], o[2], self.cubewidth)
  1028. other_paths[i].append(xx)
  1029. return (real_path,) + tuple(other_paths)
  1030. class HMMLocalizer(Localizer):
  1031. no_signal_delta = 0
  1032. def __init__(self, env, transition_shape=(5, 5, 3), cubewidth=3,
  1033. prune_to=1000, freespace_scan=-1, **kwargs):
  1034. super(HMMLocalizer, self).__init__(env, **kwargs)
  1035. self.cubewidth = cubewidth
  1036. self.transition_shape = np.array(transition_shape)
  1037. self.prune_to = prune_to
  1038. self.freespace_scan = freespace_scan
  1039. self.emission_probs = None
  1040. self.transition_probs = None
  1041. self._setupStates()
  1042. self.decoder = None
  1043. self.last_measurement = None
  1044. def _setupStates(self):
  1045. vi_dimensions = self.env.aps.values()[0].vi.di
  1046. self.uncubed_data_shape = (vi_dimensions.x, vi_dimensions.y, vi_dimensions.z)
  1047. t = time.time()
  1048. self.emission_probs = speed.buildStatesWithEmissionProbs(self.uncubed_data_shape, self.apdata_list, self.cubewidth, 1, pooled_variance=2)
  1049. self.cubed_data_shape = self.emission_probs.shape[:3]
  1050. log.debug('build emission probs in %.3f sec' % (time.time() - t))
  1051. self.blockedCubes = blockedCubes = self._buildTriangleCubeIntersection()
  1052. speed.evalUnreachableAirCubes(blockedCubes, cube_height=self.env.di.sz * self.cubewidth, max_reachable_height=1.8)
  1053. #~ blockedCubes[:, :, :] = 0
  1054. t = time.time()
  1055. self.transition_probs = speed.buildTransitionProbs(self.cubed_data_shape,
  1056. self.transition_shape, blockedCubes, self.cubewidth,
  1057. freespace_scan=self.freespace_scan)
  1058. if isinstance(self.prune_to, float):
  1059. # it's a float ratio, relative to num states
  1060. self.prune_to = int(self.prune_to * (self.cubed_data_shape[0] * self.cubed_data_shape[1] * self.cubed_data_shape[2]))
  1061. #~ for x, y, z in zip(*np.where(blockedCubes > 0)):
  1062. #~ self.emission_probs[x, y, z, :, :] = 0.000001
  1063. log.debug('build transition probs in %.3f sec' % (time.time() - t))
  1064. def analyzePath(self, cubes, measurements, clf=True):
  1065. '''TODO: gui-specific
  1066. return list of emission + transition cost
  1067. '''
  1068. eprobs = self.emission_probs
  1069. tprobs = self.transition_probs
  1070. ox, oy, oz = self.transition_shape / 2
  1071. x, y, z = cubes[0]
  1072. log_p = self.decoder.getEmissionCost(eprobs[x, y, z, 0], measurements[0])
  1073. #~ means, sigmas = eprobs[x, y, z][0], eprobs[x, y, z][1]
  1074. #~ log_p = gauss_nd_in_logspace(means, sigmas, measurements[0], reduced=True)
  1075. def _fmt(a):
  1076. return ','.join('%.1f' % e for e in a)
  1077. plot_xs = range(len(cubes))
  1078. plot_ys = [0]
  1079. deltas = [0]
  1080. for (x1, y1, z1), (x2, y2, z2), values in zip(cubes, cubes[1:], measurements[1:]):
  1081. dx, dy, dz = x2-x1, y2-y1, z2-z1
  1082. #~ if abs(dx) == 2 or abs(dy) == 2 or abs(dz) == 2:
  1083. #~ print 'jump 2'
  1084. #~ elif dx == 0 and dy == 0 and dz == 0:
  1085. #~ print 'jump 0'
  1086. tx, ty, tz = ox + dx, oy + dy, oz + dz
  1087. # Hmm, i dont understand why I use x1 instead of x2 here...
  1088. tp = tprobs[x1, y1, z1, tx, ty, tz]
  1089. #~ means, sigmas = eprobs[x2, y2, z2][0], eprobs[x2, y2, z2][1]
  1090. ep = self.decoder.getEmissionCost(eprobs[x2, y2, z2, 0], values)
  1091. #~ ep = gauss_nd_in_logspace(means, sigmas, values, reduced=True)
  1092. log_p = log_p + tp + ep
  1093. deltas.append(tp + ep)
  1094. plot_ys.append(log_p)
  1095. #~ print '%s %s %s tp: %.1f ep: %.1f total: %.1f' % (x2, y2, z2, tp, ep, log_p)
  1096. if GUI is not None:
  1097. def doStuff():
  1098. if clf:
  1099. self.gui.plot.clf()
  1100. ax1 = self.gui.plot.add_subplot(111)
  1101. ax1.grid()
  1102. #~ ax1.set_xlim(min(plot_xs), max(plot_xs))
  1103. #~ ax1.set_ylim(min(plot_ys), max(plot_ys))
  1104. ax1.plot(plot_xs[:len(plot_ys)], plot_ys[:len(plot_xs)])
  1105. GUI.invoke_later(doStuff)
  1106. return deltas
  1107. def averageEstimatedPaths(estimated_paths, SIZE=2900):
  1108. ''' combine a number of paths into a new path
  1109. * does not lead to interesting results
  1110. '''
  1111. summed = [None] * len(estimated_paths[0])
  1112. for i in range(SIZE):
  1113. estimated_path = estimated_paths[i]
  1114. for j, ((x, y, z), p) in enumerate(estimated_path):
  1115. if summed[j] is None:
  1116. summed[j] = [0, 0, 0]
  1117. summed[j][0] = summed[j][0] + x
  1118. summed[j][1] = summed[j][1] + y
  1119. summed[j][2] = summed[j][2] + z
  1120. for j, (x, y, z) in enumerate(summed):
  1121. summed[j][0] = int(x / SIZE)
  1122. summed[j][1] = int(y / SIZE)
  1123. summed[j][2] = int(z / SIZE)
  1124. estimated_path = [(e, 0) for e in summed]
  1125. return estimated_path
  1126. def localizeGui(self, start, end):
  1127. measurements = Measurements()
  1128. measurements.loadFromString(self.gui.synthetic_measurements)
  1129. measurements = Measurements(measurements[start: end])
  1130. t = time.time()
  1131. #~ print 'start viterbi decoding'
  1132. #~ estimated_path = debuggable_viterbi_decode(self, measurements)
  1133. res, measurements = self.evaluateMeasurements(measurements, interpolate=True)
  1134. estimated_path, seq_estimated_path, seq_avg_estimated_path = res['end'], res['seq'], res['seq_avg']
  1135. print estimated_path[-3:]
  1136. log.info('viterbi decode in %.3f sec' % (time.time() - t))
  1137. ## the estimated
  1138. costs = self.analyzePath(estimated_path, measurements.asArray(self.apids))
  1139. real_path, est_path, seq_est_path, seq_avg_path = self.toRealWordPaths(
  1140. measurements, estimated_path, seq_estimated_path, seq_avg_estimated_path)
  1141. errors = errorsFromLists(real_path, est_path)
  1142. err2d = errorsFromLists(real_path, est_path, mode='2d')
  1143. assert len(errors) == len(real_path)
  1144. log.info('avg_err_2d: %.2fm avg_err_3d: %.2fm' % (sum(err2d) / len(err2d), sum(errors) / len(errors)))
  1145. cube_values = errors
  1146. cube_values = None
  1147. self.analyzeEstimatedPath(estimated_path, cube_values=cube_values)
  1148. errors = errorsFromLists(real_path, seq_est_path)
  1149. err2d = errorsFromLists(real_path, seq_est_path, mode='2d')
  1150. log.info('avg_seq_err_2d: %.2fm avg_seq_err_3d: %.2fm' % (sum(err2d) / len(err2d), sum(errors) / len(errors)))
  1151. errors = errorsFromLists(real_path, seq_avg_path)
  1152. err2d = errorsFromLists(real_path, seq_avg_path, mode='2d')
  1153. log.info('avg_seq_avg_err_2d: %.2fm avg_seq_avg_err_3d: %.2fm' % (sum(err2d) / len(err2d), sum(errors) / len(errors)))
  1154. ## the real
  1155. #~ deltas2 = self.analyzePath(getCubesFromPath(self.gui, self.env), measurements_a, clf=False)
  1156. #~ estimated_path2 = getCubesFromPath(self.gui, self.env)
  1157. #~ self.analyzeEstimatedPath(estimated_path2, cube_values=deltas2, verbose=False)
def evaluateMeasurements(self, measurements, interpolate=False):
    """Decode a full measurement batch with a fresh ViterbiDecoder.

    Spawns intermediate measurements based on the walking speed, optionally
    restricts every measurement to the strongest ``max_aps`` access points,
    and returns ``({'end': ..., 'seq': ..., 'seq_avg': ...}, measurements)``.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    paste; loop/branch nesting should be verified against the original file.
    """
    # TODO: share decoder with threadlocal?
    if interpolate:
        measurements.interpolateSignals()
    num_non_spawned = len(measurements)
    # cube edge length in meters (cubewidth voxels * voxel spacing)
    cd = self.cubewidth * self.env.di.sx
    #~ num_non_spawned = len(measurements)
    measurements = measurements.spawn(walking_speed=self.walking_speed, cube_distance=cd)
    if self.verbose:
        log.info('walk: %sm/s, cd: %.3fm, measurements: %s/%s' % (self.walking_speed, cd, len(measurements), num_non_spawned))
    self.decoder = decoder = speed.ViterbiDecoder(self.blockedCubes, self.emission_probs, self.transition_probs,
        store_pruning=True, verbose=self.verbose, no_signal_delta=self.no_signal_delta)
    measurements_a = measurements.asArray(self.apids)
    if self.max_aps is not None and self.max_aps > -1:
        # keep only the strongest max_aps signals per measurement; all other
        # real signals (negative dBm values) are replaced by the sentinel 2
        # ("masked"), which gauss_nd and the decoder skip
        for m in measurements_a:
            values = []
            for i in range(len(self.apids)):
                values.append((m[i], i))
            # descending sorted moving specials to end
            # NOTE(review): positive sentinel codes get key -100 and therefore
            # sort to the FRONT of the list, not the end as the comment above
            # claims -- they can consume max_aps slots; confirm intent
            top_value_idxs = [e[1] for e in sorted(values, key=lambda x: -100 if x[0] > 0 else -x[0])]
            top_value_idxs = set(top_value_idxs[:self.max_aps])
            for i in range(len(self.apids)):
                if not i in top_value_idxs and m[i] < 0:
                    m[i] = 2
        log.info('using %s of %s aps' % (self.max_aps, len(self.apids)))
    estimated_path, seq_estimated_path, avg_seq_estimated_path = decoder.decode(measurements_a, self.prune_to, self.num_threads, full=False)
    #~ self.analyzePath(estimated_path, measurements)
    #~ for hist in decoder.history:
    #~ seq_estimated_path = [tuple(hist[-1][:3]) for hist in decoder.history]
    return {'end': estimated_path, 'seq': seq_estimated_path, 'seq_avg': avg_seq_estimated_path}, measurements
  1188. def reset(self):
  1189. self.decoder = None
  1190. self.last_measurement = None
def evaluateNext(self, measurements):
    """Streaming decode: feed the next measurement batch into the decoder.

    Creates the ViterbiDecoder on the first call; subsequent calls continue
    the running decode (``continueDecoding=True``) so the estimated path
    stays consistent across batch boundaries.
    Returns ``({'end': ..., 'seq': ..., 'seq_avg': ...}, measurements)``.

    NOTE(review): nesting reconstructed from a whitespace-mangled paste --
    verify which statements belong inside the ``last_measurement`` branch.
    """
    if self.decoder is None:
        self.decoder = decoder = speed.ViterbiDecoder(self.blockedCubes, self.emission_probs, self.transition_probs,
            store_pruning=True, verbose=self.verbose, no_signal_delta=self.no_signal_delta)
        continueDecoding = False
    else:
        decoder = self.decoder
        continueDecoding = True
    if self.last_measurement is not None:
        # prepend the previous batch's final measurement so spawn() can
        # interpolate across the batch boundary
        measurements.insert(0, self.last_measurement)
    cd = self.cubewidth * self.env.di.sx
    measurements = measurements.spawn(walking_speed=self.walking_speed, cube_distance=cd)
    # drop the first entry (the duplicated boundary measurement)
    # NOTE(review): on the very first call nothing was prepended, so this
    # appears to discard a real measurement -- confirm this is intended
    measurements = Measurements(measurements[1:])
    self.last_measurement = measurements[-1]
    measurements_a = measurements.asArray(self.apids)
    res = decoder.decode(measurements_a, self.prune_to, self.num_threads, full=False, continueDecoding=continueDecoding)
    return {'end': res[0], 'seq': res[1], 'seq_avg': res[2]}, measurements
  1208. def gauss_nd(means, sq_sigmas, values):
  1209. p = 1
  1210. for mu, sq_sigma, x in zip(means, sq_sigmas, values):
  1211. if x == 2:
  1212. continue
  1213. elif x == 1: # no measurement
  1214. continue
  1215. p = p * math.sqrt(2 * math.pi * sq_sigma) * math.exp(-(x - mu)**2 / (2 * sq_sigma))
  1216. return p
  1217. def debuggable_particle_filter(measurements, emission_probs, num_particles, cubed_data_shape, seed=1234):
  1218. seq_res = []
  1219. sigmas = [9] * len(measurements[0])
  1220. np.random.seed(seed)
  1221. particles = np.dstack(np.random.randint(0, size, num_particles) for size in cubed_data_shape)[0]
  1222. weights = np.zeros((num_particles, ), dtype=np.double)
  1223. SIZE_POOL = 10000
  1224. sampling_pool = np.random.multivariate_normal([0,0,0], [[5,0,0],[0,5,0],[0,0,2]], SIZE_POOL)
  1225. sampling_pool_idx = 0
  1226. for j, xs in enumerate(measurements):
  1227. print 'm: %s of %s' % (j, len(measurements))
  1228. w_max = 0
  1229. w_news = []
  1230. for p in particles:
  1231. if p[0] == -1:
  1232. continue
  1233. mus = emission_probs[p[0], p[1], p[2], 0]
  1234. #~ w_new = w * gauss_nd(mus, sigmas, xs)
  1235. w_new = gauss_nd(mus, sigmas, xs)
  1236. w_news.append(w_new)
  1237. if w_new > w_max:
  1238. w_max = w_new
  1239. p_best = p
  1240. seq_res.append(p_best)
  1241. # renormalize
  1242. sum_w_new = sum(w_news)
  1243. for i, w in enumerate(w_news):
  1244. weights[i] = w / sum_w_new
  1245. new_particles = np.zeros((num_particles, 3))
  1246. total_spawned = 0
  1247. for p, w in zip(particles, weights):
  1248. num_spawn_particles = int(round(num_particles * w, 0))
  1249. # prevent overflow, due to rounding
  1250. if total_spawned + num_spawn_particles > num_particles:
  1251. num_spawn_particles = num_particles - total_spawned
  1252. idx = total_spawned
  1253. num_spawned = 0
  1254. while num_spawned < num_spawn_particles:
  1255. dx, dy, dz = sampling_pool[sampling_pool_idx % SIZE_POOL]
  1256. sampling_pool_idx += 1
  1257. new_x = p[0] + int(round(dx, 0))
  1258. new_y = p[1] + int(round(dy, 0))
  1259. new_z = p[2] + int(round(dz, 0))
  1260. if (0 <= new_x < cubed_data_shape[0]
  1261. and 0 <= new_y < cubed_data_shape[1]
  1262. and 0 <= new_z < cubed_data_shape[2]):
  1263. new_particles[idx, 0] = new_x
  1264. new_particles[idx, 1] = new_y
  1265. new_particles[idx, 2] = new_z
  1266. idx += 1
  1267. num_spawned += 1
  1268. total_spawned += num_spawn_particles
  1269. for i in range(total_spawned, num_particles):
  1270. particles[i][0] = -1
  1271. particles = new_particles
  1272. print seq_res
  1273. return seq_res, seq_res
def viewPFTransitionsAtPos(gui, pflocalizer):
    """Visualize the particle filter's transition model at the GUI position.

    Draws 1000 valid transition samples from the localizer's sampling pool
    around (gui.state_x/y/z), rejecting samples that leave the grid, land on
    a blocked cube, or whose straight line to the target crosses a blocked
    cube. The accepted samples are accumulated into a histogram volume and
    pushed to the GUI scalar field.

    NOTE(review): indentation reconstructed from a whitespace-mangled paste;
    the placement of the two trailing log.info guards should be verified.
    """
    t = time.time()
    sampling_pool = pflocalizer.buildSamplingPool()
    pool_size = len(sampling_pool)
    blockedCubes = pflocalizer.blockedCubes
    # current GUI cursor position in cube coordinates
    px = gui.state_x
    py = gui.state_y
    pz = gui.state_z
    s_shape_x, s_shape_y, s_shape_z = pflocalizer.cubed_data_shape
    sample_histogram = np.zeros(pflocalizer.cubed_data_shape)
    # cubes already proven unreachable from (px, py, pz); lets later samples
    # fail fast without re-tracing the line of sight
    local_block_map = np.zeros(pflocalizer.cubed_data_shape)
    lround = lambda x: int(round(x, 0))
    UN_BLOCKED = 0
    OVER_SAMPLE = 5  # allow up to 5 rejected draws per accepted sample
    idx = 0
    num_spawn_particles = 1000
    num_spawned = 0
    sampling_pool_idx = 0
    for i in range(num_spawn_particles * OVER_SAMPLE): # limit retries
        if num_spawned == num_spawn_particles:
            break
        curr_pool_idx = sampling_pool_idx % pool_size
        dx = sampling_pool[curr_pool_idx, 0]
        dy = sampling_pool[curr_pool_idx, 1]
        dz = sampling_pool[curr_pool_idx, 2]
        sampling_pool_idx += 1
        dxi = lround(dx)
        dyi = lround(dy)
        dzi = lround(dz)
        new_px = px + dxi
        new_py = py + dyi
        new_pz = pz + dzi
        # check if we are still in the allowed space
        if not (0 <= new_px < s_shape_x and 0 <= new_py < s_shape_y and 0 <= new_pz < s_shape_z):
            continue # try again
        if local_block_map[new_px, new_py, new_pz] == 1:
            continue # try again
        # check if destination is blocked
        if blockedCubes[new_px, new_py, new_pz] != UN_BLOCKED:
            continue # try again
        # scan space between p and new_p for BLOCKED cubes
        # and reject sample if True
        distance = math.sqrt(dxi * dxi + dyi * dyi + dzi * dzi) # euclidian
        if distance == 0:
            continue
        # unit step vector for the line-of-sight walk
        step_x = dxi / distance
        step_y = dyi / distance
        step_z = dzi / distance
        curr_x = px
        curr_y = py
        curr_z = pz
        curr_x_i = int(curr_x); curr_y_i = int(curr_y); curr_z_i = int(curr_z)
        blocked_path = 0
        for ii in range(int(distance)):
            #~ while not(curr_x_i == new_px or curr_y_i == new_py or curr_z_i == new_pz):
            curr_x = curr_x + step_x
            curr_y = curr_y + step_y
            curr_z = curr_z + step_z
            curr_x_i = int(curr_x); curr_y_i = int(curr_y); curr_z_i = int(curr_z)
            #~ print curr_x, curr_y, curr_z, ':', dx, dy, dz, ':', px, py, pz
            if 0 <= curr_x_i < s_shape_x and 0 <= curr_y_i < s_shape_y and 0 <= curr_z_i < s_shape_z:
                if blockedCubes[curr_x_i, curr_y_i, curr_z_i] != UN_BLOCKED:
                    blocked_path = 1
                    local_block_map[curr_x_i, curr_y_i, curr_z_i] = 1
                    break
        if blocked_path == 1:
            # mark all cubes after the blocked hit also also blocked
            for ii in range(ii, int(distance)):
                curr_x = curr_x + step_x
                curr_y = curr_y + step_y
                curr_z = curr_z + step_z
                curr_x_i = int(curr_x); curr_y_i = int(curr_y); curr_z_i = int(curr_z)
                if 0 <= curr_x_i < s_shape_x and 0 <= curr_y_i < s_shape_y and 0 <= curr_z_i < s_shape_z:
                    local_block_map[curr_x_i, curr_y_i, curr_z_i] = 1
            #~ print 'blocked..., resample'
            continue # try again
        log.info('got %s' % num_spawned)
        # ok, we got a valid sample
        #~ new_particles[idx, 0] = new_px
        #~ new_particles[idx, 1] = new_py
        #~ new_particles[idx, 2] = new_pz
        sample_histogram[new_px, new_py, new_pz] += 1
        idx += 1
        num_spawned += 1
    if i == num_spawn_particles * OVER_SAMPLE - 1:
        log.info('OVER_SAMPLE limit reached')
    log.info('needed %s samplings in %.3f secs' % (sampling_pool_idx, time.time() - t))
    log.info('uncube...')
    data = speed.uncube(sample_histogram, pflocalizer.uncubed_data_shape)
    def doStuff():
        # runs on the GUI thread
        log.info('visualize...')
        gui.scalar_field.mlab_source.scalars = data
        #~ gui.surface_number_of_contours = 10
        #~ gui.surface_view.visible = True
    GUI.invoke_later(doStuff)
class PFLocalizer(Localizer):
    """Particle-filter based localizer.

    Decodes a measurement sequence with speed.ParticleFilter using a
    pre-sampled Gaussian transition pool and per-cube emission probabilities.
    """
    def __init__(self, env, cubewidth, with_history=False, num_particles=20000, transition_sigmas=(4.0, 4.0, 2.0),
            avg_over_particles=50, do_blocking=True, smooth=1.0, emission_sigma=5.0,
            turnoff_blocking=1e3, replace_below=0, max_replace=100, **kwargs):
        # transition_sigmas are given in meters (x, y, z) and converted to
        # cube units in buildSamplingPool
        super(PFLocalizer, self).__init__(env, **kwargs)
        self.num_particles = num_particles
        self.cubewidth = cubewidth
        self._setupStates()
        self.with_history = with_history
        self.transition_sigmas = transition_sigmas
        self.do_blocking = do_blocking
        self.smooth = smooth
        self.emission_sigma = emission_sigma
        self.avg_over_particles = avg_over_particles
        self.turnoff_blocking = turnoff_blocking
        self.replace_below = replace_below
        self.max_replace = max_replace
    def buildSamplingPool(self):
        """Pre-sample 20000 zero-mean Gaussian transition offsets (cube units)."""
        cube_distance = self.cubewidth * self.env.di.sx
        # sigma in real space (meter)
        sigma_x, sigma_y, sigma_z = self.transition_sigmas
        #adapt sigma to cube space
        sigma_x_cube = sigma_x / cube_distance
        sigma_y_cube = sigma_y / cube_distance
        sigma_z_cube = sigma_z / cube_distance
        # NOTE(review): the covariance diagonal is filled with the sigmas
        # themselves, not sigma**2 -- confirm whether variances were intended
        return np.random.multivariate_normal([0,0,0], [[sigma_x_cube,0,0],[0,sigma_y_cube,0],[0,0,sigma_z_cube]], 20000)
    def params(self):
        """Return a human-readable summary of all filter parameters."""
        vals = (self.cubewidth, self.num_particles, self.transition_sigmas, self.smooth,
            self.do_blocking, self.emission_sigma, self.avg_over_particles,
            self.turnoff_blocking, self.replace_below, self.max_replace)
        return 'cubewidth:%s num_particles:%s transition_sigmas:%s smooth:%s do_blocking:%s emission_sigma:%s avg_over_particles:%s turnoff_blocking:%s replace_below:%s max_replace:%s' % vals
    def localizeGui(self, start, end):
        """Run the filter on the GUI's synthetic measurements [start:end]
        and log 2d/3d errors for the end/seq/seq_avg path estimates."""
        #~ return
        measurements = Measurements()
        #~ self.do_blocking = False
        measurements.loadFromString(self.gui.synthetic_measurements)
        measurements = Measurements(measurements[start: end])
        res, measurements = self.evaluateMeasurements(measurements)
        estimated_path, seq_estimated_path, seq_avg_estimated_path = res['end'], res['seq'], res['seq_avg']
        # NOTE(review): seq_avg_estimated_path is re-assigned right below,
        # making the tuple unpack above partially redundant
        if res['seq_avg'] is not None:
            seq_avg_estimated_path = res['seq_avg']
        else:
            # placeholder path so toRealWordPaths gets equally long inputs
            seq_avg_estimated_path = [(0,0,0)] * len(estimated_path)
        print estimated_path[-3:]
        real_path, est_path, seq_path, seq_avg_path = self.toRealWordPaths(
            measurements, estimated_path, seq_estimated_path, seq_avg_estimated_path)
        errors = errorsFromLists(real_path, est_path)
        err2d = errorsFromLists(real_path, est_path, mode='2d')
        assert len(errors) == len(real_path)
        log.info('avg_err_2d: %.2fm avg_err_3d: %.2fm' % (sum(err2d) / len(err2d), sum(errors) / len(errors)))
        self.analyzeEstimatedPath(estimated_path, cube_values=None)
        errors = errorsFromLists(real_path, seq_path)
        err2d = errorsFromLists(real_path, seq_path, mode='2d')
        log.info('avg_seq_err_2d: %.2fm avg_seq_err_3d: %.2fm' % (sum(err2d) / len(err2d), sum(errors) / len(errors)))
        if res['seq_avg'] is not None:
            errors = errorsFromLists(real_path, seq_avg_path)
            err2d = errorsFromLists(real_path, seq_avg_path, mode='2d')
            log.info('avg_seq_avg_err_2d: %.2fm avg_seq_avg_err_3d: %.2fm' % (sum(err2d) / len(err2d), sum(errors) / len(errors)))
    def evaluateMeasurements(self, measurements, interpolate=False):
        """Decode a measurement batch with a fresh speed.ParticleFilter.

        Returns ({'end': ..., 'seq': ..., 'seq_avg': ...}, measurements).
        """
        if interpolate:
            measurements.interpolateSignals()
        num_non_spawned = len(measurements)
        #~ measurements = measurements.spawn(cube_distance=self.cubewidth * self.env.di.sx)
        #~ print measurements
        measurements_a = measurements.asArray(self.apids)
        # this has to be moved
        np.random.seed(1234)
        sampling_pool = self.buildSamplingPool()
        # NOTE(review): this unconditionally overrides the turnoff_blocking
        # constructor argument -- looks like leftover debugging; confirm
        self.turnoff_blocking = True
        if self.verbose:
            log.info('measurements: %s/%s, num_particles: %s, sigmas: %s, blocking: %s' % (len(measurements_a), num_non_spawned, self.num_particles, self.transition_sigmas, not self.turnoff_blocking))
        self.decoder = pf = speed.ParticleFilter(self.blockedCubes, self.emission_probs,
            sampling_pool, verbose=self.verbose, with_history=self.with_history)
        t = time.time()
        (estimated_path,
            seq_estimated_path,
            seq_avg_estimated_path) = pf.decode(measurements_a,
                self.num_particles,
                do_blocking=self.do_blocking,
                average_over_particles=self.avg_over_particles,
                smooth=self.smooth,
                replace_below=self.replace_below,
                max_replace=self.max_replace,
                sigma=self.emission_sigma,
                turnoff_blocking=self.turnoff_blocking)
        #~ for p1, p2 in zip(seq_estimated_path, seq_avg_estimated_path):
        #~ print p1, ':', p2
        #~ print seq_res
        #~ assert seq_res[4][1] == 32
        #~ print time.time() - t
        #~ import os; os._exit(0)
        #~ seq_res, seq_res = debuggable_particle_filter(measurements, self.emission_probs, self.num_particles, self.cubed_data_shape)
        return {'end': estimated_path, 'seq': seq_estimated_path, 'seq_avg': seq_avg_estimated_path}, measurements
class LMSELocalizer(Localizer):
    """Least-mean-square-error localizer: picks, per measurement, the cube
    whose expected signals minimize the squared error (no path model)."""
    def __init__(self, env, cubewidth, with_history=False, **kwargs):
        super(LMSELocalizer, self).__init__(env, **kwargs)
        self.with_history = with_history
        self.cubewidth = cubewidth
        self._setupStates()
    def _setupStates(self):
        """Build the cube-grid emission probabilities from the AP volume data."""
        # grid dimensions are taken from the first AP's volume image
        vi_dimensions = self.env.aps.values()[0].vi.di
        self.uncubed_data_shape = (vi_dimensions.x, vi_dimensions.y, vi_dimensions.z)
        t = time.time()
        #~ self.emission_probs = np.load('r:/eprobs.npy')
        self.emission_probs = speed.buildStatesWithEmissionProbs(self.uncubed_data_shape, self.apdata_list, self.cubewidth, 1, pooled_variance=2)
        #~ np.save('r:/eprobs.npy', self.emission_probs)
        self.cubed_data_shape = self.emission_probs.shape[:3]
        log.debug('build emission probs in %.3f sec' % (time.time() - t))
        #~ self.blockedCubes = blockedCubes = self._buildTriangleCubeIntersection()
        #~ speed.evalUnreachableAirCubes(blockedCubes, cube_height=self.env.di.sz * self.cubewidth, max_reachable_height=2.0)
    def evaluateMeasurements(self, measurements, interpolate=False):
        """Decode measurements with speed.LMSE; returns ({'seq': path}, measurements)."""
        if interpolate:
            measurements.interpolateSignals()
        measurements_a = measurements.asArray(self.apids)
        if self.verbose:
            log.info('measurements: %s, interpolate: %s' % (len(measurements_a), interpolate))
        self.decoder = lmse = speed.LMSE(self.emission_probs, with_history=self.with_history, verbose=self.verbose)
        seq_estimated_path = lmse.decode(measurements_a)
        return {'seq': seq_estimated_path}, measurements
    def evaluateNext(self, measurements):
        # LMSE is stateless per measurement, so streaming == batch evaluation
        return self.evaluateMeasurements(measurements, interpolate=False)
    def localizeGui(self, start, end):
        """Run LMSE on the GUI's synthetic measurements [start:end] and log errors."""
        measurements = Measurements()
        measurements.loadFromString(self.gui.synthetic_measurements)
        measurements = Measurements(measurements[start: end])
        res, measurements = self.evaluateMeasurements(measurements)
        seq_estimated_path = res['seq']
        print seq_estimated_path[-3:]
        seq_avg_estimated_path = seq_estimated_path
        ## the estimated
        real_path, seq_path, seq_avg_path = self.toRealWordPaths(
            measurements, seq_estimated_path, seq_avg_estimated_path)
        errors = errorsFromLists(real_path, seq_path)
        err2d = errorsFromLists(real_path, seq_path, mode='2d')
        log.info('avg_seq_err_2d: %.2fm avg_seq_err_3d: %.2fm' % (sum(err2d) / len(err2d), sum(errors) / len(errors)))
        print seq_estimated_path
        self.analyzeEstimatedPath(seq_estimated_path, cube_values=None)
        #~ if res['seq_avg'] is not None:
        #~ errors = errorsFromLists(real_path, seq_avg_path)
        #~ err2d = errorsFromLists(real_path, seq_avg_path, mode='2d')
        #~ log.info('avg_seq_avg_err_2d: %.2fm avg_seq_avg_err_3d: %.2fm' % (sum(err2d) / len(err2d), sum(errors) / len(errors)))
  1510. def localizeGui(gui, _log, algo, start, end):
  1511. try:
  1512. global log; log = _log
  1513. env = Environment(gui.objfile, aps=getAPsFromGui(gui), vi_path=gui.apdatafiles, tmpdir=gui.tmpdir)
  1514. if algo == 'hmm':
  1515. localizer = HMMLocalizer(env, gui=gui, cubewidth=gui.cube_width, prune_to=gui.prune_to,
  1516. num_threads=gui.threads, freespace_scan=gui.freespace_scan)
  1517. elif algo == 'pf':
  1518. localizer = PFLocalizer(env, gui=gui, cubewidth=gui.cube_width, with_history=True)
  1519. elif algo == 'lmse':
  1520. localizer = LMSELocalizer(env, gui=gui, cubewidth=gui.cube_width, with_history=True)
  1521. localizer.localizeGui(start, end)
  1522. #~ localizer = StreamingHMMLocalizer(env, gui=gui, cubewidth=gui.cube_width, prune_to=gui.prune_to, num_threads=gui.threads)
  1523. #~ localizer.test()
  1524. except Exception:
  1525. log.exception('error happened')
  1526. def viewBlockedCubes(gui, _log):
  1527. try:
  1528. global log
  1529. log = _log
  1530. print '---'
  1531. env = Environment(gui.objfile, tmpdir=gui.tmpdir, aps=getAPsFromGui(gui), vi_path=gui.apdatafiles)
  1532. log.info('setup localizer...')
  1533. localizer = HMMLocalizer(env, gui=gui, cubewidth=gui.cube_width, prune_to=gui.prune_to, num_threads=gui.threads)
  1534. log.info('uncube...')
  1535. data = speed.uncube(localizer.blockedCubes.astype(np.double), localizer.uncubed_data_shape)
  1536. # aggregate all transition probs
  1537. #~ combined_transition_probs[:, :, :10] = 0
  1538. #~ combined_transition_probs[:, :, 11:] = 0
  1539. #~ data = speed.uncube(combined_transition_probs, self.uncubed_data_shape)
  1540. #~ print blockedCubes.shape
  1541. def doStuff():
  1542. log.info('visualize...')
  1543. gui.surface_view.visible = False
  1544. updateScalarField(env, gui, data)
  1545. GUI.invoke_later(doStuff)
  1546. except Exception:
  1547. log.exception('error happened')
  1548. def getAPsFromGui(gui, assertLoadable=True):
  1549. if not hasattr(gui, '_aps'):
  1550. gui._aps = {}
  1551. #~ gui._aps.clear()
  1552. device = gui.device_name
  1553. optrun = gui.optrun_session
  1554. # cache ap at gui - since aps cache their normalization values
  1555. # we build one for each device
  1556. for ap in gui.aps:
  1557. if not ap.used:
  1558. gui._aps.pop((optrun, device, ap.id), None)
  1559. else:
  1560. if (optrun, device, ap.id) in gui._aps:
  1561. continue
  1562. _ap = AccessPoint(name=ap.id, x=ap.x, y=ap.y, z=ap.z, volumeImagePath=gui.apdatafiles, davalues=gui.davalues.get(device, []))
  1563. if assertLoadable:
  1564. assert _ap.vi is not None
  1565. gui._aps[(optrun, device, ap.id)] = _ap
  1566. return [ap for (o, d, apid), ap in gui._aps.items() if d == device and o == optrun]
  1567. def getCubesFromPath(gui, env):
  1568. cubes = []
  1569. for p1, p2 in zip(gui.localization_path, gui.localization_path[1:]):
  1570. dx, dy, dz = p2.x - p1.x, p2.y - p1.y, p2.z - p1.z
  1571. dist = (dx**2 + dy**2 + dz**2)**0.5
  1572. one_step = gui.cube_width * gui.res_dx * 0.5
  1573. num_steps = int(dist / one_step)
  1574. if num_steps == 0:
  1575. continue
  1576. x_step, y_step, z_step = dx / num_steps, dy / num_steps, dz / num_steps
  1577. for i in range(0, num_steps):
  1578. rx, ry, rz = p1.x + i * x_step , p1.y + i * y_step, p1.z + i * z_step
  1579. x, y, z = env.translateToVoxel(rx, ry, rz, gui.cube_width)
  1580. #~ print i, x, y, z, '->', x, y, z
  1581. if x < gui.cube_res_x and y < gui.cube_res_y and z < gui.cube_res_z:
  1582. if not cubes or not(cubes[-1][0] == x and cubes[-1][1] == y and cubes[-1][2] == z):
  1583. cubes.append((x, y, z))
  1584. #~ print 123, len(cubes), env.di
  1585. return np.array(cubes)
  1586. def buildPathCubes(gui, env=None):
  1587. tx, ty, tz = gui.bbox.x1, gui.bbox.y1, gui.bbox.z1
  1588. dx, dy, dz = gui.res_dx, gui.res_dy, gui.res_dz
  1589. if env is None:
  1590. env = Environment(di=Dimensions(tx, ty, tz, dx, dy, dz))
  1591. data = np.zeros((gui.cube_res_x, gui.cube_res_y, gui.cube_res_z))
  1592. cubes = getCubesFromPath(gui, env)
  1593. #~ print '345', len(cubes)
  1594. for i, (x, y, z) in enumerate(cubes, max(1, int(len(cubes) * 1.0 / gui.surface_number_of_contours))):
  1595. data[x, y, z] = i
  1596. uncubeddata = speed.uncube(data, (gui.res_x, gui.res_y, gui.res_z))
  1597. gui.scalar_field.mlab_source.scalars = uncubeddata
  1598. return data
def localizeByDistance(env):
    """Per-sample localization demo: localize each measurement independently
    and progressively render the estimated path onto a floor-cut image.

    NOTE(review): indentation reconstructed from a whitespace-mangled paste;
    the image-drawing block is assumed to run inside the measurement loop
    (redrawing after every sample) -- verify against the original file.
    """
    # reference corner coordinates of the walked path (meters)
    coords = [(31.3, 11.0, 1.2),
        (38.0, 11.0, 1.2),
        (37.8, 5.5, 1.2),
        (50.1, 5.5, 1.2),
        (50.0, 11.0, 1.2),
        (42.0, 12.0, 1.2)]
    #~ env.extractWalkableArea()
    with_walkable = False
    NOISE = 3.0  # dB noise used when generating synthetic measurements
    #~ measurement = env.generateSyntheticMeasurementsFromCorners(coords, noise=NOISE)
    #~ measurement.save('r:/thepath.txt')
    measurement = Measurements()
    measurement.load(r'N:\lws\tmp\path_iconia_0.txt')
    #~ measurement.load('r:/thepath.txt')
    #~ asd
    #~ measurement.plot()
    #~ env.replaceAPs(VI_PATH_EMPTY)
    l = Localizer(env)
    estimated_path = []
    real_path = []
    for i, m in enumerate(measurement):
        x, y, z = l.localize(m, with_walkable=with_walkable)
        estimated_path.append((x, y, z))
        real_path.append(m.pos)
        print '--------------'
        print i
        print '--------------'
        print x, y, z
        #~ print 'wa:', env.walkable_area[env.translateToVoxel(x, y, z)]
        # redraw the debug image once at least two points exist
        if len(estimated_path) > 1:
            # pick the floor cut at the average estimated height
            zs = [p[2] for p in estimated_path]
            avg_z = sum(zs) / len(zs) + 0.5
            cutfile = env.getCutFile(avg_z)
            img = Image.open(cutfile)
            img = img.convert('RGBA')
            draw = ImageDraw.Draw(img)
            # estimated path in red
            for (x1, y1, z1), (x2, y2, z2) in zip(estimated_path, estimated_path[1:]):
                m2dx1, m2dy1 = env.translateMeterToMesh2dPixel(x1, y1)
                m2dx2, m2dy2 = env.translateMeterToMesh2dPixel(x2, y2)
                draw.line((m2dx1, m2dy1, m2dx2, m2dy2), fill='#FF0000')
            #~ for (x1, y1, z1), (x2, y2, z2) in zip(estimated_path, real_path):
            #~ m2dx1, m2dy1 = env.translateMeterToMesh2dPixel(x1, y1)
            #~ m2dx2, m2dy2 = env.translateMeterToMesh2dPixel(x2, y2)
            #~ draw.line((m2dx1, m2dy1, m2dx2, m2dy2), fill='#FF0000')
            #~ # draw.text((m2dx2-20, m2dy2), '%.4f' % wa_modifier)
            #~ for (x1, y1, z1), (x2, y2, z2) in zip(real_path, real_path[1:]):
            #~ m2dx1, m2dy1 = env.translateMeterToMesh2dPixel(x1, y1)
            #~ m2dx2, m2dy2 = env.translateMeterToMesh2dPixel(x2, y2)
            #~ draw.line((m2dx1, m2dy1, m2dx2, m2dy2), fill='#00FF00')
            #~ for name, ap in env.aps.items():
            #~ m2dx, m2dy = env.translateMeterToMesh2dPixel(ap.x, ap.y)
            #~ draw.ellipse((m2dx-2, m2dy-2, m2dx+2, m2dy+2), fill='#0000FF')
            #~ draw.text((m2dx-10, m2dy-15), name, fill='#0000FF')
            used_aps = ','.join(env.aps.keys())
            draw.text((10, 10), 'z: %.1f aps: %s, noise: %s db' % (avg_z, used_aps, NOISE), fill='#CC1111')
            #~ env.walkable_area[:,:,avg]
            img.save('r:/out_%s_%s.png' % (NOISE, with_walkable))
    errors = errorsFromLists(estimated_path, real_path)
    open('r:\errors.txt', 'w').write('\n'.join(str(e) for e in errors))
  1659. def testPruningList():
  1660. t = time.time()
  1661. speed.testFastPruningInsertinsert(100000, 1800000)
  1662. print 'total:', time.time() - t
def viewSignalGradients(gui, _log):
    """Visualize signal-strength gradients (Sobel magnitude) in the GUI.

    Clips each selected AP's dB volume at CUT_OFF, applies a Sobel filter,
    sums the absolute responses over all selected APs, zeroes the gradients
    inside (dilated) blocked cubes, and pushes the volume to the scalar
    field on the GUI thread.

    NOTE(review): only AP '107' is processed -- hard-coded debug filter.
    """
    global log; log = _log
    aps = getAPsFromGui(gui)
    uncubed_data_shape = (gui.resolution.x, gui.resolution.y, gui.resolution.z)
    to_be_summed = []
    for ap in aps:
        if not ap.name in ['107']:
            continue
        CUT_OFF = -80  # dB floor; weaker signals are clamped before Sobel
        log.info('applying sobel on %s [cutoff: %s]' % (ap.name, CUT_OFF))
        data = np.array(ap.dbdata)
        sig_too_low = np.where(data < CUT_OFF)
        data[sig_too_low] = CUT_OFF
        # negate so gradients point from weak to strong signal
        to_be_summed.append(np.abs(ndimage.filters.sobel(data * -1)))
        #~ ap_count = np.zeros(shape=ap.dbdata.shape, dtype=np.double)
        #~ ap_count[np.where(ap.dbdata > -100)] = 1
        #~ to_be_summed.append(ap_count)
    log.info('reduce...')
    data = np.add.reduce(np.array(to_be_summed))
    log.info('clearing gradients in blocked cubes')
    CUBE_WIDTH = 1
    CACHE_FILE = gui.tmpdir.joinpath('blocked_cubes_%s.npy' % CUBE_WIDTH)
    blocked_cubes = np.load(CACHE_FILE)
    #~ blocked_cubes = speed.uncube(np.load(CACHE_FILE).astype(np.double), uncubed_data_shape)
    #~ blocked_cubes = ndimage.maximum_filter(blocked_cubes, size=3)
    #~ blocked_cubes = ndimage.maximum_filter1d(blocked_cubes, size=3, axis=0)
    #~ blocked_cubes = ndimage.maximum_filter1d(blocked_cubes, size=3, axis=1)
    # dilate the block map along z so wall/floor gradients are fully removed
    blocked_cubes = ndimage.maximum_filter1d(blocked_cubes, size=7, axis=2)
    blocked_idx = np.where(blocked_cubes > 0) # 0 == UN_BLOCKED
    data[blocked_idx] = 0
    #~ data = np.add.reduce(np.array(to_be_summed)) / len(to_be_summed)
    if data.shape != uncubed_data_shape:
        log.info('uncube...')
        data = speed.uncube(data, uncubed_data_shape)
    def doStuff():
        # runs on the GUI thread
        log.info('visualize...')
        gui.scalar_field.mlab_source.scalars = data
        #~ gui.surface_number_of_contours = 10
        #~ gui.surface_view.visible = True
    GUI.invoke_later(doStuff)
  1703. def fullPruniningAnimation(localizer, step=1, count=1000, delay=0):
  1704. def _run():
  1705. time.sleep(delay)
  1706. for i in range(0, len(localizer.decoder.history), step):
  1707. localizer.viewHistory(i, count, store=True)
  1708. time.sleep(0.1)
  1709. daemonthread(target=_run)