# NOTE(review): this generated file's whitespace appears mangled — the original line breaks were
# collapsed into spaces, so several '#' comments now swallow the code that follows them on the
# same physical line, and some string literals span line breaks. Do not hand-edit: regenerate
# from AnimatedViewpointRecorderPrototype.x3d via X3dToPython.xslt. Comments added below are
# review notes only; all original code bytes are preserved unchanged.
#################################################################################################### # # Invoking X3D model self-test: # # $ python AnimatedViewpointRecorderPrototype.py # # Python package x3d.py package is available on PyPI for import. # This approach simplifies Python X3D deployment and use. # https://pypi.org/project/x3d # # Installation: # pip install x3d # or # python -m pip install x3d # # Developer options for loading x3d package in other Python programs: # # from x3d import * # preferred approach, terser source that avoids x3d.* class prefixes # # or # import x3d # traditional way to subclass x3d package, all classes require x3d.* prefix, # # but python source is very verbose, for example x3d.Material x3d.Shape etc. # # X3dToPython.xslt stylesheet insertPackagePrefix=true supports this option. # #################################################################################################### from x3d import * newModel=X3D(profile='Immersive',version='3.0', head=head( children=[ meta(content='AnimatedViewpointRecorderPrototype.x3d',name='title'), meta(content='Record camera position and orientation as user navigates, then filter values and produce output, both into the console output window and as a replayable node group. 
Future work: further filtering.',name='description'), meta(content='Don Brutzman, Ken Curtin, Duane Davis, Christos Kalogrias',name='creator'), meta(content='24 October 2003',name='created'), meta(content='12 October 2023',name='modified'), meta(content='AnimatedViewpointRecorderExample.x3d',name='reference'), meta(content='AnimatedViewpointRecorderSample.x3d',name='reference'), meta(content='http://www.realism.com/Web3D/Examples#WhereAmI',name='reference'), meta(content='http://www.realism.com/vrml/Example/WhereAmI/WhereAmI_Proto.wrl',name='reference'), meta(content='recording animated viewpoint tour',name='subject'), meta(content='https://savage.nps.edu/Savage/Tools/Authoring/AnimatedViewpointRecorderPrototype.x3d',name='identifier'), meta(content='X3D-Edit 4.0, https://savage.nps.edu/X3D-Edit',name='generator'), meta(content='../../license.html',name='license')]), Scene=Scene( # ==================== children=[ WorldInfo(title='AnimatedViewpointRecorderPrototype.x3d'), ProtoDeclare(appinfo='AnimatedViewpointRecorder captures view position and orientation tour to create a guided tour animation. The recording output goes to the browser console where the .x3d (or .x3dv) output can be cut/pasted for further use.',name='AnimatedViewpointRecorder', ProtoInterface=ProtoInterface( field=[ field(accessType='inputOnly',appinfo='Set start=true to commence recording viewpoint position/orientation.',name='start',type='SFBool'), field(accessType='inputOnly',appinfo='Set stop=true to finish recording viewpoint position/orientation. 
Resulting VRML is added to scene resulting X3D and VRML is output to console.',name='stop',type='SFBool'), field(accessType='initializeOnly',appinfo='default 0.1 seconds',name='samplingInterval',type='SFTime',value=0.1), field(accessType='initializeOnly',appinfo='whether to output .x3d syntax on browser console',name='outputX3D',type='SFBool',value=True), field(accessType='initializeOnly',appinfo='whether to output .x3d syntax on browser console',name='outputClassicVRML',type='SFBool',value=False), field(accessType='initializeOnly',appinfo='TODO not yet implemented',name='filterDeadTime',type='SFBool',value=False)]), ProtoBody=ProtoBody( children=[ Group( children=[ Group(DEF='NewViewpointGroup'), # it's a big old world out there! ProximitySensor(DEF='WhereSensor',size=(1000000000,1000000000,1000000000)), Script(DEF='RecordingScript',directOutput=True, field=[ field(accessType='inputOnly',name='start',type='SFBool'), field(accessType='inputOnly',name='stop',type='SFBool'), field(accessType='initializeOnly',appinfo='seconds',name='samplingInterval',type='SFTime'), field(accessType='initializeOnly',appinfo='whether to output .x3d syntax on browser console',name='outputX3D',type='SFBool'), field(accessType='initializeOnly',appinfo='whether to output .x3d syntax on browser console',name='outputClassicVRML',type='SFBool'), field(accessType='outputOnly',appinfo='persistent state variable',name='recordingInProgress',type='SFBool'), field(accessType='inputOnly',name='set_position',type='SFVec3f'), field(accessType='inputOnly',name='set_orientation',type='SFRotation'), field(accessType='initializeOnly',name='positionArray',type='MFVec3f'), field(accessType='initializeOnly',name='positionTimeArray',type='MFTime'), field(accessType='initializeOnly',name='orientationArray',type='MFRotation'), field(accessType='initializeOnly',name='orientationTimeArray',type='MFTime'), field(accessType='initializeOnly',appinfo='not yet implemented',name='filterDeadTime',type='SFBool'), 
# NOTE(review): the initializeOnly fields below act as persistent Script state across
# recording runs — the SFNode reference to NewViewpointGroup, tour counter, last-sampled
# position/orientation and their timestamps, axis-normalization scratch value, and flags
# tracking whether the ProximitySensor ever delivered events.
field(accessType='initializeOnly',name='newViewpointGroup',type='SFNode', children=[ Group(USE='NewViewpointGroup')]), field(accessType='initializeOnly',appinfo='persistent holding variable',name='numberOfToursCreated',type='SFInt32',value=0), field(accessType='initializeOnly',appinfo='persistent holding variable',name='precedingPosition',type='SFVec3f',value=(0,0,0)), field(accessType='initializeOnly',appinfo='persistent holding variable',name='precedingOrientation',type='SFRotation',value=(0,1,0,0)), field(accessType='initializeOnly',appinfo='persistent holding variable',name='precedingPositionSampleTime',type='SFTime',value=0), field(accessType='initializeOnly',appinfo='persistent holding variable',name='precedingOrientationSampleTime',type='SFTime',value=0), field(accessType='initializeOnly',appinfo='normalization factor',name='r',type='SFFloat',value=1), field(accessType='initializeOnly',appinfo='track output of ProximitySensor',name='positionEventsReceived',type='SFBool',value=False), field(accessType='initializeOnly',appinfo='track output of ProximitySensor',name='orientationEventsReceived',type='SFBool',value=False)], IS=IS( connect=[ connect(nodeField='start',protoField='start'), connect(nodeField='stop',protoField='stop'), connect(nodeField='samplingInterval',protoField='samplingInterval'), connect(nodeField='outputX3D',protoField='outputX3D'), connect(nodeField='outputClassicVRML',protoField='outputClassicVRML'), connect(nodeField='filterDeadTime',protoField='filterDeadTime')]), sourceCode=""" ecmascript: function initialize() { positionArray = new MFVec3f (); orientationArray = new MFRotation (); positionTimeArray = new MFTime (); orientationTimeArray = new MFTime (); positionEventsReceived = false; orientationEventsReceived = false; } function roundoff (value, digits) { resolution = 1; for (i = 1; i <= digits; i++ ) { resolution *= 10; } return Math.round (value*resolution) / resolution; // round to resolution } function filterPositions() { // TODO } 
function filterOrientations() { // TODO } function set_position (eventValue, timestamp) { // Browser.println ('position=' + eventValue); // we are counting on an initialization eventValue being sent by ProximitySensor positionEventsReceived = true; if ( positionArray.length == 0 ) { positionArray[0] = eventValue; // initialize positionTimeArray[0] = timestamp; // initialize } precedingPositionSampleTime = positionTimeArray[ positionArray.length - 1 ]; // seconds duration since last valid sample if ( (timestamp - precedingPositionSampleTime) > samplingInterval ) { // append values to each array positionArray[positionArray.length] = eventValue; positionTimeArray[positionTimeArray.length] = timestamp; } precedingPosition = eventValue; } function set_orientation (eventValue, timestamp) { // we are counting on an initialization eventValue being sent by ProximitySensor orientationEventsReceived = true; if ( orientationArray.length == 0 ) { r = Math.sqrt (eventValue.x*eventValue.x + eventValue.y*eventValue.y + eventValue.z*eventValue.z); // Browser.println ('orientation=' + eventValue.toString() + ', r=' + r); // trace if (r != 0) { eventValue.x = eventValue.x / r; eventValue.y = eventValue.y / r; eventValue.z = eventValue.z / r; } orientationArray[0] = eventValue; // initialize orientationTimeArray[0] = timestamp; // initialize } precedingOrientationSampleTime = orientationTimeArray[ orientationTimeArray.length - 1 ]; // append sample values to each array if ( (timestamp - precedingOrientationSampleTime) > samplingInterval ) { orientationTimeArray[orientationTimeArray.length] = timestamp; // normalize SFRotation axis if needed r = Math.sqrt (eventValue.x*eventValue.x + eventValue.y*eventValue.y + eventValue.z*eventValue.z); // Browser.println ('orientation=' + eventValue.toString() + ', r=' + r); // trace if (r != 0) { eventValue.x = eventValue.x / r; eventValue.y = eventValue.y / r; eventValue.z = eventValue.z / r; // auto append to array, no need to allocate 
orientationArray[orientationArray.length] = eventValue; } else // illegal zero-magnitude axis returned by browser, so just use previous rotation { // auto append to array, no need to allocate orientationArray[orientationArray.length] = precedingOrientation; } } precedingOrientation = eventValue; } function start (eventValue, timestamp) { if (eventValue == false) return; // only accept start if eventValue == true if (recordingInProgress == true) return; // ignore repeated starts while already running recordingInProgress = true; // arrays need to be reinitialized from previous run initialize(); Browser.println (' '); } function stop (eventValue, timestamp) { if (eventValue == false) return; // only accept stop if eventValue == true if (recordingInProgress == false) { Browser.println (' '); return; } // ensure legal array lengths in case some events were never sent due to not moving if (positionEventsReceived == false) { Browser.println (''); return; } if (orientationEventsReceived == false) { Browser.println (''); return; } recordingInProgress = false; // preceding last values were at last sampleInterval (either set_position or set_orientation) // add one more to each array since they are not sent values by sensor when not changing positionArray[ positionArray.length] = precedingPosition; orientationArray[orientationArray.length] = precedingOrientation; positionTimeArray[ positionTimeArray.length] = timestamp; orientationTimeArray[orientationTimeArray.length] = timestamp; if (positionArray.length != positionTimeArray.length) { Browser.println (''); } if (orientationArray.length != orientationTimeArray.length) { Browser.println (''); } filterPositions(); filterOrientations(); // iff events are sent simultaneously, could use either array with start/stop times synchronized // however that might be a bad assumption... 
so reset start times to match if (positionTimeArray[0] > orientationTimeArray[0]) positionTimeArray[0] = orientationTimeArray[0]; if (positionTimeArray[0] < orientationTimeArray[0]) orientationTimeArray[0] = positionTimeArray[0]; startTime = positionTimeArray[0]; stopTime = positionTimeArray[positionTimeArray.length-1]; interval = stopTime - startTime; x3dString = ' ' + ' \n' + ' \n' + ' ' + ' \n' + ' \n' + ' \n' + ' \n' + ' \n' + ' \n' + ' \n' + ' \n' + ' \n' + ' \n' + ' \n'; if (outputX3D) Browser.println (x3dString); vrmlString = '# ********** start recorded Animated Tour ' + numberOfToursCreated + ' using .x3dv syntax ********** \n' + 'Group {\n' + ' children [\n' + ' DEF AnimatedViewpointRecorderViewpoint' + numberOfToursCreated + ' Viewpoint {\n' + ' description \"Animated Tour ' + numberOfToursCreated + '\"\n' + ' orientation ' + orientationArray[0].x + ' ' + orientationArray[0].y + ' ' + orientationArray[0].z + ' ' + orientationArray[0].angle + '\n' + ' position ' + positionArray[0].x + ' ' + positionArray[0].y + ' ' + positionArray[0].z + '\n' + ' }\n' + ' DEF AnimatedViewpointRecorderTimer' + numberOfToursCreated + ' TimeSensor {\n' + ' cycleInterval ' + interval + '\n' + ' loop TRUE\n' + ' }\n' + ' DEF AnimatedViewpointRecorderPosition' + numberOfToursCreated + ' PositionInterpolator {\n' + ' key [\n'; for (counter = 0; counter < positionTimeArray.length; counter++) { vrmlString = vrmlString + roundoff(((positionTimeArray[counter] - positionTimeArray[0]) / interval),5) + ' \n'; } vrmlString = vrmlString + ' ]\n' + ' keyValue [\n'; for (counter = 0; counter < positionArray.length; counter++) { vrmlString = vrmlString + positionArray[counter].x + ' ' + positionArray[counter].y + ' ' + positionArray[counter].z + ', \n'; } vrmlString = vrmlString + ' ]\n' + ' }\n' + ' DEF AnimatedViewpointRecorderOrientation' + numberOfToursCreated + ' OrientationInterpolator {\n' + ' key [\n'; for (counter = 0; counter < orientationTimeArray.length; counter++) { vrmlString 
= vrmlString + roundoff(((orientationTimeArray[counter] - orientationTimeArray[0]) / interval),5) + ' \n'; } vrmlString = vrmlString + ' ]\n' + ' keyValue [\n'; for (counter = 0; counter < orientationArray.length; counter++) { vrmlString = vrmlString + orientationArray[counter].x + ' ' + orientationArray[counter].y + ' ' + orientationArray[counter].z + ' ' + orientationArray[counter].angle + ', \n'; } vrmlString = vrmlString + ' ]\n' + ' }\n' + ' Group {\n' + ' ROUTE AnimatedViewpointRecorderViewpoint' + numberOfToursCreated + '.bindTime TO AnimatedViewpointRecorderTimer' + numberOfToursCreated + '.startTime\n' + ' ROUTE AnimatedViewpointRecorderTimer' + numberOfToursCreated + '.fraction_changed TO AnimatedViewpointRecorderPosition' + numberOfToursCreated + '.set_fraction\n' + ' ROUTE AnimatedViewpointRecorderTimer' + numberOfToursCreated + '.fraction_changed TO AnimatedViewpointRecorderOrientation' + numberOfToursCreated + '.set_fraction\n' + ' ROUTE AnimatedViewpointRecorderPosition' + numberOfToursCreated + '.value_changed TO AnimatedViewpointRecorderViewpoint' + numberOfToursCreated + '.position\n' + ' ROUTE AnimatedViewpointRecorderOrientation' + numberOfToursCreated + '.value_changed TO AnimatedViewpointRecorderViewpoint' + numberOfToursCreated + '.orientation\n' + ' }\n' + ' ]\n' + '}\n'; Browser.println (); if (outputClassicVRML) Browser.println (vrmlString); numberOfToursCreated++; // TODO // newNode = new SFNode(vrmlString); // newViewpointGroup.children[numberOfToursCreated] = newNode; } """), Group(DEF='RouteHolder', children=[ ROUTE(fromField='position_changed',fromNode='WhereSensor',toField='set_position',toNode='RecordingScript'), ROUTE(fromField='orientation_changed',fromNode='WhereSensor',toField='set_orientation',toNode='RecordingScript'), ROUTE(fromField='recordingInProgress',fromNode='RecordingScript',toField='enabled',toNode='WhereSensor')])])])), # ==================== Background(groundColor=[(0.2,0.4,0.2)],skyColor=[(0.2,0.2,0.4)]), 
# NOTE(review): remaining Scene children are demo content — an entry Viewpoint and an Anchor
# wrapping clickable Text that links to AnimatedViewpointRecorderExample; the script that
# follows the model is the standard generated self-test (XML/VRML/JSON export checks).
# NOTE(review): in the except blocks below, err.lineno exists only for SyntaxError, and the
# newModelVRML/newModelJSON names are unbound if the export call itself raised — presumably
# acceptable for a diagnostic script, but confirm against the x3d.py self-test template.
Viewpoint(description='Animated Viewpoint Recorder',position=(0,0,14)), Anchor(description='AnimatedViewpointRecorder Example',url=["AnimatedViewpointRecorderExample.x3d","https://savage.nps.edu/Savage/Tools/Authoring/AnimatedViewpointRecorderExample.x3d","AnimatedViewpointRecorderExample.wrl","https://savage.nps.edu/Savage/Tools/Authoring/AnimatedViewpointRecorderExample.wrl"], children=[ Shape( geometry=Text(string=["AnimatedViewpointRecorderPrototype","is a prototype definition file","","Click this text to see","AnimatedViewpointRecorderExample"], fontStyle=FontStyle(justify=["MIDDLE","MIDDLE"],size=1.2)), appearance=Appearance( material=Material(diffuseColor=(0.6,0.8,0.4))))])]) ) # X3D model complete #################################################################################################### # Self-test diagnostics #################################################################################################### print('Self-test diagnostics for AnimatedViewpointRecorderPrototype.py:') if metaDiagnostics(newModel): # built-in utility method in X3D class print(metaDiagnostics(newModel)) # display meta info, hint, warning, error, TODO values in this model # print('check newModel.XML() serialization...') newModelXML= newModel.XML() # test export method XML() for exceptions during export newModel.XMLvalidate() # print(newModelXML) # diagnostic try: # print('check newModel.VRML() serialization...') newModelVRML=newModel.VRML() # test export method VRML() for exceptions during export # print(prependLineNumbers(newModelVRML)) # debug print("Python-to-VRML export of VRML output successful", flush=True) except Exception as err: # usually BaseException # https://stackoverflow.com/questions/18176602/how-to-get-the-name-of-an-exception-that-was-caught-in-python print("*** Python-to-VRML export of VRML output failed:", type(err).__name__, err) if newModelVRML: # may have failed to generate print(prependLineNumbers(newModelVRML, err.lineno)) try: # print('check 
newModel.JSON() serialization...') newModelJSON=newModel.JSON() # test export method JSON() for exceptions during export # print(prependLineNumbers(newModelJSON)) # debug print("Python-to-JSON export of JSON output successful (under development)") except Exception as err: # usually SyntaxError print("*** Python-to-JSON export of JSON output failed:", type(err).__name__, err) if newModelJSON: # may have failed to generate print(prependLineNumbers(newModelJSON,err.lineno)) print("python AnimatedViewpointRecorderPrototype.py load and self-test diagnostics complete.")