1- import logging
2- import sys
31from datetime import datetime
42import pandas as pandas
53import pytz
64
75# configure logging
8- logger = logging .getLogger (__name__ )
9- logger .setLevel (logging .DEBUG )
10- handler = logging .StreamHandler (sys .stdout )
11- handler .setLevel (logging .DEBUG )
12- formatter = logging .Formatter (
13- '%(asctime)s - %(name)s - %(levelname)s - %(message)s' )
14- handler .setFormatter (formatter )
15- logger .addHandler (handler )
6+ # logger = logging.getLogger(__name__)
7+ # logger.setLevel(logging.DEBUG)
8+ # handler = logging.StreamHandler(sys.stdout)
9+ # handler.setLevel(logging.DEBUG)
10+ # formatter = logging.Formatter(
11+ # '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
12+ # handler.setFormatter(formatter)
13+ # logger.addHandler(handler)
1614
1715# template definitions
1816TEMPLATE_TIME_MS = '<TIME_MS>'
@@ -304,15 +302,21 @@ def remove_whitespace(given_str):
304302 return ' ' .join (given_str .split ())
305303
306304
307- def map_feather (file , sensors_to_map ):
305+ def map_feather (file , sensors_to_map , number_of_events , sample_rate ):
306+ counter = 0
308307 file = pandas .read_feather (file )
309308 for sensor in sensors_to_map :
310309 df = file [file ['Metric' ] == sensor ]
310+ df ['Sampling-Timestamp' ] = pandas .to_datetime (df ['Timestamp' ].astype (str ))
311+ df .set_index ('Sampling-Timestamp' , inplace = True )
312+ df .index = pandas .to_datetime (df .index )
311313 for index in df .index :
312- source = (df ['Sensor' ][index ])
313- metric = (df ['Metric' ][index ])
314- value = (df ['Value' ][index ])
315- timestamp = (df ['Timestamp' ][index ])
314+ # df_resampled = df.resample(f'{sample_rate}S').first().reset_index()
315+ df_resampled = df
316+ source = (df_resampled ['Sensor' ][index ])
317+ metric = (df_resampled ['Metric' ][index ])
318+ value = (df_resampled ['Value' ][index ])
319+ timestamp = (df_resampled ['Timestamp' ][index ])
316320 timestamp_str = str (timestamp )
317321 hour = timestamp_str [0 :2 ]
318322 minute = timestamp_str [3 :5 ]
@@ -323,7 +327,6 @@ def map_feather(file, sensors_to_map):
323327 else :
324328 millisecond = timestamp_str [9 :13 ] + 'Z'
325329
326-
327330 current_time = datetime .now ()
328331 year = current_time .strftime ("%Y" )
329332 month = current_time .strftime ("%m" )
@@ -340,13 +343,18 @@ def map_feather(file, sensors_to_map):
340343 'value' : value ,
341344 'timestamp' : time_value
342345 }
343- result = annotate_event (event )
344-
345- with open ('/home/kush/Code/feather-RDF-mapper/data/rdfData/participant6.nt' , 'a' ) as file :
346+ result = annotate_event (event )
347+ with open ('/home/kush/Code/feather-RDF-mapper/data/rdfData/accelerometer/acc-x-2min.nt' , 'a' ) as file :
346348 pass
347- file .write ('\n ' )
348- file .write (result )
349+ counter = counter + 1
350+ if counter > number_of_events :
351+ break
352+ else :
353+ file .write ('\n ' )
354+ file .write (result )
if __name__ == '__main__':
    # Source feather dataset and the single sensor metric to map to RDF.
    # Previously-used metrics: 'wearable.bvp', 'org.dyamand.types.health.SpO2'.
    file = 'data/dataset_participant6.feather'
    sensors = ['wearable.acceleration.x']
    # 120 * 32 = 3840 events — presumably ~2 minutes of accelerometer data
    # sampled at 32 Hz (matches the 'acc-x-2min' output filename); TODO confirm.
    # NOTE(review): sample_rate is currently ignored inside map_feather (the
    # resample call is commented out) — confirm before relying on it.
    map_feather(file=file, sensors_to_map=sensors,
                number_of_events=120 * 32, sample_rate=2)
0 commit comments