bicycle_statistics: Update pages differentially
Signed-off-by: Thomas Klaehn <thomas.klaehn@u-blox.com>
parent 629794d5f2
commit 102642d4ba
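
For context, a minimal usage sketch (not part of the commit; the folder paths and logging setup are invented placeholders, only the Gpx2Html()/update() calls come from the diff below). The constructor parses the .gpx files it finds once; each later update() call re-globs the input folder, and Tracks.add() skips files it has already seen, so only newly added tracks are parsed before the charts and index.html are rewritten.

    # Hedged sketch of the differential update flow introduced by this commit.
    # Paths and logging configuration are placeholders, not taken from the repo.
    import logging

    from gpx2html import Gpx2Html

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger('bicycle-statistics')

    # First pass: parse all existing .gpx files and render the pages once.
    pages = Gpx2Html('/path/to/gpx', '/path/to/html', log)

    # Later passes: only .gpx files not seen before are parsed again,
    # then the PNG charts and index.html are regenerated.
    pages.update()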
bicycle_statistics
@@ -5,7 +5,6 @@ import threading
 import time

 from watchdog.observers import Observer
-
 from gpx2html import Gpx2Html
 from input_observer import InputObserver

@@ -14,6 +13,7 @@ LOG_FILE = "/var/log/bicycle-statistics.log"
 LOG_FORMAT = "%(asctime)s %(levelname)s %(message)s"

 logging.basicConfig(format=LOG_FORMAT, level=log_level, filename=LOG_FILE)
+#logging.basicConfig(format=LOG_FORMAT, level=log_level)
 log = logging.getLogger('bicycle-statistics')

 def parse_args():
@@ -26,7 +26,6 @@ def parse_args():
                         "INFO".')
     return parser.parse_args()

-
 class myLoop(threading.Thread):
     def __init__(self, infolder, outfolder):
         super(myLoop, self).__init__()
@@ -36,10 +35,9 @@ class myLoop(threading.Thread):
         self.observer = Observer()
         self.in_obs = InputObserver(patterns=["*.gpx"])

-
     def run(self):
-        gpx2html = Gpx2Html(self.infolder, self.outfolder)
-        gpx2html.process()
+        gpx2html = Gpx2Html(self.infolder, self.outfolder, log)
+        gpx2html.update()

         self.observer.schedule(self.in_obs, self.infolder)
         self.observer.start()
@@ -47,9 +45,8 @@ class myLoop(threading.Thread):
             new_file = self.in_obs.get_new_file()
             log.info("new file triggered: {}".format(new_file))
             if new_file:
-                gpx2html.process()
-                log.info("new file processed: {}".format(new_file))
-
+                time.sleep(5) # ensure .gpx file instead of .part file appeared
+                gpx2html.update()

     def stop(self):
         self.run_condition = False
@@ -57,7 +54,6 @@ class myLoop(threading.Thread):
         self.observer.stop()
         self.observer.join()

-
 def set_log_level(level):
     global log_level
     if level == 'CRITICAL':
@@ -74,7 +70,6 @@ def set_log_level(level):
         log_level = logging.NOTSET
     log.setLevel(level=log_level)

-
 def main():
     args = parse_args()

@@ -92,6 +87,5 @@ def main():
     my_loop.join()
     return 0

-
 if __name__ == '__main__':
     sys.exit(main())
gpx2html
@@ -2,6 +2,7 @@

 import argparse
 import datetime
+import glob
 import sys
 import gpx_parser
 import matplotlib
@@ -11,6 +12,10 @@ import numpy
 import os
 import pandas as pd
 import collections
+from gpx_parser import Tracks

+MONTH_LABELS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
+
+
 def plot_bar_chart(labels, ticklabels, values, title, xlabel, ylabel, filename, xtick_rotation=0):
     fig = plt.figure()
@@ -41,43 +46,103 @@ def plot_bar_chart(labels, ticklabels, values, title, xlabel, ylabel, filename,
     plt.legend()
     plt.savefig(filename)


 class Gpx2Html(object):
-    MONTH_LABELS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
-
-    def __init__(self, infolder, outfolder):
+    def __init__(self, infolder, outfolder, logger):
+        self.logger = logger
         self.infolder = infolder
         self.outfolder = os.path.abspath(outfolder)
-        self.distance_diag_file = 'distance.png'
-        self.distance_diag_abs = os.path.join(self.outfolder, self.distance_diag_file)
-        self.avg_spd_diag_file = 'avg_spd.png'
-        self.avg_spd_diag_abs = os.path.join(self.outfolder, self.avg_spd_diag_file)
-        self.last_n_days_diag_file = None
-        self.html_file = os.path.join(self.outfolder, 'index.html')
-        self.years_distance = list()
-        self.years_avg_spd = list()
-        self.years = list()

+        self.tracks = Tracks(logger)
+        self.update()

-    def plot_distance_diagram(self):
-        plot_bar_chart(self.years, self.MONTH_LABELS, self.years_distance,
-                       'Distance', 'Month', 'km', self.distance_diag_abs)
+    def update(self):
+        infiles = glob.glob(os.path.join(self.infolder, '*.gpx'))
+        for filename in infiles:
+            self.tracks.add(filename)

+        self.logger.info("Begin update of png's/html...")
+        distances = list()
+        avg_speeds = list()
+        for year in self.tracks.years():
+            distances.append(self.tracks.distances(year))
+            avg_speeds.append(self.tracks.avg_speeds(year))

-    def plot_avg_spd_diagram(self):
-        plot_bar_chart(self.years, self.MONTH_LABELS, self.years_avg_spd,
-                       'Average Speed', 'Month', 'km/h', self.avg_spd_diag_abs)
+        plot_bar_chart(self.tracks.years(), MONTH_LABELS, distances,
+                       'Distance', 'Month', 'km',
+                       os.path.join(self.outfolder, 'distance.png'))

+        plot_bar_chart(self.tracks.years(), MONTH_LABELS, avg_speeds,
+                       'Average Speed', 'Month', 'km/h',
+                       os.path.join(self.outfolder, 'avg_spd.png'))

-    def plot_last_n_days(self, day_count):
-        plot_bar_chart(["Distance", "Average speed"], self.date_distance.keys(),
-                       [self.date_distance.values(), self.date_avg_spd.values()],
-                       'Last {} days'.format(day_count), 'Date', 'km, km/h',
-                       os.path.join(self.outfolder, self.last_n_days_diag_file), 90)
+        end_date = datetime.datetime.today()
+        start_date = end_date - datetime.timedelta(days=14)
+        last_n_tracks = self.tracks.tracks(start_date, end_date)
+        last_n_distances = dict()
+        last_n_durations = dict()
+        dates = pd.date_range(start_date.date(), end_date.date())
+        for date in dates:
+            for track in last_n_tracks:
+                if date.date() == track.start_time.date():
+                    get = 0
+                    try:
+                        get = last_n_distances[date.date()]
+                    except KeyError:
+                        pass
+                    if get == 0:
+                        last_n_distances[date.date()] = track.distance / 1000
+                    else:
+                        last_n_distances[date.date()] += track.distance / 1000
+                    try:
+                        get = last_n_durations[date.date()]
+                    except KeyError:
+                        pass
+                    if get == 0:
+                        last_n_durations[date.date()] = track.duration.total_seconds()
+                    else:
+                        last_n_durations[date.date()] += track.duration.total_seconds()
+                else:
+                    try:
+                        get = last_n_distances[date.date()]
+                    except KeyError:
+                        last_n_distances[date.date()] = 0
+                    try:
+                        get = last_n_durations[date.date()]
+                    except KeyError:
+                        last_n_durations[date.date()] = 0
+        last_n_dist = list()
+        last_n_dur = list()
+        last_n_avg = list()
+        last_n_dates = list()
+        for date in dates:
+            try:
+                last_n_dist.append(last_n_distances[date.date()])
+            except KeyError:
+                last_n_dist.append(0)
+            try:
+                last_n_dur.append(last_n_durations[date.date()])
+            except KeyError:
+                last_n_dur.append(0)
+            date_str = "{0:04d}-{1:02d}-{2:02d}".format(date.year, date.month, date.day)
+            last_n_dates.append(date_str)
+            try:
+                if last_n_durations[date.date()] == 0:
+                    last_n_avg.append(0)
+                else:
+                    last_n_avg.append(last_n_distances[date.date()] /
+                                      (last_n_durations[date.date()] / 3600))
+            except KeyError:
+                last_n_avg.append(0)

+        plot_bar_chart(["Distance", "Average speed"], last_n_dates,
+                       [last_n_dist, last_n_avg],
+                       'Last 14 days', 'Date', 'km, km/h',
+                       os.path.join(self.outfolder, 'last_14_days.png'), 90)
+        self.__write_html_file()
+        self.logger.info("End update of png's/html...")

-    def write_html_file(self):
-        with open(self.html_file, 'w') as handle:
+    def __write_html_file(self):
+        with open(os.path.join(self.outfolder, 'index.html'), 'w') as handle:
             handle.write('<!DOCTYPE html>\n')
             handle.write('<html>\n')
             handle.write('<head>\n')
@@ -99,102 +164,24 @@ class Gpx2Html(object):

             handle.write('<table>\n')
             handle.write('<tr>\n')
-            for year in self.years:
+            for year in self.tracks.years():
                 handle.write('<th>{}</th>\n'.format(year))
             handle.write('</tr>\n')

             handle.write('<tr>\n')
-            for i in range(len(self.years_distance)):
-                handle.write('<td>{} km</td>\n'.format(round(sum(self.years_distance[i]), 1)))
+            for year in self.tracks.years():
+                handle.write('<td>{} km</td>\n'.format(round(sum(self.tracks.distances(year)), 1)))
             handle.write('</tr>\n')
             handle.write('</table>\n')

             handle.write('</p>\n')

             handle.write('<p>\n')
-            handle.write('<IMG SRC="{}" ALT="Distance">\n'.format(self.distance_diag_file))
-            handle.write('<IMG SRC="{}" ALT="Distance">\n'.format(self.avg_spd_diag_file))
-            handle.write('<IMG SRC="{}" ALT="Distance">\n'.format(self.last_n_days_diag_file))
+            handle.write('<IMG SRC="{}" ALT="Distance">\n'.format('distance.png'))
+            handle.write('<IMG SRC="{}" ALT="Distance">\n'.format('avg_spd.png'))
+            handle.write('<IMG SRC="{}" ALT="Distance">\n'.format('last_14_days.png'))
             handle.write('</p>\n')

             handle.write('</body>\n')
             handle.write('<center>\n')
             handle.write('</html>\n')
-
-
-    def process(self):
-        self.years[:] = []
-        self.years_avg_spd[:] = []
-        self.years_distance[:] = []
-        self.tracks = gpx_parser.Tracks(self.infolder)
-        for year in range(2017, datetime.datetime.now().year + 1):
-            month_avg_spd = {1:0, 2:0, 3:0, 4:0, 5:0, 6:0, 7:0, 8:0, 9:0, 10:0, 11:0, 12:0}
-            month_distance = {1:0, 2:0, 3:0, 4:0, 5:0, 6:0, 7:0, 8:0, 9:0, 10:0, 11:0, 12:0}
-            month_duration = {1:0, 2:0, 3:0, 4:0, 5:0, 6:0, 7:0, 8:0, 9:0, 10:0, 11:0, 12:0}
-            for month in range(1, 13):
-                if month == 12:
-                    max_day = (datetime.date(year + 1, 1, 1) - datetime.timedelta(days=1)).day
-                else:
-                    max_day = (datetime.date(year, month + 1, 1) - datetime.timedelta(days=1)).day
-                for day in range(1, max_day + 1):
-                    date_tracks = self.tracks.get(year, month, day)
-                    for track in date_tracks:
-                        month_distance[month] += (track.distance / 1000) # km
-                        month_duration[month] += track.duration.total_seconds() / 3600 # h
-            for i in range(1, 13):
-                if month_duration[i] > 0:
-                    month_avg_spd[i] = month_distance[i] / month_duration[i]
-            self.years_distance.append(month_distance.values())
-            self.years_avg_spd.append(month_avg_spd.values())
-            self.years.append(str(year))
-
-        self.plot_distance_diagram()
-        self.plot_avg_spd_diagram()
-
-        # last n days
-        n = 14
-        end_date = datetime.date.today()
-        start_date = end_date - datetime.timedelta(days=n)
-        dates = pd.date_range(start_date, end_date)
-
-        self.date_distance = dict()
-        date_duration = dict()
-        self.date_avg_spd = dict()
-
-        for date in dates:
-            date_str = "{0:04d}-{1:02d}-{2:02d}".format(date.year, date.month, date.day)
-            date_tracks = self.tracks.get(date.year, date.month, date.day)
-            for track in date_tracks:
-                try:
-                    current_dist = self.date_distance[date_str]
-                    current_duration = date_duration[date_str]
-                except KeyError:
-                    current_dist = 0
-                    current_duration = 0
-                current_dist += track.distance / 1000
-                self.date_distance.update({date_str:current_dist})
-                current_duration += track.duration.total_seconds() / 3600
-                date_duration.update({date_str:current_duration})
-            # check for empty dates
-            try:
-                current_dist = self.date_distance[date_str]
-                current_duration = date_duration[date_str]
-            except KeyError:
-                self.date_distance.update({date_str:0})
-                date_duration.update({date_str:0})
-
-        date_duration = collections.OrderedDict(sorted(date_duration.items()))
-
-        for key, value in date_duration.items():
-            if value == 0:
-                self.date_avg_spd.update({key:0})
-            else:
-                avg_spd = self.date_distance[key] / value
-                self.date_avg_spd.update({key:avg_spd})
-
-        self.date_avg_spd = collections.OrderedDict(sorted(self.date_avg_spd.items()))
-        self.date_distance = collections.OrderedDict(sorted(self.date_distance.items()))
-
-        self.last_n_days_diag_file = "distance_last_{}_days.png".format(n)
-        self.plot_last_n_days(n)
-        self.write_html_file()
gpx_parser
@@ -5,6 +5,7 @@ import gpxpy
 import gpxpy.gpx
 from geopy import distance
 from geopy import Point
+import pandas as pd

 class Segment(object):
     start_time = None
@@ -19,8 +20,7 @@ class Track(object):
     duration = None

     def __init__(self, raw_track):
-        self.__raw = raw_track
-        for segment in self.__raw.segments:
+        for segment in raw_track.segments:
             seg = Segment()
             for i in range(1, len(segment.points)):
                 if self.start_time is None:
@@ -48,39 +48,68 @@ class Track(object):


 class Tracks(object):
-    track_list = None
+    __distance = dict()
+    __duration = dict()
+    __avg_speed = dict()
+    __tracks = list()
+    __files = list()

-    def __init__(self, folder):
-        self.track_list = list()
-        gpx_list = glob.glob(os.path.join(folder, '*.gpx'))
-        for entry in gpx_list:
-            with open(entry, 'r') as gpx_handle:
-                gpx = gpxpy.parse(gpx_handle)
-                for raw_track in gpx.tracks:
-                    self.track_list.append(Track(raw_track))
+    def __init__(self, logger):
+        self.logger = logger

-    def get(self, year='all', month='all', day='all'):
-        ret = list()
-        if year == 'all':
-            ret = self.track_list
-        elif month == 'all':
-            pick_date = datetime.datetime(year=year, month=1, day=1)
-            for entry in self.track_list:
-                if pick_date.year == entry.start_time.year:
-                    ret.append(entry)
-        elif day == 'all':
-            pick_date = datetime.datetime(year=year, month=month, day=1)
-            ret = list()
-            for entry in self.track_list:
-                if pick_date.year == entry.start_time.year and \
-                   pick_date.month == entry.start_time.month:
-                    ret.append(entry)
-        else:
-            pick_date = datetime.datetime(year=year, month=month, day=day)
-            ret = list()
-            for entry in self.track_list:
-                if pick_date.year == entry.start_time.year and \
-                   pick_date.month == entry.start_time.month and \
-                   pick_date.day == entry.start_time.day:
-                    ret.append(entry)
+    def add(self, filename):
+        if filename not in self.__files:
+            self.logger.info("Adding file %s.", filename)
+            with open(filename, 'r') as f:
+                self.__files.append(filename)
+                gpx = gpxpy.parse(f)
+                for raw in gpx.tracks:
+                    track = Track(raw)
+                    self.__tracks.append(track)
+                    trk_month = track.start_time.month
+                    trk_year = track.start_time.year
+
+                    if trk_year not in self.__distance:
+                        self.__distance[trk_year] = {1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0, 9: 0, 10: 0, 11: 0, 12: 0}
+                    self.__distance[trk_year][trk_month] += track.distance / 1000
+
+                    if trk_year not in self.__duration:
+                        self.__duration[trk_year] = {1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0, 9: 0, 10: 0, 11: 0, 12: 0}
+                    self.__duration[trk_year][trk_month] += track.duration.total_seconds()
+
+                    if trk_year not in self.__avg_speed:
+                        self.__avg_speed[trk_year] = {1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0, 9: 0, 10: 0, 11: 0, 12: 0}
+                    self.__avg_speed[trk_year][trk_month] = self.__distance[trk_year][trk_month] / (self.__duration[trk_year][trk_month] / 3600)
+            self.logger.info("Adding done.")
+
+    def years(self):
+        ret = None
+        try:
+            ret = sorted(self.__distance.keys())
+        except Exception:
+            pass
         return ret
+
+    def distances(self, year):
+        ret = 0
+        try:
+            ret = self.__distance[year].values()
+        except Exception:
+            pass
+        return ret
+
+    def avg_speeds(self, year):
+        ret = None
+        try:
+            ret = self.__avg_speed[year].values()
+        except Exception:
+            pass
+        return ret
+
+    def tracks(self, start_date, end_date):
+        tracks = list()
+        dates = pd.date_range(start_date.date(), end_date.date())
+        for track in self.__tracks:
+            if track.start_time.date() in dates:
+                tracks.append(track)
+        return tracks
setup.py
@@ -7,7 +7,7 @@ import shutil
 import stat

 NAME = 'bicycle-statistics'
-VERSION = '0.1.0'
+VERSION = '0.2.0'
 AUTHOR = 'Thomas Klaehn'
 EMAIL = 'tkl@blackfinn.de'
 PACKAGES = ['bicycle_statistics', 'gpx_parser', 'gpx2html', 'input_observer']