WIP snapshot
parent b40efa4bbb
commit 75f41fb9f5

@@ -195,6 +195,7 @@ class ActivityMapper(Analyzer):
         board_data = get_board_data(self.settings.source, self.instance_config_id, entry["sequence_id"],
                                     entry["board_id"])
         entry["extra_data"] = board_data
         entry["extra_data"]["activity_type"] = self.last_board_type
+        entry['coordinate'] = self.new_coordinate()
         self.timeline.append(entry)
         return False
@@ -236,6 +237,8 @@ class ActivityMapper(Analyzer):
         self.track['properties'].update(props)
         self.tracks.append(self.track)
         self.track = self.new_track(props['end_timestamp'])
+        if self.last_coordinate:
+            self.track['coordinates'].append(self.last_coordinate)

     def new_track(self, timestamp):
         return {"type": "LineString", "coordinates": [], "properties": {'start_timestamp': timestamp}}
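
Note on the hunk above: tracks are GeoJSON-style LineString dicts, and the new `if self.last_coordinate:` branch seeds each fresh track with the previous track's last point so consecutive segments stay connected. A minimal standalone sketch of that hand-off, with the surrounding class stripped and coordinates invented:

    # Minimal sketch of the track hand-off added above; tracks are plain
    # GeoJSON LineString dicts, timestamps and coordinates are invented.

    def new_track(timestamp):
        return {"type": "LineString", "coordinates": [],
                "properties": {'start_timestamp': timestamp}}

    tracks = []
    track = new_track(0)
    track['coordinates'].append([13.35, 52.51])  # hypothetical lon/lat
    last_coordinate = track['coordinates'][-1]

    # Close the current track and start the next one at the same point.
    tracks.append(track)
    track = new_track(60_000)
    if last_coordinate:
        track['coordinates'].append(last_coordinate)

    print(tracks[0]['coordinates'][-1] == track['coordinates'][0])  # True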
@@ -182,6 +182,9 @@ if __name__ == '__main__':
                               pyproj.Proj(init='EPSG:32633'))
         return transform(project, track).length

+    whitelist = ['16fc3117-61db-4f50-b84f-81de6310206f', '5e64ce07-1c16-4d50-ac4e-b3117847ea43',
+                 '90278021-4c57-464e-90b1-d603799d07eb', 'ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771']
+

     def get_data_distance(store, relative_values=True, sort=True, show_errors=False):
         places = defaultdict(list)
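
For context, the `pyproj.Proj(init='EPSG:32633')` and `transform(project, track).length` lines above are the tail of a distance helper; a sketch of the full function they imply, assuming shapely 1.x with the old pyproj `init=` API and the `calc_distance` name used later in this diff:

    # Sketch of the implied helper: project WGS84 lon/lat into UTM zone 33N
    # (EPSG:32633) so that LineString.length comes out in metres.
    from functools import partial

    import pyproj
    from shapely.geometry import LineString
    from shapely.ops import transform

    def calc_distance(coords):
        track = LineString(coords)
        project = partial(pyproj.transform,
                          pyproj.Proj(init='EPSG:4326'),   # assumed source CRS: WGS84
                          pyproj.Proj(init='EPSG:32633'))
        return transform(project, track).length  # metres

    print(calc_distance([(13.35, 52.51), (13.36, 52.52)]))  # roughly 1.3 km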
@@ -211,9 +214,10 @@ if __name__ == '__main__':
         return places


-    def get_all_data(store):
+    def get_all_data(store, sort=False, relative=True):
         places = defaultdict(list)

+        simu_distribution = defaultdict(lambda: 0)
+        #divisiors = {"time":60_000, "space":1000000}
         for log in store.get_all():
             if not log.analysis() == analyzers.ActivityMapper:
                 continue
@@ -223,13 +227,14 @@ if __name__ == '__main__':
                 if len(coords) > 1:
                     distance = calc_distance(coords)
                 else:
-                    distance = 0.1
+                    distance = 0.0
                 result["space"][i['properties']['activity_type']] += distance
                 duration = i['properties']['end_timestamp'] - i['properties']['start_timestamp']
                 result["time"][i['properties']['activity_type']] += duration
            total_space = sum(result["space"].values())
            total_time = sum(result["time"].values())
            percentage = defaultdict(lambda: defaultdict(lambda: 0))
+           total = defaultdict(lambda: defaultdict(lambda: 0))
            for i in result["space"]:
                if not total_space == 0:
                    percentage[i]["space"] = result["space"][i] / total_space
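
The new `total` dict mirrors `percentage`: a two-level defaultdict keyed first by activity type, then by "space"/"time". A small illustration of that shape, with invented numbers:

    from collections import defaultdict

    # Outer key: activity type; inner key: "space" or "time"; default 0.
    percentage = defaultdict(lambda: defaultdict(lambda: 0))

    result = {"space": {"map": 120.0, "simu": 30.0}}  # hypothetical metres
    total_space = sum(result["space"].values())        # 150.0

    for i in result["space"]:
        if not total_space == 0:
            percentage[i]["space"] = result["space"][i] / total_space

    print(percentage["map"]["space"], percentage["simu"]["space"])  # 0.8 0.2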
@@ -239,22 +244,51 @@ if __name__ == '__main__':
                    percentage[i]["time"] = result["time"][i] / total_time
                else:
                    percentage[i]["time"] = 0
+               for t in ("space","time"):
+                   #total[i][t] += (result[t][i] / divisiors[t])
+                   total[i][t] += result[t][i]
            print(percentage)
            if not 'error' in result:
-               places[log.get()['instance']].append(percentage)
+               if relative:
+                   value = percentage
+               else:
+                   value = total
+               places[log.get()['instance']].append(value)
+           simus = defaultdict(lambda :0)
+           for item in log.get()['boards']:
+               if item["extra_data"]["activity_type"]=="simu":
+                   simus[item["board_id"]] += 1
+           simu_distribution[len(simus)]+=1
+
+       if sort:
+           for place in places:
+               places[place] = sorted(places[place], key=lambda item: item['map']['time'])
+       print(simu_distribution)
        return places


-   def stack_data(keys, places):
+   def stack_data(keys, places, type="time"):
+       divisiors = {"time": 60_000, "space": 1000}
+       divisiors = {"time": 1, "space": 1}
        dummy = [0] * len(keys)
        results = []
        sites = []
-       for i in places:
+       for i in sorted(places):
+           if not i in whitelist:
+               continue
            for j in places[i]:
                ordered = []
                for k in keys:
-                   ordered.append(j[k])
-               results.append(ordered)
+                   if k in j:
+                       ordered.append(j[k][type]/divisiors[type])
+                   else:
+                       ordered.append(0)
+               print(sum(ordered))
+               if sum(ordered) > 0.9:
+                   #print(sum(ordered), 1-sum(ordered))
+                   #if sum(ordered)<1:
+                   #    ordered[-2] = 1-sum(ordered[:-2], ordered[-1])
+                   results.append(ordered)
            results.append(dummy)
            sites.append(CONFIG_NAMES[i] if i in CONFIG_NAMES else "---")
        return results, sites
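
The reworked `stack_data` flattens each place's per-activity dicts into rows ordered by `keys`: it reads `j[k][type]`, fills 0 for missing keys, keeps only rows summing to more than 0.9, and appends an all-zero dummy row as a separator between places. A hedged illustration of the resulting row shape, with IDs and values invented:

    # Illustration of stack_data's output rows for type="time", divisior 1.
    keys = ["map", "simu", "other"]
    entries = [
        {"map": {"time": 0.6}, "simu": {"time": 0.4}},  # sums to 1.0 -> kept
        {"map": {"time": 0.5}},                          # sums to 0.5 -> dropped
    ]

    results = []
    for j in entries:
        ordered = [j[k]["time"] if k in j else 0 for k in keys]
        if sum(ordered) > 0.9:
            results.append(ordered)
    results.append([0] * len(keys))  # dummy separator row after the place

    print(results)  # [[0.6, 0.4, 0], [0, 0, 0]]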
@@ -262,7 +296,8 @@ if __name__ == '__main__':

     def plot_data(places, keys):
         results, sites = stack_data(keys, places)

+        dpi=86.1
+        plt.figure(figsize=(1280/dpi, 720/dpi))
         size = len(results)
         print("{} elements total".format(size))
         ind = np.arange(size)
@@ -272,13 +307,16 @@ if __name__ == '__main__':
         # print(data)
         lines = []
         bottom = [0] * size
+        plt.ticklabel_format(useMathText=False)
         for i in range(0, len(data)):
             lines.append(plt.bar(ind, data[i], bottom=bottom, width=width)[0])
             for k, x in enumerate(data[i]):
                 bottom[k] += x
         plt.legend(lines, keys)
         plt.title(", ".join(sites))
-        plt.show()
+        #plt.show()
+        dpi=86
+        plt.savefig("time_rel_{}.png".format(size), dpi=dpi,bbox_inches="tight")


     colors = {
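
The loop above is the standard matplotlib stacked-bar pattern: each series is drawn with `bottom=` set to the running column totals, which that series then raises. A self-contained sketch of the same pattern, with invented data:

    # Standalone stacked-bar sketch of the loop above.
    import matplotlib.pyplot as plt
    import numpy as np

    data = [[0.6, 0.3, 0.5], [0.4, 0.7, 0.5]]  # 2 series x 3 bars
    size = len(data[0])
    ind = np.arange(size)
    bottom = [0] * size

    lines = []
    for series in data:
        lines.append(plt.bar(ind, series, bottom=bottom, width=0.35)[0])
        for k, x in enumerate(series):
            bottom[k] += x  # raise the baseline for the next series

    plt.legend(lines, ["map", "simu"])
    plt.savefig("stacked_sketch.png", dpi=86, bbox_inches="tight")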
@@ -314,13 +352,18 @@ if __name__ == '__main__':
         groups = defaultdict(list)
         keys = list(keys)
         keys.remove("other")
+        ids = []
         for k in keys:
             for id in sorted(combined):
+                if id not in whitelist:
+                    continue
+                if not id in ids:
+                    ids.append(id)
                 group = 0.0
                 count = 0
                 for item in combined[id]:
                     if k in item:
-                        time = item[k]["time"]
+                        time = item[k]["time"]/1000
                         distance = item[k]["space"]
                         if time > 0:
                             group += (distance / time)
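
The `/1000` added to the time lookup above matters for units: distances are in metres (via the EPSG:32633 projection) and durations appear to be in milliseconds, so dividing by 1000 makes `distance / time` come out in metres per second. A quick check of that arithmetic, with invented values:

    # Unit check: metres and milliseconds -> m/s after dividing time by 1000.
    time_ms = 90_000        # hypothetical duration: 90 s in milliseconds
    distance_m = 120.0      # hypothetical projected track length in metres

    speed = distance_m / (time_ms / 1000)
    print(speed)  # 1.333... m/s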
@@ -331,24 +374,29 @@ if __name__ == '__main__':
                     groups[k].append(group/count)
                 else:
                     groups[k].append(0.0)
-        ind = np.arange(len(combined.keys()))
+        print(ids)
+        ind = np.arange(len(ids))
         width = .7 / len(groups)
+        print(ind)
+        print(json.dumps(groups, indent=1))
         bars = []
+        dpi=10
+        plt.figure(figsize=(1280/dpi, 720/dpi))
         fig, ax = plt.subplots()
         for k in groups:
+            print(groups[k])
+            if not len(groups[k]):
+                groups[k].append(0)
-            bars.append(ax.bar(ind, groups[k], width, color=colors[k]))
-            ind = ind + width
+            ind = ind + (width)
+            bars.append(ax.bar((ind + width*len(groups)/2), groups[k], width, color=colors[k]))
         ax.set_xticks(ind + width / 2)
-        ax.set_xticklabels(list([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in sorted(combined.keys())]))
+        ax.set_xticklabels(list([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in ids]))
         plt.legend(bars, keys)
-        print(combined.keys())
-        print([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in sorted(combined.keys())])
-        plt.show()
+        print(combined.keys(), ids)
+        print([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in ids])
+        #plt.show()
+        dpi=100
+        plt.savefig("speed.png", dpi=dpi)


     # spatial_data = get_data_distance(store,relative_values=False)
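
The `ind = ind + (width)` shuffling above is working toward matplotlib's usual grouped-bar layout. A simplified, self-contained version of that positioning, using explicit per-key offsets instead of mutating `ind`, with invented data; this is a sketch of the general technique, not the commit's exact offsets:

    # Grouped-bar sketch: one bar per (site, key), keys offset by j * width.
    import matplotlib.pyplot as plt
    import numpy as np

    groups = {"map": [1.2, 0.8], "simu": [0.4, 0.9]}  # hypothetical m/s per site
    ind = np.arange(2)                 # two sites on the x axis
    width = .7 / len(groups)

    fig, ax = plt.subplots()
    bars = []
    for j, k in enumerate(groups):
        bars.append(ax.bar(ind + j * width, groups[k], width))

    ax.set_xticks(ind + width * (len(groups) - 1) / 2)  # center under each group
    ax.set_xticklabels(["site A", "site B"])
    ax.legend(bars, list(groups))
    fig.savefig("speed_sketch.png", dpi=100)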
@@ -361,13 +409,16 @@ if __name__ == '__main__':
     # IPython.embed()

     #print(json.dumps(get_all_data(store)))
-    # json.dump(get_all_data(store), open("combined.json", "w"))
-    combined = json.load(open("combined.json"))
+    #json.dump(get_all_data(store), open("combined.json", "w"))
+    #combined = get_all_data(store, sort=True, relative=True)
+    #json.dump(combined, open("combined_rel.json", "w"))
+    #combined = json.load(open("combined_rel.json"))
+    combined = json.load(open("combined_total.json"))
     plot_time_space_rel(combined, keys)

     #plot_time_space_rel(temporal_data_rel, spatial_data_rel, keys)

     # plot_data(data, keys)
+    #plot_data(combined, keys)
     # plot_data(get_data_distance(store,relative_values=False), keys)
