Compare commits


No commits in common. "master" and "simu_flags" have entirely different histories.

73 changed files with 966 additions and 3094 deletions

.gitignore

@@ -3,5 +3,5 @@ _*
!__init__.py
*.pyc
logs/
*data/
data/
plots/*


@@ -1,10 +0,0 @@
FROM alpine:edge
ADD ["requirements.txt", "/"]
RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories && \
apk add --update --no-cache libpng freetype python3 libstdc++ libxml2 libxslt openblas geos libc-dev && \
apk add --update --no-cache --virtual .build-deps libpng-dev freetype-dev g++ python3-dev openblas-dev libxml2-dev libxslt-dev && \
pip3 --no-cache-dir install -r requirements.txt && \
apk del .build-deps && \
rm requirements.txt
USER guest


@@ -1,20 +0,0 @@
# Geogame Log Analyzer
## Setup (webui)
* First set up a reverse proxy (see *frontend/* for a reference with traefik)
* Start the framework
* `docker-compose up -d`
* Default config expects a docker network connection to traefik
* name: 'traefik_net'
* created when using the traefik reference from *frontend/*
* `docker network inspect traefik_net`
## log data
### set mtime of gpx files to the first date:
```
for i in */*; do touch -m -d "$(head -n 15 "$i" | grep time | head -n 1 | cut -d">" -f 3 | cut -d"<" -f1)" "$i"; done
for i in */; do touch -m -d "$(head -n 15 "$i"/*.gpx | grep time | head -n 1 | cut -d">" -f 3 | cut -d"<" -f1)" "$i"; done
```
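
The same idea as a Python sketch, if the one-liners need adapting. Assumptions: the first `<time>` element within the first 15 lines marks the track start, and timestamps use the `%Y-%m-%dT%H:%M:%SZ` format seen in the loader code below.

```
import os
import re
from datetime import datetime, timezone
from pathlib import Path

TIME_RE = re.compile(r"<time>([^<]+)</time>")

def touch_to_first_fix(gpx: Path) -> None:
    # Mirror the `head -n 15 | grep time` pipeline: scan the file head
    # for the first <time> element and use it as the new mtime.
    with gpx.open() as src:
        head = "".join(src.readlines()[:15])
    match = TIME_RE.search(head)
    if match is None:
        return
    dt = datetime.strptime(match.group(1), "%Y-%m-%dT%H:%M:%SZ")
    ts = dt.replace(tzinfo=timezone.utc).timestamp()
    os.utime(gpx, (ts, ts))  # sets both atime and mtime

for gpx in Path(".").glob("*/*.gpx"):
    touch_to_first_fix(gpx)
```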



@@ -1,233 +0,0 @@
import logging
from collections import defaultdict, OrderedDict
from analysis.util import json_path
from . import Result, LogSettings, Analyzer, ResultStore
class WhitelistAnalyzer(Analyzer): #TODO
__name__ = "WhiteList"
def __init__(self, settings: LogSettings) -> None:
super().__init__(settings)
self.key = settings.custom["whitelist"]["key"]
self.filter_objs = settings.custom["whitelist"]["objs"]
self.valid_entries = settings.custom["whitelist"]["valid"]
self.blocked = False
def result(self, store: ResultStore, name=None) -> None:
store.add(Result(type(self), {"blocked": self.blocked}, name=name))
def process(self, entry: dict) -> bool:
if entry[self.settings.type_field] in self.filter_objs:
if not json_path(entry, self.key) in self.valid_entries:
self.blocked = True
return self.blocked
class LocationAnalyzer(Analyzer):
"""
store spatial log entries
"""
__name__ = "Location"
log = logging.getLogger(__name__)
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.entries = []
def result(self, store: ResultStore, **kwargs) -> None:
#self.log.debug(len(self.entries))
store.add(Result(type(self), list(self.entries), name=kwargs['name']))
def process(self, entry: dict) -> bool:
if entry[self.settings.type_field] in self.settings.spatials:
self.entries.append(entry)
# self.log.debug(len(self.entries))
return False
class LogEntryCountAnalyzer(Analyzer):
#TODO: more flexible, e.g. min/max lat/long
"""
count occurrences of log entry types
"""
__name__ = "LogEntryCount"
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), dict(self.store)))
def process(self, entry: dict) -> bool:
self.store[entry[self.settings.type_field]] += 1
return False
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.store = defaultdict(lambda: 0)
class LogEntrySequenceAnalyzer(Analyzer):
"""
store sequence of all log entry types
"""
__name__ = "LogEntrySequence"
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), list(self.store)))
def process(self, entry: dict) -> bool:
entry_type = entry[self.settings.type_field]
self.store.append(entry_type)
return False
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.store = []
class ActionSequenceAnalyzer(LogEntrySequenceAnalyzer):
"""
find sequence of non-spatial log entry types
"""
__name__ = "ActionSequenceAnalyzer"
def process(self, entry: dict) -> bool:
entry_type = entry[self.settings.type_field]
if entry_type in self.settings.spatials:
return False
self.store.append(entry_type)
return False
class CategorizerStub(Analyzer):
"""
generate a new Category in a ResultStore
"""
def process(self, entry: dict) -> bool:
raise NotImplementedError()
__name__ = "Categorizer"
def result(self, store: ResultStore, name=None) -> None:
print(name if name else self.key)
store.new_category((name, self.key) if name else self.key)
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.key = "default"
class SimpleCategorizer(CategorizerStub):
def process(self, entry):
return False
class Store(Analyzer):
"""
Store the entire log
"""
__name__ = "Store"
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), list(self.store)))
def process(self, entry: dict) -> bool:
self.store.append(entry)
return False
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.store: list = []
class ProgressAnalyzer(Analyzer):
"""track spatial and ingame progress"""
__name__ = "ProgressAnalyzer"
def __init__(self, settings: LogSettings) -> None:
super().__init__(settings)
self.spatial = OrderedDict()
self.board = OrderedDict()
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), {"spatials": self.spatial, "boards": self.board}))
def process(self, entry: dict) -> bool:
if entry[self.settings.type_field] in self.settings.spatials:
self.spatial[entry["timestamp"]] = {
'timestamp': entry['timestamp'],
'coordinates': json_path(entry, "location.coordinates"),
'accuracy': entry['accuracy']
}
if entry[self.settings.type_field] in self.settings.boards:
self.board[entry["timestamp"]] = entry
return False
class MetaDataAnalyzer(Analyzer):
"""collect metadata"""
__name__ = "MetaDataAnalyzer"
def result(self, store: ResultStore, name=None) -> None:
store.add(Result(type(self), dict(self.store)))
def process(self, entry: dict) -> bool:
if not "metadata" in self.settings.custom:
return False
for mdata in self.settings.custom["metadata"]:
key = self.settings.custom["metadata"][mdata]
if key in entry:
self.store[mdata] = json_path(entry, key)
return False
def __init__(self, settings: LogSettings) -> None:
super().__init__(settings)
self.store = {}
def write_logentry_count_csv(LogEntryCountCSV, store, render, analyzers):
global cat, data, lines, csvfile
LogEntryCountCSV.summary = None
for cat in store.get_categories():
data = store.get_category(cat)
render(analyzers.LogEntryCountAnalyzer, data, name=cat)
if LogEntryCountCSV.summary:
headers = []
lines = []
for name in LogEntryCountCSV.summary:
data = LogEntryCountCSV.summary[name]
for head in data:
if not head in headers:
headers.append(head)
line = [name]
for head in headers:
line.append(data[head] if head in data else 0)
lines.append(line)
import csv
with open('logentrycount.csv', 'w', newline='') as csvfile:
writer = csv.writer(csvfile, quoting=csv.QUOTE_NONE)
writer.writerow(["name"] + [h.split(".")[-1] for h in headers])
for line in lines:
writer.writerow(line)
def write_simulation_flag_csv(store):
global csvfile, result, i
from datetime import datetime
import json
json.dump(store.serializable(), open("simus.json", "w"), indent=2)
with open("simus.csv", "w") as csvfile:
csvfile.write("instanceconfig,log,simu,answered,universe_state,selected_actions,timestamp,time\n")
for key in store.get_store():
csvfile.write("{}\n".format(key))
for result in store.store[key]:
csvfile.write(",{}\n".format(result.name))
for i in result.get():
csvfile.write(",,{},{},{},{},{},{}\n".format(
i['answers']['@id'],
i['answers']['answered'],
len(i['answers']['universe_state']) if i['answers']['universe_state'] else 0,
len(i['selected_actions']) if i['selected_actions'] else 0,
i['timestamp'],
str(datetime.fromtimestamp(i['timestamp'] / 1000))
))
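
These analyzers lean on `analysis.util.json_path` for dotted-path lookups into nested entries (`"location.coordinates"` above, `"features.0.geometry.coordinates"` later on). The helper itself is not part of this diff; below is a minimal sketch of the contract the call sites imply, not the real implementation.

```
from functools import reduce

def json_path(obj, path):
    # Dotted-path lookup: dict keys by name, list elements by integer
    # index, as the call sites in this diff suggest.
    def step(current, key):
        if isinstance(current, list):
            return current[int(key)]
        return current[key]
    return reduce(step, path.split("."), obj)

entry = {"location": {"type": "Point", "coordinates": [9.18, 48.78]}}
print(json_path(entry, "location.coordinates"))  # [9.18, 48.78]
```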


@@ -1,26 +0,0 @@
import logging
from typing import List
from analysis.analyzers.analyzer import ResultStore
from .. import Result
logger = logging.getLogger(__name__)
class Render:
result_types = []
def render(self, results: List[Result], name=None) -> List[str]:
raise NotImplementedError()
def render_store(self, store: ResultStore, name=None) -> str:
logging.getLogger(__name__).warning("using default implementation!")
for category in store.get_categories():
self.render(store.get_category(category), name=name)
def filter(self, results: List[Result]):
if len(self.result_types) == 0:
return results
return filter(self.__filter__, results)
def __filter__(self, obj: Result):
return obj.analysis() in self.result_types


@@ -1,165 +0,0 @@
import copy
import json
import logging
from typing import List
import datetime
import matplotlib.pyplot as plt
from analysis.analyzers import LogEntryCountAnalyzer
from analysis.util.meta_temp import GEOJSON_COORDINATES, GEOJSON_PATTERN, KML_PATTERN
from . import Render, Result
from analysis.analyzers import LocationAnalyzer
log = logging.getLogger(__name__)
class PrintRender(Render):
def render(self, results: List[Result], name=None):
print("\t" + "\n\t".join([str(r) for r in results]))
class JSONRender(Render):
def render(self, results: List[Result], name=None):
print(json.dumps([r.get() for r in self.filter(results)], indent=1))
class SpatialRender:
result_types = [LocationAnalyzer]
class TrackRender(SpatialRender, Render):
def render(self, results: List[Result], name=None):
data = []
log.debug(results)
for result in self.filter(results):
if len(result.get()) > 0:
data.append(
[[entry['location']['coordinates'][1], entry['location']['coordinates'][0]] for entry in
# TODO: configurable
result.get()])
dumps = json.dumps(data)
with open("track_data.js", "w") as out:
out.write("tracks=" + dumps + ";")
return dumps
def format_time(ts):
return datetime.datetime.fromtimestamp(ts / 1000).strftime("%Y-%m-%dT%H:%M:%S.%f")
class KMLRender(SpatialRender, Render):
def render(self, results: List[Result], name=None):
files = []
for result in self.filter(results):
times = ["<when>{time}</when>".format(time=format_time(entry["timestamp"])) for entry in result.get()]
coords = [
"<gx:coord>{long} {lat} 0.0</gx:coord>"
.format(
lat=entry['location']['coordinates'][1],
long=entry['location']['coordinates'][0])
for entry in result.get()
]
if name:
filename = str(name) + ".kml"
else:
filename = str(result.name) + ".kml"
print(filename)
with open(filename, "w") as out:
out.write(
KML_PATTERN.format(name=str(result.name), coordinates="\n".join(coords), when="\n".join(times)))
#with open(filename + ".json", "w") as out: #FIXME: why am I here??
# json.dump(result.get(), out, indent=1)
files.append(filename)
return files
class GeoJSON(SpatialRender, Render):
template = {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"properties": {},
"geometry": {
"type": "LineString",
"coordinates": []
}
}
]
}
def make_geojson(self, src):
coordinates = []
times = []
for location in src:
#print(location)
coordinates.append(location["location"]["coordinates"])
times.append(location["timestamp"])
template = copy.deepcopy(self.template)
template["features"][0]["properties"] = {"times": times}
template["features"][0]["geometry"]["coordinates"] = coordinates
return template
def render(self, results: List[Result], name=None) -> List[str]:
files = []
for result in self.filter(results):
if name:
filename = str(name) + ".geojson"
else:
filename = str(result.name) + ".geojson"
geojson = self.make_geojson(result.get())
with open(filename, "w") as out:
json.dump(geojson, out, indent=1)
files.append(filename)
return files
class HeatMapRender(TrackRender):
weight = 0.01
def render(self, results: List[Result], name=None):
raw = super(HeatMapRender, self).render(results)
data = []
for session in json.loads(raw):
data += [(entry[0], entry[1], self.weight) for entry in session]
dumps = json.dumps(data)
with open('heat_data.js', 'w') as out:
out.write("coords = " + dumps + ";")
return dumps
class LogEntryCountAnalyzerPlot(Render):
result_types = [LogEntryCountAnalyzer]
def render(self, results: List[Result], name=None):
raw_data = list(self.filter(results))[0].get()
print(raw_data)
labels = []
data = []
for x in sorted(raw_data.items()):
labels.append(str(x[0]).split(".")[-1])
data.append(x[1])
plt.bar(range(len(data)), list(data))
plt.xticks(range(len(data)), labels, rotation="vertical")
plt.tight_layout()
name = "plots/{}.png".format(name)
plt.savefig(name)
plt.cla()
plt.clf()
plt.close()
class LogEntryCountCSV(Render):
result_types = [LogEntryCountAnalyzer]
summary = None
def render(self, results: List[Result], name=None):
if self.summary is None:
return
for result in self.filter(results):
raw_data = result.get()
self.summary[name] = raw_data
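
For reference, a self-contained sketch of the document shape `make_geojson` builds (timestamps under `properties.times`, the fixes as a single LineString); the coordinates and timestamps here are invented:

```
import copy
import json

# Same FeatureCollection template as the GeoJSON render above.
template = {
    "type": "FeatureCollection",
    "features": [{
        "type": "Feature",
        "properties": {},
        "geometry": {"type": "LineString", "coordinates": []},
    }],
}
entries = [
    {"location": {"coordinates": [9.18, 48.78]}, "timestamp": 1546300800000},
    {"location": {"coordinates": [9.19, 48.79]}, "timestamp": 1546300860000},
]
doc = copy.deepcopy(template)
doc["features"][0]["properties"] = {"times": [e["timestamp"] for e in entries]}
doc["features"][0]["geometry"]["coordinates"] = [e["location"]["coordinates"] for e in entries]
print(json.dumps(doc, indent=1))
```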


@@ -1,395 +0,0 @@
import json
import numpy as np
import analysis.analyzers
from analysis import analyzers
from analysis.util.geo import calc_distance
def time_distribution(store):
# json.dump(store.serializable(), open("new.json", "w"), indent=1)
keys = [
"simu",
"question",
"image",
"audio",
"video",
"other",
"map"
]
import matplotlib.pyplot as plt
# results = []
places = defaultdict(list)
for log in store.get_all():
result = defaultdict(lambda: 0)
for i in log.get()['track']:
duration = i['properties']['end_timestamp'] - i['properties']['start_timestamp']
result[i['properties']['activity_type']] += duration
print(json.dumps(result, indent=4))
total = sum(result.values())
print(total)
percentage = defaultdict(lambda: 0)
minutes = defaultdict(lambda: 0)
for i in result:
percentage[i] = result[i] / total
minutes[i] = result[i] / 60_000
print(json.dumps(percentage, indent=4))
if not 'error' in result:
# places[log.get()['instance']].append(percentage)
places[log.get()['instance']].append(minutes)
for place in places:
places[place] = sorted(places[place], key=lambda item: item['map'])
dummy = [0] * len(keys)
results = []
sites = []
from analysis.util.meta_temp import CONFIG_NAMES
for i in places:
for j in places[i]:
ordered = []
for k in keys:
ordered.append(j[k])
results.append(ordered)
results.append(dummy)
sites.append(CONFIG_NAMES[i] if i in CONFIG_NAMES else "---")
size = len(results)
ind = np.arange(size)
width = 0.9
print(results)
data = list(zip(*results))
print(data)
lines = []
bottom = [0] * len(results)
for i in range(0, len(data)):
lines.append(plt.bar(ind, data[i], bottom=bottom, width=width)[0])
for k, x in enumerate(data[i]):
bottom[k] += x
plt.legend(lines, keys)
plt.title(", ".join(sites))
plt.show()
# size = len(results)
# ind = np.arange(size)
# width = 0.9
# print(results)
# data = list(zip(*results))
# print(data)
# lines = []
# bottom = [0] * len(results)
# for i in range(0, len(data)):
# lines.append(plt.bar(ind, data[i], bottom=bottom, width=width)[0])
# for k, x in enumerate(data[i]):
# bottom[k] += x
# plt.legend(lines, keys)
# plt.title("Zwei Spiele in Filderstadt (t1=237min; t2=67min)")
# plt.show()
# json.dump(store.serializable(), open("new.json", "w"), indent=1)
from collections import defaultdict
import matplotlib.pyplot as plt
from analysis.util.meta_temp import CONFIG_NAMES
keys = [
"simu",
"question",
"image",
"audio",
"video",
"other",
"map",
# "error"
]
loc_keys = [
"question",
"image",
"audio",
"video"
]
def get_data(store, relative_values=True, sort=True, show_errors=False):
places = defaultdict(list)
for log in store.get_all():
if not log.analysis() == analyzers.ActivityMapper:
continue
result = defaultdict(lambda: 0)
for i in log.get()['track']:
duration = i['properties']['end_timestamp'] - i['properties']['start_timestamp']
result[i['properties']['activity_type']] += duration
print(json.dumps(result, indent=4))
total = sum(result.values())
print(total)
percentage = defaultdict(lambda: 0)
minutes = defaultdict(lambda: 0)
for i in result:
percentage[i] = result[i] / total
minutes[i] = result[i] / 60_000
print(json.dumps(percentage, indent=4))
if not 'error' in result or show_errors:
if relative_values:
places[log.get()['instance']].append(percentage)
else:
places[log.get()['instance']].append(minutes)
if sort:
for place in places:
places[place] = sorted(places[place], key=lambda item: item['map'])
return places
whitelist = ['16fc3117-61db-4f50-b84f-81de6310206f', '5e64ce07-1c16-4d50-ac4e-b3117847ea43',
'90278021-4c57-464e-90b1-d603799d07eb', 'ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771']
def get_data_distance(store, relative_values=True, sort=True, show_errors=False):
places = defaultdict(list)
for log in store.get_all():
if not log.analysis() == analyzers.ActivityMapper:
continue
result = defaultdict(lambda: 0)
for i in log.get()['track']:
coords = i['coordinates']
if len(coords) > 1:
distance = calc_distance(coords)
result[i['properties']['activity_type']] += distance
total = sum(result.values())
percentage = defaultdict(lambda: 0)
for i in result:
if not total == 0:
percentage[i] = result[i] / total
if not 'error' in result or show_errors:
if relative_values:
places[log.get()['instance']].append(percentage)
else:
places[log.get()['instance']].append(result)
if sort:
for place in places:
places[place] = sorted(places[place], key=lambda item: item['map'])
return places
def get_all_data(store, sort=False, relative=True):
places = defaultdict(list)
simu_distribution = defaultdict(lambda: 0)
# divisors = {"time":60_000, "space":1000000}
for log in store.get_all():
if not log.analysis() == analyzers.ActivityMapper:
continue
result = defaultdict(lambda: defaultdict(lambda: 0))
for i in log.get()['features']:
coords = i["geometry"]['coordinates']
print(coords)
if len(coords) > 1:
distance = calc_distance(json.dumps(i["geometry"]))
else:
distance = 0.0
result["space"][i['properties']['activity_type']] += distance
duration = i['properties']['end_timestamp'] - i['properties']['start_timestamp']
result["time"][i['properties']['activity_type']] += duration
total_space = sum(result["space"].values())
total_time = sum(result["time"].values())
percentage = defaultdict(lambda: defaultdict(lambda: 0))
total = defaultdict(lambda: defaultdict(lambda: 0))
for i in result["space"]:
if not total_space == 0:
percentage[i]["space"] = result["space"][i] / total_space
else:
percentage[i]["space"] = 0
if not total_time == 0:
percentage[i]["time"] = result["time"][i] / total_time
else:
percentage[i]["time"] = 0
for t in ("space", "time"):
# total[i][t] += (result[t][i] / divisors[t])
total[i][t] += result[t][i]
print(percentage)
if not 'error' in result:
if relative:
value = percentage
else:
value = total
places[log.get()["properties"]['instance']].append(value)
simus = defaultdict(lambda: 0)
for item in log.get()["properties"]['boards']:
if item["extra_data"]["activity_type"] == "simu":
simus[item["board_id"]] += 1
simu_distribution[len(simus)] += 1
if sort:
for place in places:
places[place] = sorted(places[place], key=lambda item: item['map']['time'])
print(simu_distribution)
return places
def stack_data(keys, places, type="space"):
divisors = {"time": 60_000, "space": 1000}
# divisors = {"time": 1, "space": 1}
dummy = [0] * len(keys)
results = []
sites = []
for i in sorted(places):
if not i in whitelist:
continue
place = sorted(places[i], key=lambda item: item['map'][type])
for j in place:
ordered = []
for k in keys:
if k in j:
ordered.append(j[k][type] / divisors[type])
else:
ordered.append(0)
print(sum(ordered))
# if sum(ordered) > 0.9 and sum(ordered) < 4000 and sum(ordered)>10:
if sum(ordered) > 0.9 and sum(ordered) < 100:
# print(sum(ordered), 1-sum(ordered))
# if sum(ordered)<1:
# ordered[-2] = 1-sum(ordered[:-2], ordered[-1])
results.append(ordered)
results.append(dummy)
sites.append(CONFIG_NAMES[i] if i in CONFIG_NAMES else "---")
return results, sites
def plot_data(places, keys):
results, sites = stack_data(keys, places)
dpi = 86.1
plt.figure(figsize=(1280 / dpi, 720 / dpi))
size = len(results)
print("{} elements total".format(size))
ind = np.arange(size)
width = 1
# print(results)
data = list(zip(*results))
# print(data)
lines = []
bottom = [0] * size
plt.ticklabel_format(useMathText=False)
for i in range(0, len(data)):
lines.append(plt.bar(ind, data[i], bottom=bottom, width=width)[0])
for k, x in enumerate(data[i]):
bottom[k] += x
plt.legend(lines, keys)
plt.title(", ".join(sites))
# plt.show()
dpi = 86
plt.savefig("space_abs_{}.png".format(size), dpi=dpi, bbox_inches="tight")
colors = {
"simu": "blue",
"question": "orange",
"image": "green",
"audio": "red",
"video": "purple",
"other": "brown",
"map": "violet",
# "error":"grey",
"tasks": "olive",
}
markers = [".", "o", "x", "s", "*", "D", "p", ",", "<", ">", "^", "v", "1", "2", "3", "4"]
def plot_time_space(time_data, space_data, keys):
# assuming time_data and space_data are in same order!
marker = 0
for id in time_data:
for k in keys:
for i in range(len(time_data[id])):
print(time_data[id][i][k], space_data[id][i][k])
plt.plot(time_data[id][i][k], space_data[id][i][k], color=colors[k], marker=markers[marker])
marker += 1
plt.show()
# plt.cla()
# plt.clf()
# plt.close()
def group_locationbased_tasks(data):
for id in data:
for log in data[id]:
loc = {"space": 0, "time": 0}
for k in log:
if k in loc_keys:
for i in ["space", "time"]:
loc[i] += log[k][i]
log["tasks"] = loc
def plot_time_space_rel(combined, keys):
groups = defaultdict(list)
keys = list(keys)
keys.remove("other")
for i in loc_keys:
keys.remove(i)
keys.append("tasks")
ids = []
group_locationbased_tasks(combined)
for k in keys:
for id in sorted(combined):
if id not in whitelist:
continue
if not id in ids:
ids.append(id)
group = 0.0
count = 0
for item in combined[id]:
if k in item:
time = item[k]["time"] / 1000
distance = item[k]["space"]
if time > 0:
group += (distance / time)
count += 1
else:
print("div by zero", distance, time)
if count > 0:
groups[k].append(group / count)
else:
groups[k].append(0.0)
print(ids)
ind = np.arange(len(ids))
width = .7 / len(groups)
print(ind)
print(json.dumps(groups, indent=1))
bars = []
dpi = 200
plt.figure(figsize=(1280 / dpi, 720 / dpi))
fig, ax = plt.subplots()
for k in groups:
print(groups[k])
if not len(groups[k]):
groups[k].append(0)
ind = ind + (width)
bars.append(ax.bar((ind + width * len(groups) / 2), groups[k], width, color=colors[k]))
ax.set_xticks(ind + width / 2)
ax.set_xticklabels(list([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in ids]))
kmh = plt.hlines((1 / 3.6), 0.3, 4.2, linestyles="dashed", label="1 km/h", linewidths=1)
plt.legend(bars + [kmh], keys + [kmh.get_label()])
print(combined.keys(), ids)
print([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in ids])
# plt.show()
dpi = 200
plt.savefig("speed2_2019.png", dpi=dpi)
# plot_time_space_rel(temporal_data_rel, spatial_data_rel, keys)
# plot_data(combined, keys)
# plot_data(get_data_distance(store,relative_values=False), keys)


@@ -1,70 +0,0 @@
import logging
from datetime import datetime
from lxml import etree
from .loader import Loader
log = logging.getLogger(__name__)
NS = {'gpx':"http://www.topografix.com/GPX/1/1"}
class NeoCartLoader(Loader):
def load(self, file: str):
src = open(file, "r")
parser = etree.XMLParser(recover=True)
tree = etree.parse(src, parser=parser)
self.entries = []
for point in tree.xpath("//gpx:trkpt", namespaces=NS):
try:
self.entries.append(self.parse_point(point))
except ValueError as e:
print(e, etree.tostring(point, pretty_print=True).decode())
log.exception(e)
def parse_point(self, point):
raw_lat = point.xpath("@lat")[0]
if raw_lat.count(".") > 1:
log.warning(f"recreate lat/lon from: {raw_lat}")
log.warning(etree.tostring(point, pretty_print=True).decode())
start_offset = 4
x = raw_lat[start_offset:].index(".")
offset = start_offset + x
raw_lon = raw_lat[offset:]
raw_lat = raw_lat[:offset]
else:
raw_lon = point.xpath("@lon")[0]
lat = float(raw_lat)
lon = float(raw_lon)
times = point.xpath("gpx:time",namespaces=NS)
assert len(times) == 1
time = times[0].text
dt = datetime.strptime(time, "%Y-%m-%dT%H:%M:%SZ")
timestamp = int(dt.timestamp() * 1000) # python3.6 has no timestamp_ns (yet)
events = point.xpath(".//gpx:event",namespaces=NS)
assert 0 <= len(events) <= 1
event = {}
if events:
event = dict(events[0].attrib)
if events[0].tail and events[0].tail.strip():
try:
# base case: trailing 'geoid="0"/>'
key, v = events[0].tail.strip().split("=")
value = v.split('"')[1]
event[key] = value
except Exception:
event['__tail__'] = events[0].tail.strip()
return {
"location": {
"type": "Point",
"coordinates": [lon, lat]
},
"timestamp": timestamp,
"event": event,
"type": event['message'] if event else "location"
}
def get_entry(self) -> object:
for i in self.entries:
yield i


@@ -1,247 +0,0 @@
import json
import logging
from typing import List
from analysis import analyzers
from analysis.analyzers import get_renderer, render
from analysis.analyzers.analyzer import ResultStore
from analysis.analyzers.analyzer.default import write_logentry_count_csv, write_simulation_flag_csv
from analysis.analyzers.render import wip
from analysis.analyzers.render.default import LogEntryCountCSV, KMLRender, GeoJSON
from analysis.analyzers.render.wip import time_distribution, plot_data
from analysis.analyzers.settings import LogSettings, load_settings, parse_settings
from analysis.loaders import LOADERS
from analysis.util.processing import grep, run_analysis, src_file
from analysis.util.meta_temp import CONFIG_NAMES
logging.basicConfig(format='%(levelname)s %(name)s:%(message)s', level=logging.DEBUG)
log: logging.Logger = logging.getLogger(__name__)
logging.getLogger('requests').setLevel(logging.WARN)
logging.getLogger("urllib3").setLevel(logging.WARN)
logging.getLogger("shapely").setLevel(logging.WARN)
def urach_logs(log_ids, settings):
# return ["data/inst_{id}.{format}".format(id=log_id, format=settings.log_format) for log_id in log_ids]
return ["data/{id}.{format}".format(id=log_id, format=settings.log_format) for log_id in log_ids]
if __name__ == '__main__':
settings = {}
log_ids_gf = []
# settings: LogSettings = load_settings("biogames2.json")
# log_ids_urach: List[str] = urach_logs([
# # "34fecf49dbaca3401d745fb467",
# # "44ea194de594cd8d63ac0314be",
# # "57c444470dbf88605433ca935c",
# # "78e0c545b594e82edfad55bd7f",
# # "91abfd4b31a5562b1c66be37d9",
# # "597b704fe9ace475316c345903",
# # "e01a684aa29dff9ddd9705edf8",
# "597b704fe9ace475316c345903",
# "e01a684aa29dff9ddd9705edf8",
# "fbf9d64ae0bdad0de7efa3eec6",
# # "fbf9d64ae0bdad0de7efa3eec6",
# "fe1331481f85560681f86827ec", # urach
# # "fe1331481f85560681f86827ec"]
# "fec57041458e6cef98652df625",
# ]
# , settings)
# log_ids_gf = grep(["9d11b749c78a57e786bf5c8d28", # filderstadt
# "a192ff420b8bdd899fd28573e2", # eichstätt
# "3a3d994c04b1b1d87168422309", # stadtökologie
# "fe1331481f85560681f86827ec", # urach
# "96f6d9cc556b42f3b2fec0a2cb7ed36e" # oberelsbach
# ],
# "/home/clemens/git/ma/test/src",
# settings)
# log_ids = src_file("/home/clemens/git/ma/test/filtered_5_actions")
if False:
store: ResultStore = run_analysis(log_ids_gf, settings, LOADERS)
# store: ResultStore = run_analysis(log_ids, settings, LOADERS)
if False:
for r in get_renderer(analyzers.LocomotionActionAnalyzer):
r().render(store.get_all())
if False:
render(analyzers.LocationAnalyzer, store.get_all())
# print(json.dumps(store.serializable(), indent=1))
if False:
for cat in store.get_categories():
render(analyzers.ActivityMapper, store.get_category(cat), name=cat)
# render(analyzers.ProgressAnalyzer, store.get_all())
if False:
from analysis.analyzers.postprocessing import graph
g = graph.Cache(settings)
g.run(store)
if False:
# render(analyzers.SimulationOrderAnalyzer, store.get_all())
for cat in store.get_categories():
data = store.get_category(cat)
render(analyzers.SimulationOrderAnalyzer, data, name=cat)
if False:
write_logentry_count_csv(LogEntryCountCSV, store, render, analyzers)
if False:
write_simulation_flag_csv(store)
if False:
time_distribution(store)
if False:
# spatial_data = get_data_distance(store,relative_values=False)
# temporal_data = get_data(store,relative_values=False)
# spatial_data_rel = get_data_distance(store,relative_values=True)
# temporal_data_rel = get_data(store,relative_values=True)
# temporal_data_rel = json.load(open("temporal_rel.json"))
# spatial_data_rel = json.load(open("spatial_rel.json"))
# import IPython
# IPython.embed()
# print(json.dumps(get_all_data(store)))
# json.dump(get_all_data(store), open("combined.json", "w"))
# combined = get_all_data(store, sort=True, relative=True)
# json.dump(combined, open("combined_rel.json", "w"))
# combined = json.load(open("combined_rel.json"))
combined = json.load(open("combined_total.json"))
# plot_time_space_rel(combined, keys)
plot_data(combined, wip.keys)
if False:
def store(x):
pass
settings: LogSettings = load_settings("../oeb_kml.json")
#log_ids = src_file("/app/log_data/oeb/oeb_2016_path")
log_ids = src_file("/app/log_data/oeb/oeb_paths")
#log_ids = log_ids[0:10]
print(log_ids)
store: ResultStore = run_analysis(log_ids, settings, LOADERS, ResultStore(key_index=1))
print("render")
kml = GeoJSON()
fields = store.get_categories()
artifacts = {key: kml.render(store.get_category(key)) for key in fields}
print(artifacts)
print("done")
def artifact_to_length(filename):
g = json.load(open(filename))
from analysis.util.geo import calc_distance
return calc_distance(json.dumps(g), "features.0.geometry.coordinates")
def simplified_length(filename):
from analysis.util.geo import json_to_track,distance
g = json.load(open(filename))
track = json_to_track(json.dumps(g), "features.0.geometry.coordinates")
simplified = track.simplify(0.0002, preserve_topology=True)
from shapely.geometry import mapping
json.dump(mapping(simplified), open(f"{filename}.simplified.geojson","w"), indent=1)
return distance(simplified)
from collections import defaultdict
def get_lengths(artifacts, artifact_to_length=artifact_to_length):
stats = defaultdict(list)
for field in artifacts:
print(field, CONFIG_NAMES[field])
for i in artifacts[field]:
distance = artifact_to_length(i)
warn = "\tLONG!" if distance > 10000 else "\tSHORT!" if distance < 1000 else ""
print(f"\t{i}\t{distance}{warn}")
stats[field].append(distance)
return stats
stats = get_lengths(artifacts)
import numpy as np
def quart_1(x):
return np.percentile(x, 25)
def quart_2(x):
return np.percentile(x, 50)
def quart_3(x):
return np.percentile(x, 75)
def quart_4(x):
return np.percentile(x, 100)
def print_stats(stats):
fns = [np.size, np.min, np.max, np.mean, np.median]#, quart_1, quart_2, quart_3, quart_4]
names = "\t".join([x.__name__ for x in fns] + ["id","name"])
print(names)
for i in stats:
stat = [f"{fn(stats[i]):.2f}" for fn in fns]
print("\t".join(stat + [i, CONFIG_NAMES[i]]))
def plot_stats(stats, filtered_stats, suffix=""):
import matplotlib.pyplot as plt
keys = sorted(stats.keys())
names = [CONFIG_NAMES[i] for i in keys]
values = [stats[i] for i in keys]
values_filtered = [filtered_stats[i] for i in keys]
fig, ax = plt.subplots()
ax.boxplot(values, labels=names, showfliers=False, showmeans=True, meanline=True)
fig.savefig(f"/app/log_data/oeb/plots/plot_raw{suffix}.png")
fig, ax = plt.subplots()
ax.boxplot(values_filtered, labels=names, showfliers=False, showmeans=True, meanline=True)
fig.savefig(f"/app/log_data/oeb/plots/plot_filtered{suffix}.png")
fig, ax = plt.subplots()
agg_data = values + values_filtered
agg_labels = names + [f"filtered(…{i[-4:]})" for i in names]
ax.boxplot(agg_data, labels=agg_labels, showfliers=False, showmeans=True, meanline=True)
fig.savefig(f"/app/log_data/oeb/plots/plot_combined{suffix}.png")
MIN = 1000
MAX = 100000
def filter(stats):
stats_filtered = defaultdict(list)
for i in stats:
stats_filtered[i] = [x for x in stats[i] if MIN < x < MAX]
return stats_filtered
stats_filtered = filter(stats)
stats_simple = get_lengths(artifacts, artifact_to_length=simplified_length)
stats_filtered_simple = filter(stats_simple)
def summary(stats, stats_filtered, title):
print_stats(stats)
print(f"filter {MIN} < x < {MAX}")
print_stats(stats_filtered)
plot_stats(stats, stats_filtered, suffix=f"_{title}")
summary(stats, stats_filtered, "raw")
print("\nsimplified\n")
summary(stats_simple, stats_filtered_simple, "simplified")
if True:
settings: LogSettings = load_settings("time.json")
# log_ids = src_file("/app/log_data/oeb/oeb_2016_path")
log_ids = src_file("log_data/oeb/oeb_paths_host")
log_ids = src_file("/home/clemens/git/ma/test/src")
log_ids = src_file("/home/clemens/git/ma/project/log_data/neocartographer/index")
# log_ids = log_ids[0:10]
print(log_ids)
store: ResultStore = run_analysis(log_ids, settings, LOADERS, ResultStore(key_index=1))
results = {}
for cat in store.get_categories():
results[cat] = [result.get() for result in store.get_category(cat)]
with open("times_neo.json", "w") as out:
json.dump(results, out, indent=1)
####################
#for cat in store.get_categories():
# render(analyzers.ActivityMapper, store.get_category(cat), name=cat)
# for analyzers in analyzers:
# if analyzers.name() in ["LogEntryCount", "ActionSequenceAnalyzer"]:
# print(json.dumps(analyzers.result(), indent=2))
# for analyzers in analyzers:
# if analyzers.name() in ["BoardDuration"]:
# print(json.dumps(analyzers.result(), indent=2))
# print(analyzers.render())
# coords = analyzers[1].render()
# with open("test.js", "w") as out:
# out.write("coords = "+coords)


@@ -1,33 +0,0 @@
import json
import pyproj
from shapely.geometry import LineString, mapping
from shapely.ops import transform
from functools import partial
from analysis.util import json_path
def distance(track):
project = partial(
pyproj.transform,
pyproj.Proj(init='EPSG:4326'),
pyproj.Proj(init='EPSG:32633'))
return transform(project, track).length
def json_to_track(geojson, path, load=True):
if load:
geojson = json.loads(geojson)
return LineString(json_path(geojson, path))
def calc_distance(geojson: str, path="coordinates", load=True):
track = json_to_track(geojson, path, load)
return distance(track)
def calc_distance_simplified(geojson, path="coordinates", load=True):
track = json_to_track(geojson, path, load)
simplified = track.simplify(0.0002, preserve_topology=True)
return distance(simplified)
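
A usage sketch, given the definitions above. Assumptions: shapely and pyproj are installed, and pyproj still accepts the legacy `init=` keyword (pyproj 1.x/2.x; newer versions would need `pyproj.Transformer`). The two made-up points are roughly 1.3 km apart:

```
track = '{"type": "LineString", "coordinates": [[9.18, 48.78], [9.19, 48.79]]}'
print(f"{calc_distance(track):.0f} m")             # projected (EPSG:32633) length
print(f"{calc_distance_simplified(track):.0f} m")  # after Douglas-Peucker simplification
```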


@@ -1,28 +0,0 @@
from datetime import datetime as dt
SEP = "\",\""
LS = "\""
LE = "\""
NL = LS + "\n" + LE
def flat_dict_to_csv(data):
keys = set()
for i in data:
keys = keys.union(set(i.keys()))
keys = sorted(keys)
out = SEP.join(keys)
for i in data:
out += NL + SEP.join([escape(i.get(j, "")) for j in keys])
return LS + out + LE
def escape(value):
val = str(value)
val = val.replace(".", ",")
return val
def pretty_ts(timestamp, fmt="%Y-%m-%d %H:%M:%S"):
d = dt.fromtimestamp(int(timestamp)/1000.0)
return d.strftime(fmt)
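
A quick sketch of what these helpers produce: `escape` swaps `.` for `,` (decimal commas for German spreadsheet imports), and `pretty_ts` expects millisecond timestamps and formats them in local time:

```
rows = [
    {"name": "t1", "duration": 12.5},
    {"name": "t2", "distance": 300},
]
print(flat_dict_to_csv(rows))
# "distance","duration","name"
# "","12,5","t1"
# "300","","t2"
print(pretty_ts(1546300800000))  # e.g. "2019-01-01 00:00:00", depending on timezone
```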


@@ -1,69 +0,0 @@
import logging
from typing import List
from analysis.analyzers.analyzer import ResultStore, Analyzer
from analysis.analyzers.settings import LogSettings
log: logging.Logger = logging.getLogger(__name__)
def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
loader = loaders[settings.log_format]()
try:
loader.load(logfile)
except BaseException as e:
raise RuntimeError(e, logfile)
analyzers: List[Analyzer] = []
log.debug("build analyzers")
for analyzer in settings.analyzers:
analyzers.append(analyzer(settings))
log.debug("process entries")
for entry in loader.get_entry():
for analyzer in analyzers:
try:
if analyzer.process(entry):
break
except KeyError as e:
log.exception(e)
return analyzers
def run_analysis(log_ids: list, settings, loaders, result_store=None):
if not result_store:
result_store = ResultStore()
store: ResultStore = result_store
for log_id in log_ids:
log.info("LOG_ID: "+ str(log_id))
for analysis in process_log(log_id, settings, loaders):
log.info("* Result for " + analysis.name())
analysis.result(store, name=log_id)
return store
def load_ids(name: str):
log_ids = []
with open(name) as src:
for line in src:
line = line.strip()
log_ids.append(line)
return log_ids
def grep(log_ids, source, settings):
logs = []
with open(source) as src:
lines = src.readlines()
for id in log_ids:
for line in lines:
if id in line:
logs.append(line.strip())
return logs
def src_file(filename):
log_ids = []
with open(filename) as src:
for line in src:
line = line.strip()
log_ids.append(line)
return log_ids
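
The loop in `process_log` gives analyzers veto power: the first `process()` call that returns `True` blocks the remaining analyzers for that entry, which is how `WhitelistAnalyzer`-style filters short-circuit the chain. A self-contained sketch with stub analyzers:

```
class Blocker:
    def process(self, entry):
        # True → stop processing this entry here
        return entry.get("type") == "noise"

class Counter:
    def __init__(self):
        self.count = 0

    def process(self, entry):
        self.count += 1
        return False

analyzers = [Blocker(), Counter()]
for entry in [{"type": "location"}, {"type": "noise"}, {"type": "board"}]:
    for analyzer in analyzers:
        if analyzer.process(entry):
            break

print(analyzers[1].count)  # 2 — the "noise" entry never reached Counter
```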


@@ -3,19 +3,18 @@ from typing import List
from .analyzer import Analyzer, Result
from .analyzer.biogames import BoardDurationAnalyzer, SimulationRoundsAnalyzer, ActivationSequenceAnalyzer, \
BiogamesCategorizer, ActivityMapper, BiogamesStore, InstanceConfig, SimulationOrderAnalyzer, SimulationCategorizer, \
SimulationFlagsAnalyzer, GameField_InstanceCategorizer, BiogamesTasks, BiogamesDuration, GameFieldInstanceGroup
SimulationFlagsAnalyzer
from .analyzer.default import LogEntryCountAnalyzer, LocationAnalyzer, LogEntrySequenceAnalyzer, ActionSequenceAnalyzer, \
CategorizerStub, Store, ProgressAnalyzer, SimpleCategorizer, WhitelistAnalyzer
CategorizerStub, Store, ProgressAnalyzer
from .analyzer.locomotion import LocomotionActionAnalyzer, CacheSequenceAnalyzer
from .analyzer.mask import MaskSpatials
from .render import Render
from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender, \
ActivityMapperRender, StoreRender, SimulationOrderRender, SimulationGroupRender
from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender, LogEntryCountAnalyzerPlot, \
LogEntryCountCSV, KMLRender, GeoJSON
LogEntryCountCSV
from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
LocomotionActionRatioRender
#from .render.wip import get_all_data, plot_time_space_rel
__FALLBACK__ = PrintRender
__MAPPING__ = {
@@ -42,7 +41,6 @@ __MAPPING__ = {
LocationAnalyzer: [
TrackRender,
HeatMapRender,
KMLRender,
],
ActivityMapper: [
ActivityMapperRender
@@ -54,7 +52,7 @@ __MAPPING__ = {
StoreRender
],
SimulationOrderAnalyzer: [
#JSONRender,
JSONRender,
# SimulationOrderRender,
SimulationGroupRender
]


@@ -2,7 +2,7 @@ import logging
from collections import KeysView
from typing import Type, Sized, Collection
from analysis.analyzers.settings import LogSettings
from analyzers.settings import LogSettings
log: logging.Logger = logging.getLogger(__name__)
@@ -29,16 +29,13 @@ class Result:
class ResultStore:
"""Store Results"""
def __init__(self, store_entry: Type[Collection] = list, store_action: callable = list.append, key_index=None) -> None:
def __init__(self, store_entry: Type[Collection] = list, store_action: callable = list.append) -> None:
self.store = {}
self.category = None
self.entry: Type[Collection] = store_entry
self.action: callable = store_action
self.key_index = key_index
def new_category(self, key) -> None:
if not self.key_index is None:
key = key[self.key_index]
self.category = key
if not key in self.store:
self.store[key] = self.entry()
@@ -55,7 +52,7 @@ class ResultStore:
:return:
"""
result = []
for key in sorted(self.store):
for key in self.store:
result += self.store[key]
return result


@@ -3,8 +3,8 @@ from collections import defaultdict, namedtuple, OrderedDict
from types import SimpleNamespace
from typing import List, NamedTuple
from analysis.util import json_path, combinate
from analysis.util.download import download_board, get_board_data
from util import json_path, combinate
from util.download import download_board, get_board_data
from . import Result, LogSettings, Analyzer, ResultStore
from .default import CategorizerStub, Store
@@ -133,48 +133,6 @@ class BiogamesCategorizer(CategorizerStub): # TODO: refactor
return False
class GameField_InstanceCategorizer(CategorizerStub): # TODO: refactor
__name__ = "BiogamesCategorizer"
def __init__(self, settings: LogSettings):
super().__init__(settings)
def process(self, entry: dict) -> bool:
if self.key == "default":
if entry[self.settings.type_field] in self.settings.custom['instance_start']:
try:
self.key = json_path(entry, self.settings.custom['instance_config_id']) + "_" + entry[self.settings.custom['instance_id']] + "_" + str(entry["timestamp"])
except KeyError as e:
print(entry)
raise e
return False
class GameFieldInstanceGroup(Analyzer):
__name__ = "BiogamesGamefieldInstanceGroupAnalizer"
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.metadata = None
def process(self, entry: dict) -> bool:
if not self.metadata:
if entry[self.settings.type_field] in self.settings.custom['instance_start']:
try:
self.metadata = {"instance_config_id": json_path(entry, self.settings.custom['instance_config_id']),
"instance_id": entry[self.settings.custom['instance_id']],
"timestamp": str(entry["timestamp"]),
"player_group_name": entry['player_group_name']
}
except KeyError as e:
print(entry)
raise e
return False
def result(self, store: ResultStore, **kwargs) -> None:
store.add(Result(type(self), self.metadata))
class ActivityMapper(Analyzer):
__name__ = "ActivityMapper"
classes = {
@@ -182,16 +140,6 @@ class ActivityMapper(Analyzer):
"sequence.question.": "question",
"error": "error"
}
colors = {
"simu": "blue",
"question": "orange",
"image": "green",
"audio": "red",
"video": "purple",
"other": "brown",
"map": "violet",
"error": "grey"
}
def __init__(self, settings: LogSettings) -> None:
super().__init__(settings)
@@ -200,7 +148,6 @@ class ActivityMapper(Analyzer):
self.last_board = {}
self.last_board_type = "other"
self.last_coordinate = None
self.last_timestamp = None
self.tracks = []
self.track = None
self.instance_config_id: str = None
@@ -210,35 +157,29 @@ class ActivityMapper(Analyzer):
self.State: NamedTuple = namedtuple("State", ["sequence", "events", "track", "timestamp"])
def result(self, store: ResultStore, **kwargs) -> None:
for board in self.timeline:
if board[self.settings.type_field] in self.settings.boards:
if board["extra_data"]["activity_type"] == "simu":
board["image"] = "simu.png"
continue
try:
local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
self.settings.source)
if local_file:
board['image'] = local_file
else:
raise ValueError
except Exception as e:
board['image'] = "ERROR_FETCHING_FILE"
logger.error("error downloading board! %s %s %s", self.instance_config_id, board["sequence_id"],
board["board_id"])
logger.exception(e)
else:
board["image"] = "map.png"
store.add(Result(type(self), {
"type": "FeatureCollection",
"features": self.tracks,
"properties": {
"instance": self.instance_config_id,
"boards": self.timeline,
"colors": self.colors,
},
}))
def result_old(self, store: ResultStore) -> None:
instance_config_id = self.instance_config_id
for active_segment in self.store: # active_segment → sequence or None (None → map active)
seq_data_url = "/game2/editor/config/{config_id}/sequence/{sequence_id}/".format(
config_id=instance_config_id,
sequence_id=active_segment.sequence,
)
source = self.settings.source
seq_data = source._get(seq_data_url).json()
# TODO: use sequence names
logger.warning(seq_data)
for event in active_segment.events:
if event[self.settings.type_field] in self.settings.boards:
sequence_id = active_segment.sequence
board_id = event["board_id"]
local_file = download_board(board_id, instance_config_id, sequence_id, source)
if local_file is not None:
event["image"] = local_file[16:]
store.add(Result(type(self), {"instance": instance_config_id, "store": [x._asdict() for x in self.store]}))
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), {"instance": self.instance_config_id, "track": self.tracks, "boards": self.timeline}))
def process(self, entry: dict) -> bool:
if self.track is None:
@@ -254,7 +195,6 @@ class ActivityMapper(Analyzer):
board_data = get_board_data(self.settings.source, self.instance_config_id, entry["sequence_id"],
entry["board_id"])
entry["extra_data"] = board_data
entry["extra_data"]["activity_type"] = self.last_board_type
entry['coordinate'] = self.new_coordinate()
self.timeline.append(entry)
return False
@@ -262,15 +202,12 @@ class ActivityMapper(Analyzer):
def update_board_type(self, entry):
type = self.classify_entry(entry)
if not type == self.last_board_type:
self.add_track(activity_type=self.last_board_type, end_timestamp=entry['timestamp'])
self.add_track(activity_type=self.last_board_type,end_timestamp=entry['timestamp'])
self.last_board_type = type
def classify_entry(self, entry):
entry_type = entry[self.settings.type_field]
if self.filters.end(entry):
data = {"extra_data": {"activity_type": "map"}, "coordinate": self.new_coordinate()}
data.update(entry)
self.timeline.append(data)
return "map"
if not entry_type in self.settings.boards:
return self.last_board_type
@@ -281,9 +218,9 @@ class ActivityMapper(Analyzer):
return self.classes[pattern]
if board_data['has_video']:
return "video"
elif board_data['has_audio']:
elif board_data['has_audio']:
return "audio"
elif board_data['has_image']:
elif board_data['has_image']:
return "image"
return "other"
@@ -292,120 +229,18 @@ class ActivityMapper(Analyzer):
def add_location(self, entry):
coordinates = json_path(entry, self.settings.custom['coordinates'])
self.track["geometry"]['coordinates'].append(coordinates)
self.track['properties']['coordTimes'].append(entry['timestamp']) #FIXME
self.track['coordinates'].append(coordinates)
self.last_coordinate = coordinates
self.last_timestamp = entry['timestamp']
def add_track(self, **props):
self.track['properties'].update(props)
if "activity_type" in self.track['properties'] and self.track['properties']['activity_type'] in self.colors:
if not "stroke" in self.track['properties']:
self.track['properties']['stroke'] = self.colors[self.track['properties']['activity_type']]
self.tracks.append(self.track)
self.track = self.new_track(props['end_timestamp'])
if self.last_coordinate:
self.track["geometry"]['coordinates'].append(self.last_coordinate)
self.track['properties']['coordTimes'].append(self.last_timestamp)
def new_track(self, timestamp):
return {"type": "Feature", "geometry": {"type": "LineString", "coordinates": []}, "properties": {'start_timestamp': timestamp, 'coordTimes': []}}
return {"type": "LineString", "coordinates": [], "properties": {'start_timestamp': timestamp}}
class BiogamesDuration(Analyzer):
__name__ = "BiogamesDuration"
def __init__(self, settings: LogSettings) -> None:
super().__init__(settings)
self.first = None
self.last = None
self.sequences = defaultdict(list)
self.filters = SimpleNamespace()
self.filters.start = lambda entry: combinate(self.settings.custom["sequences2"]["start"], entry)
self.filters.end = lambda entry: combinate(self.settings.custom["sequences2"]["end"], entry)
self.sequence = None
self.sequence_start = None
self.cache = "None"
def process(self, entry: dict) -> bool:
if not self.first:
self.first = entry['timestamp']
self.last = entry['timestamp']
if not self.sequence and self.filters.start(entry):
self.sequence = entry['sequence_id']
self.sequence_start = entry['timestamp']
elif self.sequence and self.filters.end(entry):
self.sequences[f"{self.cache}+{self.sequence}"].append((self.sequence_start, entry['timestamp']))
self.sequences[f"only+{self.sequence}"].append((self.sequence_start, entry['timestamp']))
self.sequence = None
self.sequence_start = 0
self.cache = "None"
if entry['@class'] in self.settings.sequences['start']:
if entry['cache']:
self.cache = entry['cache']['@id']
else:
self.cache = "None"
return False
def result(self, store: ResultStore, name=None) -> None:
results = {"start": self.first, "end": self.last, "duration": self.last - self.first}
for sid in self.sequences:
seq = self.sequences[sid]
#print([end-start for start,end in seq])
results[f"sequence_{sid}_duration"] = sum([end-start for start,end in seq])
store.add(Result(type(self), results))
class BiogamesTasks(Analyzer):
__name__ = "BiogamesTasks"
DATA_CLASSES = ("de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion", )
BOARD_CLASSES = ("de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry",)
def __init__(self, settings: LogSettings) -> None:
super().__init__(settings)
self.settings: LogSettings = settings
self.tasks = {}
self.first_board_view = {}
self.last_board = None
self.instance_config_id: str = None
def process(self, entry: dict) -> bool:
if self.instance_config_id is None:
if entry[self.settings.type_field] in self.settings.custom['instance_start']:
self.instance_config_id = json_path(entry, self.settings.custom['instance_config_id'])
if self.is_task(entry):
task_id = entry['answers']['@id']
if task_id not in self.first_board_view:
logger.error("task_id is not in self.first_board_view!", task_id, entry)
else:
entry['__duration'] = entry['timestamp'] - self.first_board_view[task_id]['timestamp']
self.tasks[self.ids(task_id)] = entry
if self.is_board(entry):
self.first_board_view[entry['board_id']] = entry
return False
def result(self, store: ResultStore, name=None) -> None:
results = {}
for ids in self.tasks:
task = self.tasks[ids]
for action in task['selected_actions']:
if self.is_dollar_action(action):
results[ids] = {"duration": task['__duration'], "result": action['increment']}
store.add(Result(type(self), results))
def ids(self, task_id):
task = self.first_board_view[task_id]
return f"{self.instance_config_id}_{task['sequence_id']}_{task['board_id']}"
def is_task(self, entry) -> bool:
return entry['@class'] in self.DATA_CLASSES
def is_board(self, entry) -> bool:
return entry['@class'] in self.BOARD_CLASSES
def is_dollar_action(self, action):
return action['@class'] in ("de.findevielfalt.games.game2.instance.action.IncrementDiversityDollarAction",)
class BiogamesStore(Store):
__name__ = "BiogamesStore"
@@ -443,8 +278,8 @@ class InstanceConfig(Analyzer):
print(entry)
self.store["instance_id"] = json_path(entry, self.settings.custom["instance_config_id"])
def result(self, store: ResultStore, name=None):
store.add(Result(type(self), dict(self.store), name=name))
def result(self, store: ResultStore):
store.add(Result(type(self), dict(self.store)))
class SimulationOrderAnalyzer(Analyzer):
@@ -455,8 +290,8 @@ class SimulationOrderAnalyzer(Analyzer):
self.store = defaultdict(lambda: -1) # TODO verify
self.order = []
def result(self, store: ResultStore, name=None) -> None:
store.add(Result(type(self), [self.store[sim] for sim in self.order], name=name))
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), [self.store[sim] for sim in self.order]))
def process(self, entry: dict) -> bool:
entry_type = entry[self.settings.type_field]
@@ -470,7 +305,7 @@
class SimulationCategorizer(CategorizerStub): # TODO: refactor categorizer
__name__ = "SimulationCategorizer" # TODO: rename -.- (InstanceConfigIDCategorizer)
__name__ = "SimulationCategorizer"# TODO: rename -.- (InstanceConfigIDCategorizer)
def process(self, entry: dict) -> bool:
if self.key == "default":
@@ -499,3 +334,5 @@ class SimulationFlagsAnalyzer(Analyzer):
def result(self, store: ResultStore, name=None) -> None:
store.add(Result(type(self), self.store, name=name))


@@ -0,0 +1,139 @@
import logging
from collections import defaultdict, OrderedDict
from util import json_path
from . import Result, LogSettings, Analyzer, ResultStore
class LocationAnalyzer(Analyzer):
"""
store spatial log entries
"""
__name__ = "Location"
log = logging.getLogger(__name__)
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.entries = []
def result(self, store: ResultStore) -> None:
self.log.debug(len(self.entries))
store.add(Result(type(self), list(self.entries)))
def process(self, entry: dict) -> bool:
if entry[self.settings.type_field] in self.settings.spatials:
self.entries.append(entry)
# self.log.debug(len(self.entries))
return False
class LogEntryCountAnalyzer(Analyzer):
#TODO: more flexible, e.g. min/max lat/long
"""
count occurrences of log entry types
"""
__name__ = "LogEntryCount"
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), dict(self.store)))
def process(self, entry: dict) -> bool:
self.store[entry[self.settings.type_field]] += 1
return False
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.store = defaultdict(lambda: 0)
class LogEntrySequenceAnalyzer(Analyzer):
"""
store sequence of all log entry types
"""
__name__ = "LogEntrySequence"
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), list(self.store)))
def process(self, entry: dict) -> bool:
entry_type = entry[self.settings.type_field]
self.store.append(entry_type)
return False
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.store = []
class ActionSequenceAnalyzer(LogEntrySequenceAnalyzer):
"""
find sequence of non-spatial log entry types
"""
__name__ = "ActionSequenceAnalyzer"
def process(self, entry: dict) -> bool:
entry_type = entry[self.settings.type_field]
if entry_type in self.settings.spatials:
return False
self.store.append(entry_type)
return False
class CategorizerStub(Analyzer):
"""
generate a new Category in a ResultStore
"""
def process(self, entry: dict) -> bool:
raise NotImplementedError()
__name__ = "Categorizer"
def result(self, store: ResultStore, name=None) -> None:
store.new_category(self.key)
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.key = "default"
class Store(Analyzer):
"""
Store the entire log
"""
__name__ = "Store"
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), list(self.store)))
def process(self, entry: dict) -> bool:
self.store.append(entry)
return False
def __init__(self, settings: LogSettings):
super().__init__(settings)
self.store: list = []
class ProgressAnalyzer(Analyzer):
"""track spatial and ingame progress"""
__name__ = "ProgressAnalyzer"
def __init__(self, settings: LogSettings) -> None:
super().__init__(settings)
self.spatial = OrderedDict()
self.board = OrderedDict()
def result(self, store: ResultStore) -> None:
store.add(Result(type(self), {"spatials": self.spatial, "boards": self.board}))
def process(self, entry: dict) -> bool:
if entry[self.settings.type_field] in self.settings.spatials:
self.spatial[entry["timestamp"]] = {
'timestamp': entry['timestamp'],
'coordinates': json_path(entry, "location.coordinates"),
'accuracy': entry['accuracy']
}
if entry[self.settings.type_field] in self.settings.boards:
self.board[entry["timestamp"]] = entry
return False


@@ -1,4 +1,4 @@
from analysis import util
import util
from . import Analyzer, LogSettings, Result, ResultStore
@@ -41,7 +41,7 @@ class LocomotionActionAnalyzer(Analyzer):
self.current_cache = None
self.last = None
def result(self, store: ResultStore, **kwargs) -> None:
def result(self, store: ResultStore) -> None:
if self.last is not None:
if self.current_cache is None:
self.locomotion.append(self.last - self.cache_time)
@@ -50,14 +50,7 @@
self.last = None
locomotion = sum(self.locomotion)
action = sum(self.actions)
if action == 0:
action = 42
print("Division by zero") #FIXME
total = locomotion + action
if not self.last_timestamp:
self.last_timestamp = 0
if not self.instance_start:
self.instance_start = 0
store.add(Result(type(self), {
'locomotion_sum': locomotion,
'action_sum': action,


@@ -0,0 +1,17 @@
from typing import List
from .. import Result
class Render:
result_types = []
def render(self, results: List[Result], name=None):
raise NotImplementedError()
def filter(self, results: List[Result]):
if len(self.result_types) == 0:
return results
return filter(self.__filter__, results)
def __filter__(self, obj: Result):
return obj.analysis() in self.result_types


@@ -1,5 +1,4 @@
import json
import tempfile
from collections import defaultdict
from typing import List, Tuple
@@ -10,13 +9,8 @@ from scipy.interpolate import interp1d
import networkx as nx
import itertools
from analysis.analyzers import Store, BiogamesStore, SimulationOrderAnalyzer, LocationAnalyzer, BiogamesDuration, \
BiogamesTasks, GameFieldInstanceGroup
from analysis.analyzers.analyzer import ResultStore
from analysis.analyzers.render.default import GeoJSON
from analysis.util.geo import calc_distance, calc_distance_simplified
from analysis.util.meta_temp import CONFIG_NAMES, TASK_NAMES, CACHE_NAMES, SEQUENCE_NAMES
from analysis.util.output import flat_dict_to_csv, pretty_ts
from analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
from util.meta_temp import CONFIG_NAMES
from . import Render
from .. import Result, SimulationRoundsAnalyzer, BoardDurationAnalyzer, ActivityMapper
@@ -141,16 +135,16 @@ class BoardDurationHistRender(Render):
class BoardDurationBoxRender(Render):
result_types = [BoardDurationAnalyzer]
def render(self, results: List[Result], name=None) -> [str]:
def render(self, results: List[Result], name=None):
data = defaultdict(list)
for result in self.filter(results):
for board in result.get():
get = result.get()
for board in get:
duration = board['active'] if 'active' in board else 0
data[board['id']].append(duration)
data_tuples = [(key, data[key]) for key in sorted(data)]
data_tuples = sorted(data_tuples, key=lambda x: sum(x[1]))
plot(data_tuples, name=name)
return [name]
plot(data_tuples)
class ActivityMapperRender(Render):
@@ -158,14 +152,11 @@ class ActivityMapperRender(Render):
def render(self, results: List[Result], name=None):
print(os.getcwd())
files = []
for result in self.filter(results):
data = result.get()
path = os.path.join("/tmp", data["properties"]['instance'] + "_" + str(name) + ".json")
with open(path, "w") as out:
json.dump(data, out, indent=1)
files.append(path)
return files
with open(os.path.join("static", "progress", "data", data['instance']), "w") as out:
json.dump(data["store"], out, indent=1)
return "ok"
class StoreRender(Render):
@ -194,73 +185,9 @@ class SimulationOrderRender(Render):
class SimulationGroupRender(Render):
def render(self, results: List[Result], name=None):
# data = [r.get() for r in self.filter(results)]
data = []
for r in self.filter(results):
raw = r.get()
if len(raw) < 6:
raw = [0] + raw
data.append(raw)
data = [r.get() for r in self.filter(results)]
print(name, len(data))
# graph_fit(list(data), name=name)
graph_plot(list(data), ylabel="simulation retries", title="sequential simulation retries", rotation=None,
name=name)
graph_plot(list(data), ylabel="simulation retries", title="sequential simulation retries", rotation=None, name=name)
#graph_fit(list(data), name=name)
result_types = [SimulationOrderAnalyzer]
class OEBRender(Render):
result_types = [LocationAnalyzer, BiogamesTasks, BiogamesDuration, GameFieldInstanceGroup]
timestamp_fields = ("timestamp", "start", "end")
def render(self, results: List[Result], name=None) -> dict:
data = {}
for r in self.filter(results):
if r.analysis() is LocationAnalyzer:
geojson = GeoJSON()
json = geojson.make_geojson(r.get())
data[f"{r.analysis().__name__}__distance"] = calc_distance(json, "features.0.geometry.coordinates", load=False)
data[f"{r.analysis().__name__}__distance_simplified"] = calc_distance_simplified(json, "features.0.geometry.coordinates", load=False)
else:
for i in r.get():
a = r.analysis().__name__
value = r.get()[i]
if i in self.timestamp_fields:
value = pretty_ts(value)
key = f"{a}__{i}"
key = self.replace(key, i)
if type(value) is dict:
for j in value:
data[key+"__"+j] = value[j]
else:
data[key] = value
return data
def render_store(self, store: ResultStore, name=None) -> str:
data = []
for category in store.get_categories():
data.append(self.render(store.get_category(category)))
#import json
#print(json.dumps(data, indent=1))
csv = flat_dict_to_csv(data)
#print(csv)
if name:
filename = str(name) + ".csv"
else:
filename = "/tmp/biogames" + ".csv"
try:
with open(filename, "w") as out:
out.write(csv)
except PermissionError as e:
raise PermissionError(e, filename)
return filename
def replace(self, key, i):
if i in TASK_NAMES:
key = f"{TASK_NAMES[i]} ({key})"
if "sequence_" in i:
sid = i.split("_")[1]
cache, seq = sid.split("+")
cache = CACHE_NAMES.get(cache, cache)
seq = SEQUENCE_NAMES.get(seq, seq)
key = f"{cache}->{seq} {sid} duration"
return key

View File

@ -0,0 +1,86 @@
import json
import logging
from typing import List
import matplotlib.pyplot as plt
from analyzers import LogEntryCountAnalyzer
from . import Render, Result
from .. import LocationAnalyzer
log = logging.getLogger(__name__)
class PrintRender(Render):
def render(self, results: List[Result], name=None):
print("\t" + "\n\t".join([str(r) for r in results]))
class JSONRender(Render):
def render(self, results: List[Result], name=None):
print(json.dumps([r.get() for r in self.filter(results)], indent=1))
class TrackRender(Render):
result_types = [LocationAnalyzer]
def render(self, results: List[Result], name=None):
data = []
log.debug(results)
for result in self.filter(results):
if len(result.get()) > 0:
data.append(
[[entry['location']['coordinates'][1], entry['location']['coordinates'][0]] for entry in
# TODO: configurable
result.get()])
dumps = json.dumps(data)
with open("track_data.js", "w") as out:
out.write("tracks=" + dumps + ";")
return dumps
class HeatMapRender(TrackRender):
weight = 0.01
def render(self, results: List[Result], name=None):
raw = super(HeatMapRender, self).render(results)
data = []
for session in json.loads(raw):
data += [(entry[0], entry[1], self.weight) for entry in session]
dumps = json.dumps(data)
with open('heat_data.js', 'w') as out:
out.write("coords = " + dumps + ";")
return dumps
class LogEntryCountAnalyzerPlot(Render):
result_types = [LogEntryCountAnalyzer]
def render(self, results: List[Result], name=None):
raw_data = list(self.filter(results))[0].get()
print(raw_data)
labels = []
data = []
for x in sorted(raw_data.items()):
labels.append(str(x[0]).split(".")[-1])
data.append(x[1])
plt.bar(range(len(data)), list(data))
plt.xticks(range(len(data)), labels, rotation="vertical")
plt.tight_layout()
name = "plots/{}.png".format(name)
plt.savefig(name)
plt.cla()
plt.clf()
plt.close()
class LogEntryCountCSV(Render):
result_types = [LogEntryCountAnalyzer]
summary = None
def render(self, results: List[Result], name=None):
if self.summary is None:
return
for result in self.filter(results):
raw_data = result.get()
self.summary[name] = raw_data

View File

@ -1,17 +1,13 @@
import json
import logging
import sys
from clients.webclients import CLIENTS
from sources import SOURCES
log: logging.Logger = logging.getLogger(__name__)
def load_source(config):
if config["type"] in CLIENTS:
source = CLIENTS[config["type"]](**config)
source.login()
if config["type"] in SOURCES:
source = SOURCES[config["type"]]()
source.connect(**config)
return source
else:
log.warn(f"client {config['type']} not found!")
class LogSettings:
@ -32,15 +28,13 @@ class LogSettings:
self.boards = json_dict['boards']
for mod in json_dict['analyzers']:
for name in json_dict['analyzers'][mod]:
print(mod, name, getattr(sys.modules[mod], name))
print(mod, name)
self.analyzers.append(getattr(sys.modules[mod], name))
self.sequences = json_dict['sequences']
if 'custom' in json_dict:
self.custom = json_dict['custom']
if "source" in json_dict:
self.source = load_source(json_dict['source'])
if "render" in json_dict:
self.render = json_dict['render']
def __repr__(self):
return str({
@ -57,7 +51,3 @@ class LogSettings:
def load_settings(file: str) -> LogSettings:
return LogSettings(json.load(open(file)))
def parse_settings(config: str) -> LogSettings:
return LogSettings(json.loads(config))

View File

@ -14,8 +14,7 @@
"analyzers": {
"analyzers": [
"SimulationCategorizer",
"SimulationOrderAnalyzer",
"ActivityMapper"
"SimulationFlagsAnalyzer"
]
},
"dis":[
@ -24,7 +23,6 @@
"LogEntryCountAnalyzer",
"SimulationOrderAnalyzer",
"ProgressAnalyzer",
"SimulationCategorizer",
"InstanceConfig"],
"disabled_analyzers": [
"LocomotionActionAnalyzer",
@ -67,19 +65,14 @@
"action":"PAUSE"
}
},
"coordinates": "location.coordinates",
"metadata":{
"timestamp": "timestamp",
"gamefield": "instance_id",
"user": "player_group_name"
}
"coordinates": "location.coordinates"
},
"source":{
"type": "Biogames",
"url": "http://0.0.0.0:5000/game2/instance/log/list/",
"login_url": "http://localhost:5000/game2/auth/json-login",
"username": "ba",
"password": "853451",
"username": "dev",
"password": "dev",
"host":"http://0.0.0.0:5000"
}
}

View File

View File

@ -1,158 +0,0 @@
import json
import logging
import os
import shutil
import tempfile
import typing
import requests
log: logging.Logger = logging.getLogger(__name__)
class Client:
host: str = ""
cookies: typing.Dict[str, str] = {}
headers: typing.Dict[str, str] = {}
def url(self, path):
if self.host:
return self.host + path
return path
def get(self, url, **kwargs) -> requests.models.Response:
log.info("GET " + str(url))
return requests.get(self.url(url), cookies=self.cookies, headers=self.headers, **kwargs)
def post(self, url, data, **kwargs) -> requests.models.Response:
log.info("POST " + str(url))
return requests.post(self.url(url), data, cookies=self.cookies, headers=self.headers, **kwargs)
def download_file(self, url, target, **kwargs) -> bool:
with open(target, "wb") as out:
try:
download = self.get(url, stream=True, **kwargs)
shutil.copyfileobj(download.raw, out)
except Exception as e:
log.exception(e)
os.remove(target)
return False
return True
def download_files(self, urls, **kwargs) -> tempfile.TemporaryDirectory:
target = tempfile.TemporaryDirectory()
for path in urls:
filename = os.path.join(target.name, path.split("/")[-1])
self.download_file(path, filename, **kwargs)
return target
def login(self):
pass #TODO
def list(self):
pass #TODO
class BiogamesClient(Client):
config_fields: typing.Dict[str, typing.List[str]] = {
'login': ('username', 'password', 'host'),
'session': ('sessionid', 'csrftoken', 'host'),
}
login_url: str = "/game2/auth/json-login"
list_url: str = "/game2/instance/log/list/"
headers: typing.Dict[str, str] = {'Accept': 'application/json'}
def __init__(self, **kwargs):
match = {j: all([i in kwargs for i in self.config_fields[j]]) for j in self.config_fields}
valid = [x for x in match if match[x]]
if not valid:
raise ValueError("missing parameter (" + str(self.config_fields) + ")")
self.config = kwargs
self.cookies = {}
self.host = self.config['host']
if 'session' in valid:
self.cookies = kwargs
def login(self) -> bool:
csrf_request = self.get(self.list_url)
if not csrf_request.ok:
log.exception(ConnectionError("Unable to obtain CSRF token (" + str(csrf_request) + ")"))
return False
if 'csrftoken' not in self.cookies:
self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
login_payload = {
'username': self.config['username'],
'password': self.config['password'],
'next': '',
'csrfmiddlewaretoken': 'csrftoken',
}
login = self.post(self.login_url, json.dumps(login_payload))
if not login.ok:
log.exception(ConnectionError("Unable to authenticate", login, login.text))
return False
self.cookies['sessionid'] = login.cookies['sessionid']
print(self.cookies)
return True
def list(self) -> dict:
print(self.cookies)
logs = self.get(self.list_url)
if not logs.ok:
raise ConnectionError("HTTP fail", logs, logs.text)
return logs.json()
def load_all_logs(self) -> tempfile.TemporaryDirectory:
return self.download_files([i["file_url"] for i in self.list()])
class GeogamesClient(Client):
config_fields = ("host",)
def __init__(self, **kwargs):
for field in self.config_fields:
if field not in kwargs:
raise ValueError(f"missing parameter: {field}")
self.host = kwargs['host']
self.path = "neocartographer"
self.config = kwargs
def login(self):
return True
def list(self):
logs = self.get(self.path)
data = logs.json()
prepared_logs = []
for log in data:
players = self.get(f"{self.path}/{log['name']}/").json()
for player in players:
prepared_logs.append({
'@id': f"{log['name']}/{player['name']}",
'start_date': player['mtime'],
'player_group_name': player['name'],
'file_url': f"{self.path}/{log['name']}/{player['name']}",
})
return prepared_logs
def download_files(self, urls, **kwargs) -> tempfile.TemporaryDirectory:
target = tempfile.TemporaryDirectory()
for path in urls:
filename = os.path.join(target.name, "-".join
(path.split("/")[-2:]))
self.download_file(path, filename, **kwargs)
return target
CLIENTS: typing.Dict[str, typing.Type[Client]] = {
"Biogames": BiogamesClient,
"Geogames": GeogamesClient,
}
if __name__ == '__main__':
# c = BiogamesClient(host="http://biodiv", username="ba", password="853451")
# print(c.login())
# print(json.dumps(c.list(), indent=1))
# print(type(c.load_all_logs()))
# print(type(c.get("/")))
c = BiogamesClient(host="http://biodiv", **{'csrftoken': 'IgbwP83iEibW6RE7IADIFELYdbx0dvqQ',
'sessionid': 'zntsj09d92tjos1b6ruqjthlzv60xdin'})
print(json.dumps(c.list(), indent=1))

View File

@ -1,19 +0,0 @@
<script
src="https://code.jquery.com/jquery-3.2.1.min.js"></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"/>
<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"></script>
<script src="https://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>
<script src="my.js"></script>
<link href="style.css" rel="stylesheet"/>
<main>
<div class="mapDiv" id="mainMap"></div>
<div class="sequenceContainer">
<div class="sequence"></div>
</div>
</main>
<!--div style="font-size:0.1px;position:absolute;bottom:0;">OSM Logo: CC-BY-SA
http://wiki.openstreetmap.org/wiki/File:Mag_map-120x120.png
</div-->

Binary file not shown (before: image, 191 KiB).

View File

@ -1,110 +0,0 @@
function loadData() {
console.log($(location).attr('hash').substr(1));
$.getJSON($(location).attr('hash').substr(1), function (data) {
var images = {};
var tiles = {
"openstreetmap": L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
maxNativeZoom: 19,
maxZoom: 24,
attribution: '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
}),
"esri sat": L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
maxNativeZoom: 19,
maxZoom: 24,
attribution: 'Tiles &copy; Esri &mdash; Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
}),
"google sat": L.tileLayer('https://{s}.google.com/vt/lyrs=s&x={x}&y={y}&z={z}', {
maxNativeZoom: 20,
maxZoom: 24,
subdomains: ['mt0', 'mt1', 'mt2', 'mt3']
})
};
var map = L.map("mainMap", {layers: [tiles.openstreetmap]});
function styleTrack(feature) {
var styles = {};
styles.color = data.properties.colors[feature.properties.activity_type];
return styles;
}
var highlighted = null;
function onClick(e) {
var start = e.target.feature.geometry.properties.start_timestamp;
var end = e.target.feature.geometry.properties.end_timestamp;
var changed = highlighted !== e.target.feature;
$.each(images, function (timestamp, board) {
if ((timestamp >= start && timestamp < end) && changed) {
board.image.first().addClass("highlight");
} else {
board.image.removeClass("highlight");
highlighted = null;
}
}
);
if (changed) {
highlighted = e.target.feature;
}
}
var coords = [];
function onEachFeature(feature, layer) {
layer.setStyle(styleTrack(feature));
layer.on('click', onClick);
if (feature.geometry.coordinates.length > 1) {
coords = coords.concat(feature.geometry.coordinates.map(function (p) {
return [p[1], p[0], 0.1];
}));
}
}
var track = L.geoJSON(data['features'], {
//style: styleTrack,
onEachFeature: onEachFeature
}).addTo(map);
map.fitBounds(track.getBounds());
//var heat = L.heatLayer(coords);
//L.control.layers(tiles, {"heatmap": heat}).addTo(map);
var list = $("<ul />");
var current = {
"pos": data.properties["boards"][0].coordinate.coordinates
};
var i = 0;
while (current.pos == undefined) {
i+=1;
current.pos = data.properties["boards"][i].coordinate.coordinates;
}
console.log(current);
var marker = L.marker([current.pos[1], current.pos[0]]).addTo(map);
$.each(data.properties["boards"], function (index, entry) {
//console.log(index, entry);
var item = $("<li>", {class: entry.extra_data.activity_type});
var container = $("<div>", {class: "board"});
var image = $("<img>", {src: entry.image.replace("static/progress/", "")});
image.attr("data-time", entry.timestamp);
image.hover(function () {
marker.setLatLng([entry.coordinate.coordinates[1], entry.coordinate.coordinates[0]]);
}, function () {
marker.setLatLng([current.pos[1], current.pos[0]]);
});
image.click(function (e) {
current.board = image;
current.pos = entry.coordinate.coordinates;
});
images[entry.timestamp] = {image: image, coordinate: entry.coordinate};
image.appendTo(container);
container.appendTo(item);
item.appendTo(list);
});
current.board = images[data.properties["boards"][1].timestamp];
list.appendTo(".sequence");
});
}
$(document).ready(function () {
loadData();
});

Binary file not shown (before: image, 181 KiB).

View File

@ -1,105 +0,0 @@
/*.mapDiv {
width: 1024px;
height: 768px;
}*/
.highlight {
/*what a nice way to highlight*/
display: none;
}
.simu {
background-color: blue;
}
.question {
background-color: orange;
}
.image {
background-color: green;
}
.audio {
background-color: red;
}
.video {
background-color: purple;
}
.other {
background-color: brown;
}
.map {
background-color: violet;
}
.error {
background-color: grey;
}
.board {
width: 32px;
height: 32px;
display: inline-block;
}
.board img {
max-width: 32px;
max-height: 32px;
position: absolute;
/*bottom: 0px;*/
}
.board:hover img{
max-width: 205px;
max-height: 295px;
z-index: 99;
top: 5px;
right:0px;
}
ul {
list-style-type: none;
overflow: auto;
overflow-y: hidden;
display: inline-block;
/*max-width:100%;
margin: 0 0 1em;
white-space: nowrap;
height:200px;*/
}
li {
display: inline-block;
vertical-align: top;
padding: 2px;
margin-bottom: 2px;
}
body{
height: 100%;
padding:0;
margin:0;
}
main{
display: flex;
flex-direction: column;
height:100%;
}
.mapDiv {
flex-grow:1;
}
.sequenceContainer{
flex-grow: 0;
min-height:300px;
padding-right: 210px;
position: relative;
}

View File

@ -1,63 +0,0 @@
version: "3"
services:
app:
image: docker.clkl.de/ma/celery:0.4.2
build: .
volumes:
- ./:/app
working_dir: /app/selector
command: python3 webserver.py
environment:
- PYTHONPATH=/app
- PYTHONUNBUFFERED=1
networks:
- default
- traefik_net
labels:
- "traefik.enable=true"
- "traefik.port=5000"
- "traefik.docker.network=traefik_net"
- "traefik.url.frontend.rule=Host:select.ma.potato.kinf.wiai.uni-bamberg.de"
celery:
image: docker.clkl.de/ma/celery:0.4.2
environment:
- PYTHONPATH=/app
- PYTHONUNBUFFERED=1
volumes:
- ./:/app
- ./data/results:/data/results
working_dir: /app
command: celery -A tasks.tasks worker --loglevel=info
redis:
image: redis:4-alpine
volumes:
- ./data/redis:/data
command: redis-server --appendonly yes
nginx:
image: nginx:1.13-alpine
volumes:
- ./data/results:/usr/share/nginx/html:ro
networks:
- traefik_net
labels:
- "traefik.enable=true"
- "traefik.port=80"
- "traefik.docker.network=traefik_net"
- "traefik.url.frontend.rule=Host:results.ma.potato.kinf.wiai.uni-bamberg.de"
log_data:
image: nginx:1.13-alpine
volumes:
- ./log_data/:/srv/:ro
- ./log_data.conf:/etc/nginx/conf.d/log_data.conf
networks:
traefik_net:
external:
name: traefik_net

View File

@ -62,7 +62,7 @@ distribution = defaultdict(lambda: 0)
finished_and_simu = defaultdict(list)
files = {}
actions_dist = defaultdict(list)
with open('/home/clemens/git/ma/test/src') as src:
with open('/home/agp8x/git/uni/ma/project/data/0000_ref') as src:
for line in src:
line = line.strip()
instance_id, log = get_json(line)

View File

@ -1,44 +0,0 @@
# Traefik reverse proxy for analysis framework
## Usage (default: http)
1. `cd traefik`
2. `editor docker-compose.yml`
* Adjust the *traefik.frontend.rule* label for the traefik dashboard
* Default (match any):
* traefik.localhost
* traefik.potato.kinf.wiai.uni-bamberg.de
* Adjust port mapping
* Default:
* 80 → 80
* Syntax: <host-port>:<container-port>
3. `docker-compose up -d`
4. `cd ../..`
5. `editor docker-compose.yml`
* adjust the *traefik.url.frontend.rule* labels for services *app* and *nginx*
* adjust the urls in *selector/config.py* accordingly
6. `docker-compose up -d`
7. You have a working analysis framework setup now
* Stop with `docker-compose down`
* Start with `docker-compose up -d`
## Usage (https)
1. Be on a host with port 80 available from the internet
2. Follow the HTTP usage above up to step 2
3. Reconfigure docker-compose
* Add acme.json volume:
* Uncomment the line for the acme.json volume
* Adjust the host path
* Syntax: <host-path>:<container-path>
* Add port mapping for HTTPS
* Uncomment the line for the 443:443 port mapping
4. Create acme.json
* `touch acme.json`
* `chmod 600 acme.json`
5. Activate traefik's ACME module
* `mv config.toml config_http.toml`
* `mv config_acme.toml config.toml`
* `editor config.toml`
* Adjust the *acme.email* value
6. Continue with the HTTP usage from step 3 onward (see the compose sketch below)
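For reference, the HTTPS variant of the traefik service then looks roughly like the sketch below, i.e. the compose file further down with the two commented lines enabled (the acme.json host path is a placeholder to adjust):

```yaml
services:
  traefik:
    image: traefik:1.6
    command: --configFile=/traefik.toml
    volumes:
      - ./config.toml:/traefik.toml
      - /srv/traefik/acme.json:/acme.json   # ACME certificate storage
      - /var/run/docker.sock:/var/run/docker.sock
    ports:
      - 80:80
      - 443:443   # HTTPS entrypoint
```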

View File

@ -1,8 +0,0 @@
logLevel = "INFO"
[web]
address = ":8080"
[docker]
watch = true
exposedbydefault = false

View File

@ -1,26 +0,0 @@
logLevel = "INFO"
defaultEntryPoints = ["https", "http"]
[web]
address = ":8080"
[docker]
watch = true
exposedbydefault = false
[entryPoints]
[entryPoints.http]
address = ":80"
[entryPoints.http.redirect]
entryPoint = "https"
[entryPoints.https]
address = ":443"
[entryPoints.https.tls]
[acme]
email = "tls-admin@org.example"
storage = "acme.json"
entryPoint = "https"
OnHostRule = true
[acme.httpChallenge]
entryPoint = "http"

View File

@ -1,22 +0,0 @@
version: "3"
services:
traefik:
image: traefik:1.6
command: --configFile=/traefik.toml
volumes:
- ./config.toml:/traefik.toml
# - /srv/traefik/acme.json:/acme.json
- /var/run/docker.sock:/var/run/docker.sock
ports:
- 80:80
# - 443:443
networks:
- net
labels:
- "traefik.enable=true"
- "traefik.port=8080"
- "traefik.frontend.rule=Host:traefik.localhost,traefik.potato.kinf.wiai.uni-bamberg.de"
restart: on-failure:5
networks:
net:
driver: bridge

View File

@ -1,10 +1,8 @@
from .biogames import SQLiteLoader, ZipSQLiteLoader
from .loader import JSONLoader
from .neocart import NeoCartLoader
LOADERS = {
"json": JSONLoader,
"sqlite": SQLiteLoader,
"zip": ZipSQLiteLoader,
"neocartographer": NeoCartLoader,
"zip": ZipSQLiteLoader
}

237
log_analyzer.py Normal file
View File

@ -0,0 +1,237 @@
import json
import logging
from typing import List
import numpy as np
import analyzers
from analyzers import get_renderer, Analyzer, render, Store
from analyzers.analyzer import ResultStore
from analyzers.render.default import LogEntryCountCSV
from analyzers.settings import LogSettings, load_settings
from loaders import LOADERS
logging.basicConfig(format='%(levelname)s %(name)s:%(message)s', level=logging.DEBUG)
log: logging.Logger = logging.getLogger(__name__)
requests_log = logging.getLogger('requests')
requests_log.setLevel(logging.WARN)
def process_log(log_id: str, settings: LogSettings) -> List[Analyzer]:
logfile: str = "data/inst_{id}.{format}".format(id=log_id, format=settings.log_format)
logfile = log_id
loader = LOADERS[settings.log_format]()
try:
loader.load(logfile)
except Exception as e:
raise RuntimeError(e) from e
analyzers: List[Analyzer] = []
log.debug("build analyzers")
for analyzer in settings.analyzers:
analyzers.append(analyzer(settings))
log.debug("process entries")
for entry in loader.get_entry():
for analyzer in analyzers:
try:
if analyzer.process(entry):
break
except KeyError as e:
log.exception(e)
return analyzers
if __name__ == '__main__':
settings: LogSettings = load_settings("biogames2.json")
log_ids: List[str] = [
"20d4244719404ffab0ca386c76e4b112",
"56d9b64144ab44e7b90bf766f3be32e3",
"dc2cdc28ca074715b905e4aa5badff10",
"e32b16998440475b994ab46d481d3e0c",
]
log_ids: List[str] = [
# "34fecf49dbaca3401d745fb467",
# "44ea194de594cd8d63ac0314be",
# "57c444470dbf88605433ca935c",
# "78e0c545b594e82edfad55bd7f",
# "91abfd4b31a5562b1c66be37d9",
"597b704fe9ace475316c345903",
"e01a684aa29dff9ddd9705edf8",
"fbf9d64ae0bdad0de7efa3eec6",
# "fe1331481f85560681f86827ec",
"fe1331481f85560681f86827ec"]
# "fec57041458e6cef98652df625", ]
log_ids = []
# with open("/home/clemens/git/ma/test/filtered") as src:
with open("/home/agp8x/git/uni/ma/project/data/0000_ref") as src:
for line in src:
line = line.strip()
log_ids.append(line)
store: ResultStore = ResultStore()
for log_id in log_ids:
for analysis in process_log(log_id, settings):
log.info("* Result for " + analysis.name())
# print(analysis.result())
# print(analysis.render())
analysis.result(store, name=log_id)
if False:
for r in get_renderer(analyzers.LocomotionActionAnalyzer):
r().render(store.get_all())
if False:
render(analyzers.LocationAnalyzer, store.get_all())
# print(json.dumps(store.serializable(), indent=1))
if False:
render(analyzers.ActivityMapper, store.get_all())
render(analyzers.ProgressAnalyzer, store.get_all())
if False:
from analyzers.postprocessing import graph
g = graph.Cache(settings)
g.run(store)
if False:
# render(analyzers.SimulationOrderAnalyzer, store.get_all())
for cat in store.get_categories():
data = store.get_category(cat)
render(analyzers.SimulationOrderAnalyzer, data, name=cat)
if False:
LogEntryCountCSV.summary = None
for cat in store.get_categories():
data = store.get_category(cat)
render(analyzers.LogEntryCountAnalyzer, data, name=cat)
if LogEntryCountCSV.summary:
headers = []
lines = []
for name in LogEntryCountCSV.summary:
data = LogEntryCountCSV.summary[name]
for head in data:
if head not in headers:
headers.append(head)
line = [name]
for head in headers:
line.append(data[head] if head in data else 0)
lines.append(line)
import csv
with open('logentrycount.csv', 'w', newline='') as csvfile:
writer = csv.writer(csvfile, quoting=csv.QUOTE_NONE)
writer.writerow(["name"] + [h.split(".")[-1] for h in headers])
for line in lines:
writer.writerow(line)
if True:
from datetime import datetime
json.dump(store.serializable(), open("simus.json", "w"), indent=2)
with open("simus.csv", "w") as csvfile:
csvfile.write("instanceconfig,log,simu,answered,universe_state,selected_actions,timestamp,time\n")
for key in store.get_store():
csvfile.write("{}\n".format(key))
for result in store.store[key]:
csvfile.write(",{}\n".format(result.name))
for i in result.get():
csvfile.write(",,{},{},{},{},{},{}\n".format(
i['answers']['@id'],
i['answers']['answered'],
len(i['answers']['universe_state']) if i['answers']['universe_state'] else 0,
len(i['selected_actions']) if i['selected_actions'] else 0,
i['timestamp'],
str(datetime.fromtimestamp(i['timestamp']/1000))
))
if False:
#json.dump(store.serializable(), open("new.json", "w"), indent=1)
from collections import defaultdict
keys = [
"simu",
"question",
"image",
"audio",
"video",
"other",
"map"
]
import matplotlib.pyplot as plt
#results = []
places = defaultdict(list)
for log in store.get_all():
result = defaultdict(lambda: 0)
for i in log.get()['track']:
duration = i['properties']['end_timestamp'] - i['properties']['start_timestamp']
result[i['properties']['activity_type']] += duration
print(json.dumps(result, indent=4))
total = sum(result.values())
print(total)
percentage = defaultdict(lambda: 0)
minutes = defaultdict(lambda: 0)
for i in result:
percentage[i] = result[i] / total
minutes[i] = result[i] / 60_000
print(json.dumps(percentage, indent=4))
if 'error' not in result:
#places[log.get()['instance']].append(percentage)
places[log.get()['instance']].append(minutes)
for place in places:
places[place] = sorted(places[place], key=lambda item:item['map'])
dummy = [0]*len(keys)
results = []
sites = []
from util.meta_temp import CONFIG_NAMES
for i in places:
for j in places[i]:
ordered = []
for k in keys:
ordered.append(j[k])
results.append(ordered)
results.append(dummy)
sites.append(CONFIG_NAMES[i] if i in CONFIG_NAMES else "---")
size = len(results)
ind = np.arange(size)
width=0.9
print(results)
data = list(zip(*results))
print(data)
lines = []
bottom = [0]*len(results)
for i in range(0, len(data)):
lines.append(plt.bar(ind,data[i], bottom=bottom, width=width)[0])
for k,x in enumerate(data[i]):
bottom[k] += x
plt.legend(lines, keys)
plt.title(", ".join(sites))
plt.show()
#size = len(results)
#ind = np.arange(size)
#width = 0.9
#print(results)
#data = list(zip(*results))
#print(data)
#lines = []
#bottom = [0] * len(results)
#for i in range(0, len(data)):
# lines.append(plt.bar(ind, data[i], bottom=bottom, width=width)[0])
# for k, x in enumerate(data[i]):
# bottom[k] += x
#plt.legend(lines, keys)
#plt.title("Zwei Spiele in Filderstadt (t1=237min; t2=67min)")
#plt.show()
# for analyzers in analyzers:
# if analyzers.name() in ["LogEntryCount", "ActionSequenceAnalyzer"]:
# print(json.dumps(analyzers.result(), indent=2))
# for analyzers in analyzers:
# if analyzers.name() in ["BoardDuration"]:
# print(json.dumps(analyzers.result(), indent=2))
# print(analyzers.render())
# coords = analyzers[1].render()
# with open("test.js", "w") as out:
# out.write("coords = "+coords)

View File

@ -1,9 +0,0 @@
server {
listen 80;
server_name log_data;
location / {
root /srv/;
autoindex on;
autoindex_format json;
}
}

View File

@ -1,34 +0,0 @@
{
"logFormat": "neocartographer",
"entryType": "type",
"spatials": [
"location"
],
"actions": [],
"boards": [],
"analyzers": {
"analysis.analyzers": [
"SimpleCategorizer",
"LocationAnalyzer"
]
},
"sequences": {},
"custom": {
"coordinates": "location.coordinates",
"metadata": {
"timestamp": "timestamp",
"gamefield": "instance_id",
"user": "player_group_name"
}
},
"source": {
"type": "Geogames",
"host": "http://log_data/",
"path": "neocartographer"
},
"render": [
"KMLRender"
]
}

View File

@ -1,66 +0,0 @@
{
"logFormat": "zip",
"entryType": "@class",
"spatials": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
],
"actions": [
"...QuestionAnswerEvent",
"...SimuAnswerEvent"
],
"boards": [
"de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
],
"analyzers": {
"analysis.analyzers": [
"SimulationCategorizer",
"LocationAnalyzer"
]
},
"sequences": {
"start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
"action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
}
},
"custom": {
"simulation_rounds": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
],
"simu_data": [
"de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
],
"instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
"instance_id": "instance_id",
"instance_config_id": "config.@id",
"sequences2": {
"id_field": "sequence_id",
"start": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "START"
},
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "PAUSE"
}
},
"coordinates": "location.coordinates",
"metadata": {
"timestamp": "timestamp",
"gamefield": "instance_id",
"user": "player_group_name"
}
},
"source": {
"type": "Biogames",
"username": "ba",
"password": "853451",
"host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
},
"render": [
"KMLRender"
]
}

View File

@ -1,18 +1,7 @@
requests==2.18.4
numpy==1.14.2
numpy==1.13.1
matplotlib==2.1.0
#osmnx==0.6
osmnx==0.6
networkx==2.0
#pydot==1.2.3
scipy==1.0.1
#ipython==6.2.1
flask==0.12.2
celery==4.1.1
redis==2.10.6
lxml==4.2.1
shapely==1.6.4
pyproj==1.9.5.1
pydot==1.2.3
scipy==1.0.0

View File

View File

@ -1,252 +0,0 @@
KML = """{
"logFormat": "zip",
"entryType": "@class",
"spatials": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
],
"actions": [],
"boards": [
"de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
],
"analyzers": {
"analysis.analyzers": [
"BiogamesCategorizer",
"LocationAnalyzer"
]
},
"sequences": {
"start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
"action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
}
},
"custom": {
"simulation_rounds": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
],
"simu_data": [
"de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
],
"instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
"instance_id": "instance_id",
"instance_config_id": "config.@id",
"sequences2": {
"id_field": "sequence_id",
"start": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "START"
},
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "PAUSE"
}
},
"coordinates": "location.coordinates",
"metadata": {
"timestamp": "timestamp",
"gamefield": "instance_id",
"user": "player_group_name"
}
},
"source": {
"type": "Biogames",
"username": "ba",
"password": "853451",
"host": "http://biodiv2govm.kinf.wiai.uni-bamberg.de"
},
"render": [
"KMLRender"
]
}"""
ACTIVITY = """{
"logFormat": "zip",
"entryType": "@class",
"spatials": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
],
"actions": [],
"boards": [
"de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
],
"analyzers": {
"analysis.analyzers": [
"BiogamesCategorizer",
"ActivityMapper"
]
},
"sequences": {
"start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
"action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
}
},
"custom": {
"simulation_rounds": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
],
"simu_data": [
"de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
],
"instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
"instance_id": "instance_id",
"instance_config_id": "config.@id",
"sequences2": {
"id_field": "sequence_id",
"start": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "START"
},
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "PAUSE"
}
},
"coordinates": "location.coordinates",
"metadata": {
"timestamp": "timestamp",
"gamefield": "instance_id",
"user": "player_group_name"
}
},
"source": {
"type": "Biogames",
"username": "ba",
"password": "853451",
"host": "http://biodiv2govm.kinf.wiai.uni-bamberg.de"
},
"render": [
"ActivityMapper"
]
}"""
KML_geo = """{
"logFormat": "neocartographer",
"entryType": "type",
"spatials": [
"location"
],
"actions": [],
"boards": [],
"analyzers": {
"analysis.analyzers": [
"SimpleCategorizer",
"LocationAnalyzer"
]
},
"sequences": {},
"custom": {
"coordinates": "location.coordinates",
"metadata": {
"timestamp": "timestamp",
"gamefield": "instance_id",
"user": "player_group_name"
}
},
"source": {
"type": "Geogames",
"host": "http://log_data/",
"path": "neocartographer"
},
"render": [
"KMLRender"
]
}"""
OEB = """{
"logFormat": "zip",
"entryType": "@class",
"spatials": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
],
"actions": [
"...QuestionAnswerEvent",
"...SimuAnswerEvent"
],
"boards": [
"de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
],
"analyzers": {
"analysis.analyzers": [
"BiogamesCategorizer",
"LocationAnalyzer",
"BiogamesDuration",
"BiogamesTasks",
"GameFieldInstanceGroup"
]
},
"sequences": {
"start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
"action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
}
},
"custom": {
"simulation_rounds": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
],
"simu_data": [
"de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
],
"instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
"instance_id": "instance_id",
"instance_config_id": "config.@id",
"sequences2": {
"id_field": "sequence_id",
"start": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "START"
},
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "PAUSE"
}
},
"coordinates": "location.coordinates",
"metadata": {
"timestamp": "timestamp",
"gamefield": "instance_id",
"user": "player_group_name"
}
},
"source": {
"type": "Biogames",
"username": "ba",
"password": "853451",
"host": "https://biogames.kinf.wiai.uni-bamberg.de"
},
"render": [
"OEBRender"
]
}
"""
CONFIGS = {
"Biogames": {
"ActivityMapper": ACTIVITY,
"KML": KML,
"DauerEntfernungPunkteZeit": OEB,
},
"Geogames": {
"KML": KML_geo,
},
}
URLS = {
"KML": "/",
"DauerEntfernungPunkteZeit": "/",
"ActivityMapper": "#",
}
HOSTS = {
#"Biogames": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
#"Biogames": "http://www.biodiv2go.de",
"Biogames": "http://biogames.kinf.wiai.uni-bamberg.de/",
"Geogames": "http://log_data/",
}
RESULT_HOST = "http://results.ma.potato.kinf.wiai.uni-bamberg.de/"

View File

@ -1,4 +0,0 @@
function validateSettings() {
alert(document.getElementById('safety').checked);
return false;
}

View File

@ -1,9 +0,0 @@
body {
/* background-color: limegreen;*/
}
#data{
display: none;
}
li{
list-style-type: none;
}

View File

@ -1,5 +0,0 @@
<!doctype html>
<title></title>
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename='style.css') }}">
<script type="application/javascript" src="{{url_for('static', filename='script.js') }}"></script>
{% block body %} {% endblock %}

View File

@ -1,25 +0,0 @@
{% extends "base.html" %}
{% block body %}
<form action="/start" method="post">
<div id="data"> {{logs}}</div>
<ul>
{% for log in logs %}
<li>
<input type="checkbox" name="logs" value="{{log['@id']}}">
{{log.start_date}}: {{log.player_group_name}}
</li>
<!--{{log}}-->
{% endfor %}
</ul>
<!--input type="checkbox" id="safety"><label for="safety">Confirm selection</label-->
<input type="text" id="name" maxlength="128" placeholder="name" name="name"/><br>
<select name="config">
{% for config in configs %}
<option>{{config}}</option>
{% endfor %}
</select>
<input type="submit">
</form>
<a href="/results">show analysis progress/results</a>
{% endblock %}

View File

@ -1,14 +0,0 @@
{% extends "base.html" %}
{% block body %}
<form action="/login" method="post">
<select name="game">
{% for game in clients %}
<option>{{ game }}</option>
{% endfor %}
</select>
<input type="text" name="username" placeholder="username"/>
<input type="password" name="password" placeholder="passwort"/>
<input type="submit">
</form>
{% endblock %}

View File

@ -1,22 +0,0 @@
{% extends "base.html" %}
{% block body %}
<a href="/games">create new analysis</a>
<div id="results">
<ul>
{% for job in jobs %}
<li> {{jobs[job].status}}: "{{job}}":
<ul>
{% for r in jobs[job].results %}
<li><a href="{{jobs[job] | get_prefix}}{{r | get_name}}">{{r|get_name}} {{jobs[job].start}}</a></li>
{% endfor %}
</ul>
</li>
{% endfor %}
</ul>
</div>
{% endblock %}

View File

@ -1,120 +0,0 @@
import json
import logging
import typing
import uuid
import time
from clients.webclients import Client, CLIENTS
from flask import Flask, render_template, request, redirect, session
from tasks import tasks
from selector.config import CONFIGS, URLS, HOSTS, RESULT_HOST
app = Flask(__name__)
clients: typing.Dict[str, Client] = {}
log: logging.Logger = logging.getLogger(__name__)
@app.route("/")
def index():
return render_template("index.html", clients=CLIENTS)
@app.route("/login", methods=["POST"])
def login():
game = request.form["game"]
if game not in CLIENTS or game not in HOSTS:
return redirect("/?invalid_game")
client = CLIENTS[game](host=HOSTS[game], username=request.form['username'], password=request.form['password'])
if client.login():
session['logged_in'] = True
session['uid'] = str(uuid.uuid4())
session['username'] = request.form['username']
session['cookies'] = client.cookies
session['game'] = game
session['host'] = HOSTS[game]
clients[session['uid']] = client
return redirect("/results")
return redirect("/?fail")
@app.route("/results")
def results():
if not ('logged_in' in session and session['logged_in']):
return redirect("/")
if session['logged_in'] and not session['uid'] in clients:
clients[session['uid']] = CLIENTS[session['game']](host=session['host'], **session['cookies'])
status = tasks.redis.get(session['username'])
if status:
job_status = json.loads(status)
else:
job_status = {}
#for job in job_status:
# results = []
# for path in job_status[job]['results']:
# results.append(path.replace(tasks.DATA_PATH, RESULT_HOST))
# print(results) #TODO???
return render_template("results.html", jobs=job_status)
@app.route("/games")
def games():
if not ('logged_in' in session and session['logged_in']):
return redirect("/")
if session['logged_in'] and not session['uid'] in clients:
clients[session['uid']] = CLIENTS[session['game']](host=session['host'], **session['cookies'])
return render_template("games.html", logs=clients[session['uid']].list(), configs=CONFIGS[session['game']])
@app.route("/start", methods=['POST'])
def start():
print(str(request.form['logs']))
status = {
"status": "PENDING",
"submit": time.strftime("%c"),
"log_ids": request.form.getlist('logs'),
"config": request.form['config'],
}
params = {
"log_ids": request.form.getlist('logs'),
"config": CONFIGS[session['game']][request.form['config']],
"username": session['username'],
"cookies": session['cookies'],
"host": session['host'],
"clientName": session['game'],
"name": request.form['name'],
}
tasks.status_update(session['username'], request.form['name'], status)
tasks.analyze.delay(**params)
return redirect("/results")
@app.route("/status")
def status():
return json.dumps(json.loads(tasks.redis.get(session['username'])), indent=2)
@app.template_filter('get_url')
def get_url(path: str):
return path.replace(tasks.DATA_PATH, RESULT_HOST)
@app.template_filter('get_name')
def get_name(path: str):
return path.replace(tasks.DATA_PATH, "")
@app.template_filter('get_prefix')
def get_prefix(job):
print(job)
try:
return RESULT_HOST + URLS[job['config']]
except (KeyError, TypeError):
return RESULT_HOST + "#"
if __name__ == '__main__':
app.config.update({"SECRET_KEY": "59765798-2784-11e8-8d05-db4d6f6606c9"})
app.run(host="0.0.0.0", debug=True)

5
sources/__init__.py Normal file
View File

@ -0,0 +1,5 @@
from .biogames import Biogames
SOURCES = {
"Biogames": Biogames,
}

85
sources/biogames.py Normal file
View File

@ -0,0 +1,85 @@
import json
import logging
import typing
from tempfile import TemporaryDirectory
import os
from sources.source import Source
import shutil
import requests
log: logging.Logger = logging.getLogger(__name__)
class Biogames(Source):
def __init__(self):
self.headers: typing.Dict[str, str] = {'Accept': 'application/json'}
self.cookies: typing.Dict[str, str] = {}
self.id2link: typing.Dict[str, str] = {}
self.host: typing.Optional[str] = None
def connect(self, **kwargs):
for i in ['username', 'password', 'url', 'login_url', 'host']:
if i not in kwargs:
raise ValueError("missing value " + i)
csrf_request = requests.get(kwargs['url'])
if csrf_request.status_code != 200:
raise ConnectionError("unable to obtain CSRF token (" + str(csrf_request) + ")")
self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
log.info("obtained CSRF token (" + self.cookies['csrftoken'] + ")")
login_payload = {
'username': kwargs['username'],
'password': kwargs['password'],
'next': '',
'csrfmiddlewaretoken': 'csrftoken'
}
login = requests.post(kwargs['login_url'], data=json.dumps(login_payload), cookies=self.cookies)
if login.status_code != 200:
raise ConnectionError("Unable to authenticate!", login, login.text)
self.cookies['sessionid'] = login.cookies['sessionid']
log.info("obtained sessionid (" + self.cookies['sessionid'] + ")")
self.url = kwargs['url']
self.host = kwargs['host']
log.info("stored url (" + self.url + ")")
def list(self):
logs = self.get_json(self.url)
log.info(len(logs))
for i in logs:
self.id2link[i["id"]] = i["link"] # TODO
return logs
def get(self, ids: typing.Collection):
tmpdir = TemporaryDirectory()  # avoid shadowing the builtin dir()
files = []
for i in ids:
url = self.id2link[i]
filename = os.path.join(tmpdir.name, url.split("/")[-1])
file = self.download_file(url, filename)
if file:
files.append(file)
return tmpdir
def download_file(self, url, filename):
with open(filename, "wb") as out:
try:
download = self._get(url)
shutil.copyfileobj(download.raw, out)
return filename
except Exception as e:
log.exception(e)
os.remove(filename)
def get_json(self, url):
http = self._get(url, stream=False)
if not http.ok:
raise ConnectionError("HTTP status is not OK", http.url)
return http.json()
def close(self):
pass
def _get(self, url, stream=True):
return requests.get(self.host + url, cookies=self.cookies, headers=self.headers, stream=stream)

18
sources/source.py Normal file
View File

@ -0,0 +1,18 @@
import typing
class Source:
def connect(self, **kwargs):
raise NotImplementedError
def list(self):
raise NotImplementedError
def get(self, ids: typing.Collection):
raise NotImplementedError
def get_json(self, url:str) -> dict:
raise NotImplementedError
def close(self):
raise NotImplementedError
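As an illustration, a trivial file-based implementation of this interface might look like the sketch below; the `LocalFiles` class and its `paths` parameter are hypothetical and not part of the repository:

```python
import json
import typing

from sources.source import Source


class LocalFiles(Source):
    """Hypothetical source serving already-downloaded logs from disk."""

    def connect(self, **kwargs):
        # expects a 'paths' mapping of log id -> local file path
        self.paths: typing.Dict[str, str] = kwargs["paths"]

    def list(self):
        return [{"id": i, "link": p} for i, p in self.paths.items()]

    def get(self, ids: typing.Collection):
        return [self.paths[i] for i in ids]

    def get_json(self, url: str) -> dict:
        with open(url) as src:
            return json.load(src)

    def close(self):
        pass
```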

45
static/heatmap.html Normal file
View File

@ -0,0 +1,45 @@
<!DOCTYPE html>
<html>
<head>
<title>Leaflet.heat demo</title>
<link rel="stylesheet" href="http://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.css" />
<script src="http://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.js"></script>
<style>
#map { width: 1024px; height: 768px; }
body { font: 16px/1.4 "Helvetica Neue", Arial, sans-serif; }
.ghbtns { position: relative; top: 4px; margin-left: 5px; }
a { color: #0077ff; }
</style>
</head>
<body>
<div id="map"></div>
<!-- <script src="../node_modules/simpleheat/simpleheat.js"></script>
<script src="../src/HeatLayer.js"></script> -->
<script src="https://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>
<script src="./heat_data.js"></script>
<script>
var options = {maxZoom:22};
var map = L.map('map', options).setView(coords[0], 17);
L.control.scale().addTo(map);
var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
}).addTo(map);
function addHeat(coords){
var heat = L.heatLayer(coords).addTo(map);
}
//coords = coords.map(function (p) { return [p[1], p[0], 0.05]; });
//var heat = L.heatLayer(coords).addTo(map);
addHeat(coords);
</script>
<!--script src="./coord.js"></script>
<script>
//addHeat(coords);
</script-->
</body>
</html>

View File

@ -0,0 +1,15 @@
<script
src="https://code.jquery.com/jquery-3.2.1.min.js"
integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
crossorigin="anonymous"></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
crossorigin=""></script>
<script src="my.js"></script>
<style>
.map { width: 512px; height: 256px; }
</style>

15
static/progress/log.html Normal file
View File

@ -0,0 +1,15 @@
<script
src="https://code.jquery.com/jquery-3.2.1.min.js"
integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
crossorigin="anonymous"></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
crossorigin=""></script>
<script src="log.js"></script>
<style>
.map { width: 512px; height: 512px; }
</style>

72
static/progress/log.js Normal file
View File

@ -0,0 +1,72 @@
$.getJSON("data/fooo", function (data) {
var list = $("<ul />");
var mapC = $("<div />", {class: "map", id: "map"});
mapC.appendTo("body");
var track = [];
var times = [];
$.each(data.spatials, function (i, elem) {
track.push([elem.coordinates[1], elem.coordinates[0]]);
times.push(i);
});
var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
});
var map = L.map("map", {layers: [tiles]});
L.control.scale().addTo(map);
var layer = L.polyline(track).addTo(map);
map.fitBounds(layer.getBounds());
$.each(data, function (key, value) {
//console.log(key, value);
//key: instance_id, value: AnalysisResult
//value.result.instance: InstanceConfig_id
// console.log(key, value[0].result.store[0].timestamp);
/*$.each(value[0].result.store, function (index, entry) {
//console.log(entry);
var time = new Date(entry.timestamp);
var item = $("<li>", {html: entry.sequence + " @ " + time.toLocaleDateString() + " "+ time.toLocaleTimeString()});
var container = $("<p />");
if (entry.track.length > 0) {
var mapName = "map" + index;
//console.log(mapName, entry.track.length);
var mapContainer = $("<div />", {id: mapName, class: "map"});
var track = [];
$.each(entry.track, function (i, elem) {
track.push([elem.coordinates[1], elem.coordinates[0]]);
});
maps[mapName] = track;
mapContainer.appendTo(container);
}
$.each(entry.events, function (i, event) {
if ("image" in event) {
$("<img />", {src: event.image, height: 200}).appendTo(container);
}
});
container.appendTo(item);
item.appendTo(list);
});*/
});
list.appendTo("body");
var slider = $("<input />", {type: "range", start:0,end:100});
slider.appendTo("body");
/*});
$(window).on("load", function () {*/
// setTimeout(function () {
//console.log(maps);
/*$.each(maps, function (mapName, track) {
//console.log("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAa");
var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
});
var map = L.map(mapName, {layers: [tiles]});
L.control.scale().addTo(map);
// console.log(mapName, track);
var layer = L.polyline(track, {color: "green"}).addTo(map);
map.fitBounds(layer.getBounds());
//console.log(layer)
//L.control.layers({"osm":tiles}, {layer]).addTo(map);
});*/
// }, 2000);
});

71
static/progress/my.js Normal file
View File

@ -0,0 +1,71 @@
$.getJSON("tmp3.json", function (data) {
var list = $("<ul />");
var maps = {};
$.each(data, function (index, entry) {
//key: instance_id, value: AnalysisResult
//value.result.instance: InstanceConfig_id
// console.log(key, value[0].result.store[0].timestamp);
//$.each(value[0].result.store, function (index, entry) {
//console.log(entry);
var time = new Date(entry.timestamp);
var item = $("<li>", {html: entry.sequence + " @ " + time.toLocaleDateString() + " "+ time.toLocaleTimeString()});
var container = $("<p />");
if (entry.track.length > 0) {
var mapName = "map" + index;
//console.log(mapName, entry.track.length);
var mapContainer = $("<div />", {id: mapName, class: "map"});
var track = [];
$.each(entry.track, function (i, elem) {
track.push([elem.coordinates[1], elem.coordinates[0]]);
});
maps[mapName] = track;
/* mapContainer.ready(function () {
var map = L.map(mapName, {maxZoom: 22});
L.control.scale().addTo(map);
var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
}).addTo(map);
var track = [];
$.each(entry.track, function (i, elem) {
track.push([elem.coordinates[1], elem.coordinates[0]]);
});
var layer = L.polyline(track, {color: "green"});
console.log(track);
L.control.layers(null, [layer]).addTo(map);
});*/
mapContainer.appendTo(container);
}
$.each(entry.events, function (i, event) {
if ("image" in event) {
$("<img />", {src: event.image, height: 200}).appendTo(container);
}
});
container.appendTo(item);
item.appendTo(list);
//});
});
list.appendTo("body");
var slider = $("<input />", {type: "range" })
/*});
$(window).on("load", function () {*/
// setTimeout(function () {
//console.log(maps);
$.each(maps, function (mapName, track) {
//console.log("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAa");
var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
});
var map = L.map(mapName, {layers: [tiles]});
L.control.scale().addTo(map);
// console.log(mapName, track);
var layer = L.polyline(track, {color: "green"}).addTo(map);
map.fitBounds(layer.getBounds());
//console.log(layer)
//L.control.layers({"osm":tiles}, {layer]).addTo(map);
});
// }, 2000);
});

68
static/trackmap.html Normal file
View File

@ -0,0 +1,68 @@
<!DOCTYPE html>
<html>
<head>
<title>Leaflet.heat demo</title>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
crossorigin=""></script>
<style>
#map { width: 1024px; height: 768px; }
body { font: 16px/1.4 "Helvetica Neue", Arial, sans-serif; }
.ghbtns { position: relative; top: 4px; margin-left: 5px; }
a { color: #0077ff; }
</style>
</head>
<body>
<div id="map"></div>
<!-- <script src="../node_modules/simpleheat/simpleheat.js"></script>
<script src="../src/HeatLayer.js"></script> -->
<script src="http://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>
<script src="./track_data.js"></script>
<script>
var options = {maxZoom:22};
//var map = L.map('map', options).setView([49.90299388, 10.87004638], 17);
var map = L.map('map', options);
L.control.scale().addTo(map);
var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
}).addTo(map);
function addHeat(coords){
//var transformed = coords.map(function (p) { return [p[1], p[0], 0.25]; });
var heat = L.heatLayer(coords).addTo(map);
}
var layers=[];
function addTrack(tracks, i){
var elem = L.polyline(tracks[i], {color:"green"});
// layers[i] =L.LayerGroup([elem]);
layers[i] = elem;
map.fitBounds(elem.getBounds());
layers[i].on('mouseover', function (e) {
e.target.setStyle({'color':'red'});
});
layers[i].on('mouseout', function (e) {
e.target.setStyle({'color':'green'});
});
}
//coords = coords.map(function (p) { return [p[1], p[0], 0.05]; });
//var heat = L.heatLayer(coords).addTo(map);
//addHeat(coords);
for (var i in tracks) {
addTrack(tracks, i);
}
L.control.layers(null, layers).addTo(map);
</script>
<!--script src="./heat_data.js"></script>
<script>
addHeat(coords);
</script-->
</body>
</html>

View File

@ -1,65 +0,0 @@
from .tasks import analyze
__log__ = ["/app/data/008cad400ab848f729913d034a.zip"]
__config__ = """{
"logFormat": "zip",
"entryType": "@class",
"spatials": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
],
"actions": [
"...QuestionAnswerEvent",
"...SimuAnswerEvent"
],
"boards": [
"de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
],
"analyzers": {
"analysis.analyzers": [
"BiogamesCategorizer",
"LocationAnalyzer"
]
},
"sequences": {
"start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
"action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
}
},
"custom": {
"simulation_rounds": [
"de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
],
"simu_data": [
"de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
],
"instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
"instance_id": "instance_id",
"instance_config_id": "config.@id",
"sequences2": {
"id_field": "sequence_id",
"start": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "START"
},
"end": {
"@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
"action": "PAUSE"
}
},
"coordinates": "location.coordinates",
"metadata": {
"timestamp": "timestamp",
"gamefield": "instance_id",
"user": "player_group_name"
}
},
"source": {
"type": "Biogames",
"username": "ba",
"password": "853451",
"host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
}
}"""

View File

@ -1,95 +0,0 @@
import json
import logging
import shutil
import uuid
import os.path
import os
import redis as redis_lib
import time
from celery import Celery
from analysis import log_analyzer as la
from analysis.analyzers import KMLRender, ActivityMapperRender
from analysis.analyzers.render.biogames import OEBRender
from clients.webclients import CLIENTS
FLASK_DB = 1
REDIS_HOST = "redis"
DATA_PATH = "/data/results/"
RENDERERS = { # TODO
"KMLRender": KMLRender,
"ActivityMapper": ActivityMapperRender,
"OEBRender": OEBRender
}
app = Celery('tasks', backend='redis://redis', broker='redis://redis')
redis = redis_lib.StrictRedis(host=REDIS_HOST, db=FLASK_DB)
log: logging.Logger = logging.getLogger(__name__)
def update_status(username, name, state, **kwargs):
status = json.loads(redis.get(username))
status[name][state[0]] = time.strftime("%c")
status[name]['status'] = state[1]
for i in kwargs:
status[name][i] = kwargs[i]
redis.set(username, json.dumps(status))
@app.task
def analyze(config, log_ids, **kwargs):
update_status(kwargs['username'], kwargs['name'], ('load', 'LOADING'))
try:
log.info("start analysis")
client = CLIENTS[kwargs['clientName']](host=kwargs['host'], **kwargs['cookies'])
logs = client.list()
id_urls = {str(x['@id']): x['file_url'] for x in logs}
urls = [id_urls[i] for i in log_ids]
tmpdir = client.download_files(urls)
log.info("%s %s", tmpdir.name, list(os.scandir(tmpdir.name)))
uid = str(uuid.uuid4())
update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'), uid=uid)
results = []
settings = la.parse_settings(config)
store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
os.mkdir(os.path.join(DATA_PATH, uid))
render = RENDERERS[settings.render[0]]() # TODO
files = []
if settings.render[0] == "OEBRender":
files.append(render.render_store(store))
else:
for category in store.get_categories():
data = store.get_category(category)
log.debug("%s %s", category, type(category))
files += render.render(data, name=category[1])  # accumulate; plain assignment would drop all but the last category
log.debug(files)
for file in files:
try:
head, tail = os.path.split(file)
target = os.path.join(DATA_PATH, uid, tail)
log.debug(target)
log.debug(shutil.move(file, target))
results.append(target)
except FileNotFoundError as e:
log.exception(e)
tmpdir.cleanup()
update_status(kwargs['username'], kwargs['name'], ('done', 'FINISHED'), results=results)
except Exception as e:
log.exception(e)
update_status(kwargs['username'], kwargs['name'], ('abort', 'ERROR'), exception=str(e))
def status_update(key, status_key, status):
record = redis.get(key)
if not record:
redis.set(key, json.dumps({status_key: status}))
else:
data = json.loads(record)
data[status_key] = status
redis.set(key, json.dumps(data))
redis.save()
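A hypothetical enqueue of the task above; the keyword names follow the `kwargs` accesses in `analyze`, all values are placeholders, and `"Biogames"` is assumed to be a registered key in `CLIENTS`:

```
analyze.delay(
    config_json,                 # JSON settings string, as fed to la.parse_settings
    ["20351"],                   # log ids, matched against the client's '@id' field
    username="alice", name="run-1",
    clientName="Biogames",
    host="http://example.org", cookies={},
)
```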

View File

@ -1,41 +0,0 @@
import logging
from analysis import log_analyzer as la
log = logging.getLogger(__name__)  # used in the except block below, but was never defined here
settings = la.load_settings("neocart.json")
client = settings.source
logs = client.list()
id_urls = {str(x['@id']): x['file_url'] for x in logs}
log_ids = ['20351/playerid1430317168972.gpx', '20351/playerid1430317188358.gpx']
urls = [id_urls[i] for i in log_ids]
tmpdir = client.download_files(urls)
import os
store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
import json
print(json.dumps(store.serializable(), indent=1))
from analysis.analyzers import KMLRender, ActivityMapperRender
RENDERERS = { # TODO
"KMLRender": KMLRender,
"ActivityMapper": ActivityMapperRender,
}
render = RENDERERS[settings.render[0]]()
files = render.render(store.get_all())
DATA_PATH = "/app/data/results/"
import uuid
uid = str(uuid.uuid4())
results = []
os.mkdir(os.path.join(DATA_PATH, uid))
import shutil
for file in files:
try:
head, tail = os.path.split(file)
target = os.path.join(DATA_PATH, uid, tail)
shutil.move(file, target)
results.append(target)
except FileNotFoundError as e:
log.exception(e)
tmpdir.cleanup()

View File

@ -1,30 +1,31 @@
import logging
import os
from analysis.util import json_path
from util import json_path
logger = logging.getLogger(__name__)
#def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"): #FIXME: I was changed
def download_board(board_id, instance_config_id, sequence_id, source, path="activity/data/results/"):
local_file = os.path.join("static", instance_config_id, sequence_id, board_id)
abs_path = os.path.join(path, local_file)
if os.path.exists(abs_path):
def download_board(board_id, instance_config_id, sequence_id, source):
local_file = "static/progress/images/{config_id}/{sequence_id}/{board_id}".format(
config_id=instance_config_id,
sequence_id=sequence_id,
board_id=board_id)
if os.path.exists(local_file):
return local_file
url = "/game2/editor/config/{config_id}/sequence/{sequence_id}/board/{board_id}/".format(
config_id=instance_config_id,
sequence_id=sequence_id,
board_id=board_id
)
board = source.get(url)
board = source._get(url)
if not board.ok:
raise ConnectionError(url, board, board.status_code)
raise ConnectionError()
data = board.json()
preview_url = json_path(data, "preview_url.medium")
logger.debug(preview_url)
os.makedirs(abs_path[:-len(board_id)], exist_ok=True)
source.download_file(preview_url, abs_path)
os.makedirs(local_file[:-len(board_id)], exist_ok=True)
source.download_file(preview_url, local_file)
return local_file
@ -53,6 +54,7 @@ def get_board_data(source, instance_id, sequence_id, board_id):
return {"class": "error"}
result = {
"class": instance_data["@class"],
"id": instance_data["@id"]
}
for i in ["image", "audio", "video"]:
key = i + "_file"
@ -67,11 +69,10 @@ def get_json(source, url):
if url in cache:
return cache[url]
try:
data = source.get(url).json()
data = source.get_json(url)
except Exception as e:
print("exception", e, e.args)
logger.exception(e)
print("exception", e, e.args) # TODO: logging
data = None
cache[url] = data
return data
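For illustration, a hypothetical call of the reworked `download_board`; the UUIDs are placeholders and `source` is assumed to be one of the web clients exposing `_get()` and `download_file()`:

```
path = download_board(
    board_id="<board-uuid>",
    instance_config_id="<config-uuid>",
    sequence_id="<sequence-uuid>",
    source=client,
)
# a second call returns the cached static/progress/images/... path directly
```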

View File

@ -1,21 +1,14 @@
def json_path(obj: dict, key: str):# TODO: test me!
def json_path(obj: dict, key: str):
"""Query a nested dict with a dot-separated path"""
#if type(obj) is list and not "." in key:
# return obj[int(key)]
if type(obj) not in (dict, list):
raise ValueError("obj is no object (no list, too)")
if not type(obj) is dict:
return None
if "." not in key:
if key not in obj:
return KeyError("key not in object", key)
return None
return obj[key]
child_key = key.split(".")
if child_key[0] not in obj:
try:
index = int(child_key[0])
return json_path(obj[index], ".".join(child_key[1:]))
except:
raise KeyError("key not in object", key)
raise KeyError("key not in object", key)
return None
return json_path(obj[child_key[0]], ".".join(child_key[1:]))
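The behavioural change in a nutshell: missing keys now yield `None` where master raised (or, for leaf keys, returned) a `KeyError`:

```
entry = {"location": {"coordinates": [10.9, 49.9]}}
json_path(entry, "location.coordinates")  # -> [10.9, 49.9]
json_path(entry, "location.altitude")     # -> None (KeyError on master)
```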

View File

@ -97,108 +97,3 @@ CONFIG_NAMES = {
'fe43a0f0-3dea-11e6-a065-00199963ac6e': u'Vorlagen',
'ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771': u'Bad Urach'
}
KML_PATTERN = """<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2" xmlns:gx="http://www.google.com/kml/ext/2.2">
<Document>
<Placemark>
<gx:MultiTrack>
<gx:Track>
{when}
{coordinates}
</gx:Track>
</gx:MultiTrack>
{coordinates}
</Placemark>
</Document>
</kml>
"""
GEOJSON_PATTERN = """{
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"properties": {properties},
"geometry": {
"type": "LineString",
"coordinates": {coordinates}
}
}
]
}
"""# TODO: fix me
GEOJSON_COORDINATES = "[{lon},{lat}]"
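The `# TODO: fix me` is presumably about `str.format()`: the literal JSON braces in the template are themselves parsed as placeholders, so a plain `.format()` call fails with "unexpected '{' in field name". Doubling the structural braces is one way out; a sketch that also builds the point list via `GEOJSON_COORDINATES`:

```
GEOJSON_PATTERN_FIXED = """{{
 "type": "FeatureCollection",
 "features": [{{
  "type": "Feature",
  "properties": {properties},
  "geometry": {{"type": "LineString", "coordinates": [{coordinates}]}}
 }}]
}}"""

points = ",".join(GEOJSON_COORDINATES.format(lon=lon, lat=lat)
                  for lon, lat in [(10.9, 49.9), (10.91, 49.91)])
geojson = GEOJSON_PATTERN_FIXED.format(properties='{"name": "track"}',
                                       coordinates=points)
```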
TASK_NAMES = {
"16fc3117-61db-4f50-b84f-81de6310206f_13127209-103c-4aed-9cce-b8a2cd9f7663_32e93082-1aa7-11e5-9827-74d43509b03a": "Lebensraum",
"16fc3117-61db-4f50-b84f-81de6310206f_13127209-103c-4aed-9cce-b8a2cd9f7663_3a27040f-1a9c-11e5-9827-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075_096093b0-d1ca-49f3-8d51-f32fa8874db5": "Biosphärenreservat",
"16fc3117-61db-4f50-b84f-81de6310206f_2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075_b10951a5-1a8a-11e5-b1a2-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_41abfe17-aef3-41ee-b1e5-eedc8208680f_255d9c6d-1aa0-11e5-9827-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_41abfe17-aef3-41ee-b1e5-eedc8208680f_e0d2dee8-1a9f-11e5-9827-74d43509b03a": "Fellbestimmung",
"16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_10c3329f-7a88-4aa4-9567-14c811d2a6bc": "Lockstock-Code",
"16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_5732fe6c-1a9e-11e5-9827-74d43509b03a": "Lockstock-Nachweis",
"16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_e4bbaf4c-1a9d-11e5-9827-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_e08ffe7c-b24d-4fcd-9355-8a459a2c07b7_597c651a-1a8c-11e5-b1a2-74d43509b03a": "Simulation",
"16fc3117-61db-4f50-b84f-81de6310206f_e08ffe7c-b24d-4fcd-9355-8a459a2c07b7_da49b7be-bc13-11e4-a0de-6364e0bfe983": "Holzbedarf",
"17d401a9-de21-49a2-95bc-7dafa53dda64_027dcc39-d642-4900-91c4-abbd9c317cb8_610e91d9-0a1c-4a38-9399-deb0ff8dcb05": "Fellbestimmung",
"17d401a9-de21-49a2-95bc-7dafa53dda64_027dcc39-d642-4900-91c4-abbd9c317cb8_6a03c92d-9e23-4c67-9e76-6a5e28224371": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_25a3a482-a119-4db4-8c4e-235ea9d8dab7_90a01be2-dc8a-4733-b302-de5554969453": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_25a3a482-a119-4db4-8c4e-235ea9d8dab7_914778bc-f7e9-4327-a78b-71b6fa8762b1": "Biosphärenreservat",
"17d401a9-de21-49a2-95bc-7dafa53dda64_7a8ff4c4-7976-45e0-8ef5-cb386d536cb3_3ae4452e-ed67-4687-849d-e9341fca2900": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_7a8ff4c4-7976-45e0-8ef5-cb386d536cb3_f6f5c087-487c-43d8-9409-648a59684a09": "Lebensraum",
"17d401a9-de21-49a2-95bc-7dafa53dda64_97b86d4e-4724-4431-9c94-d2f57696fe2e_26e6558e-8069-45a1-961d-ab1ec9c5aa83": "Holzbedarf",
"17d401a9-de21-49a2-95bc-7dafa53dda64_97b86d4e-4724-4431-9c94-d2f57696fe2e_970ff4e0-16bd-4380-8e69-91a324a59523": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_3366d6a3-684f-423a-bd7f-5c0107d4b972": "Simulation",
"17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_a7188b81-e25b-456d-9742-5f11adb7d461": "Lockstock-Nachweis",
"17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_d29537b9-de0b-42c2-b3da-27a3dbc57988": "Lockstock-Code",
}
CACHE_NAMES = {
"043ab9fe-64e8-4e76-8bf6-8cc9db35eba1": "1a_Infozentrum",
"37f326ed-9732-44b5-9ba7-e666d31cc4e7": "2a_Holzlager",
"bf96eee0-4c92-43d8-aced-a95e4eedae9f": "2b_Holzstapel",
"a5723715-7ba7-4431-9d0b-c91c351a3ccc": "3a_Lebensraum_Hecke",
"dd68ba57-a081-46be-9a76-e49cd5209383": "3b_Lebensraum_Hecke",
"bb21628e-d039-4c16-9fe1-68de7f448fa4": "4a_Lockstock_finden",
"8291c397-b3a9-4564-9365-bd660ab1abcc": "4b_Lockstock_finden",
"e92d8175-a65f-40de-ae76-3cbde55dfd4d": "5a_Straße",
"30451de3-2d5d-44c7-84b2-2abddbc8adcc": "5b_Straße",
"22fcc44c-64d4-4f84-ad05-8107542a04d2": "6a_Jaegerstand",
"1740e151-cd75-45c0-a06e-d724b9d69729": "6a_Jaegerstand",
"6d97d48a-7ac1-4e3a-b797-c2b4aa681a10": "5a_Straße",
"98e60f51-c4d5-4833-bc3b-2820e1bdd09d": "4b_Lockstock_finden",
"61d6dc12-11b5-4a9c-b0d8-7a38a29d772a": "5b_Straße",
"f4762feb-addb-4e82-b923-78f8c7b6aff9": "2b_Holzstapel",
"25b2cc3b-f8fd-4a21-9350-d175d837f6b6": "3a_Lebensraum_Hecke",
"5ba5046f-c956-4c21-aea5-a0a6055ed7e4": "1a_Infozentrum",
"fb60b94b-3f82-4ba9-98ac-f52105bd26f1": "2a_Holzlager",
"12b9584a-14b4-40c6-aa13-9fb11062e917": "4a_Lockstock_finden",
"19908306-8c70-4861-bec8-49e849e94722": "3b_Lebensraum_Hecke",
"None": "initial",
"only": "",
}
SEQUENCE_NAMES = {
"89b769f8-2c98-4f55-b741-1dfa022c3286": "1_Videoerklaerung",
"286cab41-6a81-4dfe-9bef-e86923ca8c97": "A_Einleitung",
"2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075": "B",
"25a3a482-a119-4db4-8c4e-235ea9d8dab7": "B",
"97b86d4e-4724-4431-9c94-d2f57696fe2e": "C_mit_Dilemma",
"e08ffe7c-b24d-4fcd-9355-8a459a2c07b7": "C_mit_Dilemma",
"5f644fb4-5cc7-43a2-afb4-191dce80c875": "D_Dilemmasequenz",
"847ab5ff-7c98-4cdc-bc9e-bb619a0a98bb": "D_Dilemmasequenz",
"13127209-103c-4aed-9cce-b8a2cd9f7663": "E",
"7a8ff4c4-7976-45e0-8ef5-cb386d536cb3": "E",
"a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05": "F",
"b9571a6b-c537-4a92-8618-2d73415dec87": "F",
"027dcc39-d642-4900-91c4-abbd9c317cb8": "G",
"41abfe17-aef3-41ee-b1e5-eedc8208680f": "G",
"be59a20a-69ce-471b-8f70-76ce200e32c9": "H_Abschlusserzaehlung",
"d4073563-da42-4ad2-9a9b-20ef29da6309": "H_Abschlusserzaehlung",
"54e03082-1a6b-11e5-aa26-00199963ac6e": "seq_score",
"95d82cd3-5bda-465a-8757-7179cdafe590": "seq_score",
}