Compare commits
7 Commits: e254667256...da5df266c3

| Author | SHA1 | Date |
|---|---|---|
| | da5df266c3 | |
| | bba8c0719c | |
| | 412239515d | |
| | 01e2433b8b | |
| | 7d93b5f6fd | |
| | d9fa60dfe5 | |
| | 2c8eea0e6f | |
@@ -0,0 +1,8 @@
+FROM alpine:edge
+
+ADD ["requirements.txt", "/"]
+RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories && \
+    apk add --update --no-cache libpng libpng-dev freetype freetype-dev g++ python3 python3-dev libstdc++ openblas-dev && \
+    pip3 --no-cache-dir install -r requirements.txt && \
+    apk del libpng-dev freetype-dev g++ python3-dev openblas-dev && \
+    rm requirements.txt
@@ -12,7 +12,7 @@ from .render import Render
 from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender, \
     ActivityMapperRender, StoreRender, SimulationOrderRender, SimulationGroupRender
 from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender, LogEntryCountAnalyzerPlot, \
-    LogEntryCountCSV
+    LogEntryCountCSV, KMLRender
 from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
     LocomotionActionRatioRender
 
@@ -41,6 +41,7 @@ __MAPPING__ = {
     LocationAnalyzer: [
         TrackRender,
         HeatMapRender,
+        KMLRender,
     ],
     ActivityMapper: [
         ActivityMapperRender
@@ -2,7 +2,7 @@ import logging
 from collections import KeysView
 from typing import Type, Sized, Collection
 
-from analyzers.settings import LogSettings
+from analysis.analyzers.settings import LogSettings
 
 log: logging.Logger = logging.getLogger(__name__)
 
@@ -3,8 +3,8 @@ from collections import defaultdict, namedtuple, OrderedDict
 from types import SimpleNamespace
 from typing import List, NamedTuple
 
-from util import json_path, combinate
-from util.download import download_board, get_board_data
+from analysis.util import json_path, combinate
+from analysis.util.download import download_board, get_board_data
 from . import Result, LogSettings, Analyzer, ResultStore
 from .default import CategorizerStub, Store
 
@@ -218,7 +218,7 @@ class ActivityMapper(Analyzer):
     def classify_entry(self, entry):
         entry_type = entry[self.settings.type_field]
         if self.filters.end(entry):
-            data = {"extra_data": {"activity_type": "map"},"coordinate": self.new_coordinate()}
+            data = {"extra_data": {"activity_type": "map"}, "coordinate": self.new_coordinate()}
             data.update(entry)
             self.timeline.append(data)
             return "map"
@@ -1,7 +1,7 @@
 import logging
 from collections import defaultdict, OrderedDict
 
-from util import json_path
+from analysis.util import json_path
 from . import Result, LogSettings, Analyzer, ResultStore
 
 
@@ -16,9 +16,9 @@ class LocationAnalyzer(Analyzer):
         super().__init__(settings)
         self.entries = []
 
-    def result(self, store: ResultStore) -> None:
+    def result(self, store: ResultStore, **kwargs) -> None:
         self.log.debug(len(self.entries))
-        store.add(Result(type(self), list(self.entries)))
+        store.add(Result(type(self), list(self.entries), name=kwargs['name']))
 
     def process(self, entry: dict) -> bool:
         if entry[self.settings.type_field] in self.settings.spatials:
@@ -1,4 +1,4 @@
-import util
+from analysis import util
 from . import Analyzer, LogSettings, Result, ResultStore
 
 
@@ -5,7 +5,7 @@ from .. import Result
 class Render:
     result_types = []
 
-    def render(self, results: List[Result], name=None):
+    def render(self, results: List[Result], name=None) -> [str]:
         raise NotImplementedError()
 
     def filter(self, results: List[Result]):
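The `-> [str]` annotation above marks the convention this change set introduces: renderers return the list of file paths they produced (see `BoardDurationBoxRender`, `ActivityMapperRender`, and the new `KMLRender` further down). A minimal sketch of that contract, with hypothetical names (`SomeRender`, `publish` are stand-ins, not part of the diff):

```python
# Sketch only, under the contract implied by the signature change above.
class SomeRender(Render):
    def render(self, results, name=None) -> [str]:
        path = "/tmp/example.json"   # write the output somewhere...
        return [path]                # ...and report what was produced

files = SomeRender().render(store.get_all())
for path in files:
    publish(path)  # e.g. tasks.py below moves each file into DATA_PATH
```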
@@ -9,8 +9,8 @@ from scipy.interpolate import interp1d
 import networkx as nx
 import itertools
 
-from analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
-from util.meta_temp import CONFIG_NAMES
+from analysis.analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
+from analysis.util.meta_temp import CONFIG_NAMES
 from . import Render
 from .. import Result, SimulationRoundsAnalyzer, BoardDurationAnalyzer, ActivityMapper
 
@@ -135,16 +135,16 @@ class BoardDurationHistRender(Render):
 class BoardDurationBoxRender(Render):
     result_types = [BoardDurationAnalyzer]
 
-    def render(self, results: List[Result], name=None):
+    def render(self, results: List[Result], name=None) -> [str]:
         data = defaultdict(list)
         for result in self.filter(results):
-            get = result.get()
-            for board in get:
+            for board in result.get():
                 duration = board['active'] if 'active' in board else 0
                 data[board['id']].append(duration)
         data_tuples = [(key, data[key]) for key in sorted(data)]
         data_tuples = sorted(data_tuples, key=lambda x: sum(x[1]))
-        plot(data_tuples)
+        plot(data_tuples, name=name)
+        return [name]
 
 
 class ActivityMapperRender(Render):
@@ -152,12 +152,14 @@ class ActivityMapperRender(Render):
 
     def render(self, results: List[Result], name=None):
         print(os.getcwd())
+        files = []
         for result in self.filter(results):
             data = result.get()
-            with open(os.path.join("static", "progress", "data", data['instance'] + "_" + str(name) + ".json"),
-                      "w") as out:
+            path = os.path.join("/tmp", data['instance'] + "_" + str(name) + ".json")
+            with open(path, "w") as out:
                 json.dump(data, out, indent=1)
-        return "ok"
+            files.append(path)
+        return files
 
 
 class StoreRender(Render):
@@ -186,7 +188,7 @@ class SimulationOrderRender(Render):
 
 class SimulationGroupRender(Render):
     def render(self, results: List[Result], name=None):
-        #data = [r.get() for r in self.filter(results)]
+        # data = [r.get() for r in self.filter(results)]
         data = []
         for r in self.filter(results):
             raw = r.get()
@@ -2,11 +2,13 @@ import json
 import logging
 from typing import List
 
+import datetime
 import matplotlib.pyplot as plt
 
-from analyzers import LogEntryCountAnalyzer
+from analysis.analyzers import LogEntryCountAnalyzer
+from analysis.util.meta_temp import KML_PATTERN
 from . import Render, Result
-from .. import LocationAnalyzer
+from analysis.analyzers import LocationAnalyzer
 
 log = logging.getLogger(__name__)
 
@@ -39,6 +41,34 @@ class TrackRender(Render):
         return dumps
 
 
+def format_time(ts):
+    return datetime.datetime.fromtimestamp(ts/1000).strftime("%Y-%m-%dT%H:%M:%S.%f")
+
+
+class KMLRender(Render):
+    result_types = [LocationAnalyzer]
+
+    def render(self, results: List[Result], name=None):
+        files = []
+        for result in self.filter(results):
+            times = ["<when>{time}</when>".format(time=format_time(entry["timestamp"])) for entry in result.get()]
+            coords = [
+                "<gx:coord>{long} {lat} 0.0</gx:coord>"
+                    .format(
+                    lat=entry['location']['coordinates'][1],
+                    long=entry['location']['coordinates'][0])
+                for entry in result.get()
+            ]
+            filename = str(result.name)+".kml"
+            print(filename)
+            with open(filename, "w") as out:
+                out.write(KML_PATTERN.format(name=str(result.name), coordinates="\n".join(coords), when="\n".join(times)))
+            files.append(filename)
+        return files
+
+
+
 class HeatMapRender(TrackRender):
     weight = 0.01
 
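`format_time` above converts the epoch-millisecond timestamps in the log entries into the ISO-style strings KML's `<when>` element expects. A quick worked example (a sketch; the exact output depends on the worker's local timezone, since `fromtimestamp` is timezone-local):

```python
import datetime

def format_time(ts):
    # ts is epoch milliseconds, as found in entry["timestamp"]
    return datetime.datetime.fromtimestamp(ts / 1000).strftime("%Y-%m-%dT%H:%M:%S.%f")

print(format_time(1521115200000))
# 1521115200000 ms is 2018-03-15 12:00:00 UTC, so a UTC machine prints
# "2018-03-15T12:00:00.000000" while a UTC+1 machine prints "...T13:00:00.000000".
```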
@@ -2,8 +2,8 @@ import json
 
 import numpy as np
 
-import analyzers
-from util.geo import calc_distance
+import analysis.analyzers
+from analysis.util.geo import calc_distance
 
 
 def time_distribution(store):
@@ -97,7 +97,7 @@ def time_distribution(store):
 
 from collections import defaultdict
 import matplotlib.pyplot as plt
-from util.meta_temp import CONFIG_NAMES
+from analysis.util.meta_temp import CONFIG_NAMES
 
 keys = [
     "simu",
@@ -1,12 +1,12 @@
 import json
 import sys
-from sources import SOURCES
+from clients.webclients import CLIENTS
 
 
 def load_source(config):
-    if config["type"] in SOURCES:
-        source = SOURCES[config["type"]]()
-        source.connect(**config)
+    if config["type"] in CLIENTS:
+        source = CLIENTS[config["type"]](**config)
+        source.login()
         return source
 
 
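This rewires `load_source` from the removed `sources` package to the new `clients.webclients.CLIENTS` registry: the whole `"source"` config dict is now passed as keyword arguments to the client constructor, and `connect()` is replaced by `login()`. Roughly, using the `"source"` block that appears later in this compare:

```python
# Sketch of the new flow (values taken from the temp_config "source" block below):
config = {
    "type": "Biogames",
    "username": "ba",
    "password": "853451",
    "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
}
source = CLIENTS[config["type"]](**config)  # i.e. BiogamesClient(**config)
source.login()                              # was: source.connect(**config)
```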
@@ -28,13 +28,15 @@ class LogSettings:
         self.boards = json_dict['boards']
         for mod in json_dict['analyzers']:
             for name in json_dict['analyzers'][mod]:
-                print(mod, name)
+                print(mod, name, getattr(sys.modules[mod], name))
                 self.analyzers.append(getattr(sys.modules[mod], name))
         self.sequences = json_dict['sequences']
         if 'custom' in json_dict:
             self.custom = json_dict['custom']
         if "source" in json_dict:
             self.source = load_source(json_dict['source'])
+        if "render" in json_dict:
+            self.render = json_dict['render']
 
     def __repr__(self):
         return str({
@@ -51,3 +53,7 @@ class LogSettings:
 
 def load_settings(file: str) -> LogSettings:
     return LogSettings(json.load(open(file)))
+
+
+def parse_settings(config: str) -> LogSettings:
+    return LogSettings(json.loads(config))
@@ -2,16 +2,16 @@ import json
 import logging
 from typing import List
 
-import analyzers
-from analyzers import get_renderer, render
-from analyzers.analyzer import ResultStore
-from analyzers.analyzer.default import write_logentry_count_csv, write_simulation_flag_csv
-from analyzers.render import wip
-from analyzers.render.default import LogEntryCountCSV
-from analyzers.render.wip import time_distribution, plot_data
-from analyzers.settings import LogSettings, load_settings
-from loaders import LOADERS
-from util.processing import grep, run_analysis, src_file
+from analysis import analyzers
+from analysis.analyzers import get_renderer, render
+from analysis.analyzers.analyzer import ResultStore
+from analysis.analyzers.analyzer.default import write_logentry_count_csv, write_simulation_flag_csv
+from analysis.analyzers.render import wip
+from analysis.analyzers.render.default import LogEntryCountCSV, KMLRender
+from analysis.analyzers.render.wip import time_distribution, plot_data
+from analysis.analyzers.settings import LogSettings, load_settings, parse_settings
+from analysis.loaders import LOADERS
+from analysis.util.processing import grep, run_analysis, src_file
 
 logging.basicConfig(format='%(levelname)s %(name)s:%(message)s', level=logging.DEBUG)
 log: logging.Logger = logging.getLogger(__name__)
 
@@ -26,36 +26,39 @@ def urach_logs(log_ids, settings):
 
 
 if __name__ == '__main__':
-    settings: LogSettings = load_settings("biogames2.json")
-    log_ids_urach: List[str] = urach_logs([
-        # "34fecf49dbaca3401d745fb467",
-        # "44ea194de594cd8d63ac0314be",
-        # "57c444470dbf88605433ca935c",
-        # "78e0c545b594e82edfad55bd7f",
-        # "91abfd4b31a5562b1c66be37d9",
+    settings = {}
+    log_ids_gf = []
+    # settings: LogSettings = load_settings("biogames2.json")
+    # log_ids_urach: List[str] = urach_logs([
+    # # "34fecf49dbaca3401d745fb467",
+    # # "44ea194de594cd8d63ac0314be",
+    # # "57c444470dbf88605433ca935c",
+    # # "78e0c545b594e82edfad55bd7f",
+    # # "91abfd4b31a5562b1c66be37d9",
+    # # "597b704fe9ace475316c345903",
+    # # "e01a684aa29dff9ddd9705edf8",
     # "597b704fe9ace475316c345903",
     # "e01a684aa29dff9ddd9705edf8",
-    "597b704fe9ace475316c345903",
-    "e01a684aa29dff9ddd9705edf8",
-    "fbf9d64ae0bdad0de7efa3eec6",
     # "fbf9d64ae0bdad0de7efa3eec6",
-    "fe1331481f85560681f86827ec", # urach
-    # "fe1331481f85560681f86827ec"]
-    "fec57041458e6cef98652df625",
-    ]
-    , settings)
-    log_ids_gf = grep(["9d11b749c78a57e786bf5c8d28", # filderstadt
-                       "a192ff420b8bdd899fd28573e2", # eichstätt
-                       "3a3d994c04b1b1d87168422309", # stadtökologie
-                       "fe1331481f85560681f86827ec", # urach
-                       "96f6d9cc556b42f3b2fec0a2cb7ed36e" # oberelsbach
-                       ],
-                      "/home/clemens/git/ma/test/src",
-                      settings)
-    log_ids = src_file("/home/clemens/git/ma/test/filtered_5_actions")
+    # # "fbf9d64ae0bdad0de7efa3eec6",
+    # "fe1331481f85560681f86827ec", # urach
+    # # "fe1331481f85560681f86827ec"]
+    # "fec57041458e6cef98652df625",
+    # ]
+    # , settings)
+    # log_ids_gf = grep(["9d11b749c78a57e786bf5c8d28", # filderstadt
+    #                    "a192ff420b8bdd899fd28573e2", # eichstätt
+    #                    "3a3d994c04b1b1d87168422309", # stadtökologie
+    #                    "fe1331481f85560681f86827ec", # urach
+    #                    "96f6d9cc556b42f3b2fec0a2cb7ed36e" # oberelsbach
+    #                    ],
+    #                   "/home/clemens/git/ma/test/src",
+    #                   settings)
+    # log_ids = src_file("/home/clemens/git/ma/test/filtered_5_actions")
 
-    #store: ResultStore = run_analysis(log_ids_gf, settings, LOADERS)
-    #store: ResultStore = run_analysis(log_ids, settings, LOADERS)
+    if False:
+        store: ResultStore = run_analysis(log_ids_gf, settings, LOADERS)
+        # store: ResultStore = run_analysis(log_ids, settings, LOADERS)
 
     if False:
         for r in get_renderer(analyzers.LocomotionActionAnalyzer):
@@ -69,7 +72,7 @@ if __name__ == '__main__':
     # render(analyzers.ProgressAnalyzer, store.get_all())
 
     if False:
-        from analyzers.postprocessing import graph
+        from analysis.analyzers.postprocessing import graph
 
         g = graph.Cache(settings)
         g.run(store)
@@ -85,7 +88,7 @@ if __name__ == '__main__':
     if False:
         time_distribution(store)
 
-    if True:
+    if False:
         # spatial_data = get_data_distance(store,relative_values=False)
         # temporal_data = get_data(store,relative_values=False)
         # spatial_data_rel = get_data_distance(store,relative_values=True)
@@ -104,6 +107,18 @@ if __name__ == '__main__':
         # plot_time_space_rel(combined, keys)
         plot_data(combined, wip.keys)
 
+    if True:
+        settings: LogSettings = load_settings("../oeb_kml.json")
+        log_ids = src_file("/home/clemens/git/ma/test/oeb_2016_path")
+        log_ids = log_ids[0:2]
+        print(log_ids)
+        store: ResultStore = run_analysis(log_ids, settings, LOADERS)
+        print("render")
+        kml = KMLRender()
+        kml.render(store.get_all())
+        print("done")
+        #for cat in store.get_categories():
+        #    render(analyzers.ActivityMapper, store.get_category(cat), name=cat)
 
     # for analyzers in analyzers:
     #     if analyzers.name() in ["LogEntryCount", "ActionSequenceAnalyzer"]:
@@ -1,31 +1,29 @@
 import logging
 import os
 
-from util import json_path
+from analysis.util import json_path
 
 logger = logging.getLogger(__name__)
 
 
-def download_board(board_id, instance_config_id, sequence_id, source):
-    local_file = "static/progress/images/{config_id}/{sequence_id}/{board_id}".format(
-        config_id=instance_config_id,
-        sequence_id=sequence_id,
-        board_id=board_id)
-    if os.path.exists(local_file):
+def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"):
+    local_file = os.path.join("static", instance_config_id, sequence_id, board_id)
+    abs_path = os.path.join(path, local_file)
+    if os.path.exists(abs_path):
         return local_file
     url = "/game2/editor/config/{config_id}/sequence/{sequence_id}/board/{board_id}/".format(
         config_id=instance_config_id,
         sequence_id=sequence_id,
         board_id=board_id
     )
-    board = source._get(url)
+    board = source.get(url)
     if not board.ok:
-        raise ConnectionError()
+        raise ConnectionError(url, board, board.status_code)
     data = board.json()
     preview_url = json_path(data, "preview_url.medium")
     logger.debug(preview_url)
-    os.makedirs(local_file[:-len(board_id)], exist_ok=True)
-    source.download_file(preview_url, local_file)
+    os.makedirs(abs_path[:-len(board_id)], exist_ok=True)
+    source.download_file(preview_url, abs_path)
     return local_file
 
 
@@ -68,10 +66,11 @@ def get_json(source, url):
     if url in cache:
         return cache[url]
     try:
-        data = source.get_json(url)
+        data = source.get(url).json()
 
     except Exception as e:
-        print("exception", e, e.args) # TODO: logging
+        print("exception", e, e.args)
+        logger.exception(e)
         data = None
     cache[url] = data
     return data
@@ -97,3 +97,20 @@ CONFIG_NAMES = {
     'fe43a0f0-3dea-11e6-a065-00199963ac6e': u'Vorlagen',
     'ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771': u'Bad Urach'
 }
+
+
+KML_PATTERN="""<?xml version="1.0" encoding="UTF-8"?>
+<kml xmlns="http://www.opengis.net/kml/2.2" xmlns:gx="http://www.google.com/kml/ext/2.2">
+ <Document>
+  <Placemark>
+   <gx:MultiTrack>
+    <gx:Track>
+     {when}
+     {coordinates}
+    </gx:Track>
+   </gx:MultiTrack>
+   {coordinates}
+  </Placemark>
+ </Document>
+</kml>
+"""
@@ -1,8 +1,8 @@
 import logging
 from typing import List
 
-from analyzers.analyzer import ResultStore, Analyzer
-from analyzers.settings import LogSettings
+from analysis.analyzers.analyzer import ResultStore, Analyzer
+from analysis.analyzers.settings import LogSettings
 
 log: logging.Logger = logging.getLogger(__name__)
 
@@ -31,6 +31,7 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
 def run_analysis(log_ids: list, settings, loaders):
     store: ResultStore = ResultStore()
     for log_id in log_ids:
+        log.info("LOG_ID: "+ str(log_id))
         for analysis in process_log(log_id, settings, loaders):
             log.info("* Result for " + analysis.name())
             analysis.result(store, name=log_id)
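Together with the `LocationAnalyzer.result(store, **kwargs)` change earlier in this compare, this loop now threads each `log_id` through to the stored results as `name=log_id`, and `KMLRender` uses `result.name` as the output file stem. A compressed sketch of that flow (assuming `Result` accepts a `name` keyword, as the call sites imply):

```python
# Sketch only: how name= travels from run_analysis into the rendered file name.
for log_id in log_ids:                       # run_analysis
    for analysis in process_log(log_id, settings, loaders):
        analysis.result(store, name=log_id)  # -> Result(..., name=log_id)

# later, in KMLRender.render():
#     filename = str(result.name) + ".kml"   # one .kml per analyzed log
```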
@@ -0,0 +1,121 @@
+import json
+import logging
+
+import os
+import shutil
+import tempfile
+import typing
+
+import requests
+
+log: logging.Logger = logging.getLogger(__name__)
+
+
+class Client:
+    host: str = ""
+    cookies: typing.Dict[str, str] = {}
+    headers: typing.Dict[str, str] = {}
+
+    def url(self, path):
+        if self.host:
+            return self.host + path
+        return path
+
+    def get(self, url, **kwargs) -> requests.models.Response:
+        log.info("GET " + str(url))
+        return requests.get(self.url(url), cookies=self.cookies, headers=self.headers, **kwargs)
+
+    def post(self, url, data, **kwargs) -> requests.models.Response:
+        log.info("POST " + str(url))
+        return requests.post(self.url(url), data, cookies=self.cookies, headers=self.headers, **kwargs)
+
+    def download_file(self, url, target, **kwargs) -> bool:
+        with open(target, "wb") as out:
+            try:
+                download = self.get(url, stream=True, **kwargs)
+                shutil.copyfileobj(download.raw, out)
+            except Exception as e:
+                log.exception(e)
+                os.remove(target)
+                return False
+        return True
+
+    def download_files(self, urls, **kwargs) -> tempfile.TemporaryDirectory:
+        target = tempfile.TemporaryDirectory()
+        for path in urls:
+            filename = os.path.join(target.name, path.split("/")[-1])
+            self.download_file(path, filename, **kwargs)
+        return target
+
+    def login(self):
+        pass
+
+    def list(self):
+        pass
+
+
+class BiogamesClient(Client):
+    config_fields: typing.Dict[str, typing.List[str]] = {
+        'login': ('username', 'password', 'host'),
+        'session': ('sessionid', 'csrftoken', 'host'),
+    }
+    login_url: str = "/game2/auth/json-login"
+    list_url: str = "/game2/instance/log/list/"
+    headers: typing.Dict[str, str] = {'Accept': 'application/json'}
+
+    def __init__(self, **kwargs):
+        match = {j: all([i in kwargs for i in self.config_fields[j]]) for j in self.config_fields}
+        valid = filter(lambda x: match[x], match)
+        if not valid:
+            raise ValueError("missing parameter (" + str(self.config_fields) + ")")
+        self.config = kwargs
+        self.cookies = {}
+        self.host = self.config['host']
+        if 'session' in valid:
+            self.cookies = kwargs
+
+    def login(self) -> bool:
+        csrf_request = self.get(self.list_url)
+        if not csrf_request.ok:
+            log.exception(ConnectionError("Unable to obtain CSRF token (" + str(csrf_request) + ")"))
+            return False
+        if not 'csrftoken' in self.cookies:
+            self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
+        login_payload = {
+            'username': self.config['username'],
+            'password': self.config['password'],
+            'next': '',
+            'csrfmiddlewaretoken': 'csrftoken',
+        }
+        login = self.post(self.login_url, json.dumps(login_payload))
+        if not login.ok:
+            log.exception(ConnectionError("Unable to authenticate", login, login.text))
+            return False
+        self.cookies['sessionid'] = login.cookies['sessionid']
+        print(self.cookies)
+        return True
+
+    def list(self) -> dict:
+        print(self.cookies)
+        logs = self.get(self.list_url)
+        if not logs.ok:
+            raise ConnectionError("HTTP fail", logs, logs.text)
+        return logs.json()
+
+    def load_all_logs(self) -> tempfile.TemporaryDirectory:
+        return self.download_files([i["file_url"] for i in self.list()])
+
+
+CLIENTS: typing.Dict[str, typing.Type[Client]] = {
+    "Biogames": BiogamesClient,
+}
+
+if __name__ == '__main__':
+    # c = BiogamesClient(host="http://biodiv", username="ba", password="853451")
+    # print(c.login())
+    # print(json.dumps(c.list(), indent=1))
+    # print(type(c.load_all_logs()))
+    # print(type(c.get("/")))
+    c = BiogamesClient(host="http://biodiv", **{'csrftoken': 'IgbwP83iEibW6RE7IADIFELYdbx0dvqQ',
+                                                'sessionid': 'zntsj09d92tjos1b6ruqjthlzv60xdin'})
+    print(json.dumps(c.list(), indent=1))
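Per `config_fields`, `BiogamesClient` accepts either a credential set for a fresh login or an existing session, mirroring the two construction styles in the `__main__` block above. A sketch with placeholder values:

```python
from clients.webclients import BiogamesClient

# 1) fresh login: login() first fetches a csrftoken, then posts the credentials
c = BiogamesClient(host="http://biodiv", username="user", password="secret")
if c.login():
    print(c.list())

# 2) resume an existing session: the cookies are reused directly
c = BiogamesClient(host="http://biodiv", csrftoken="<token>", sessionid="<sid>")
print(c.list())
```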
@@ -0,0 +1,56 @@
+version: "2.2"
+
+services:
+  app:
+    image: docker.clkl.de/ma/celery:0.3.3
+    build: ./selector
+    cpu_count: 4
+    volumes:
+      - ./:/app
+    working_dir: /app/selector
+    command: python3 webserver.py
+    environment:
+      - PYTHONPATH=/app
+    networks:
+      - default
+      - traefik_net
+    labels:
+      - "traefik.enable=true"
+      - "traefik.port=5000"
+      - "traefik.docker.network=traefik_net"
+      - "traefik.url.frontend.rule=Host:select.ma.potato.kinf.wiai.uni-bamberg.de"
+
+  celery:
+    image: docker.clkl.de/ma/celery:0.3.3
+    environment:
+      - PYTHONPATH=/app
+    volumes:
+      - ./:/app
+      - ./data/results:/data/results
+    working_dir: /app
+    command: celery -A tasks.tasks worker --loglevel=info
+
+
+  redis:
+    image: redis:4-alpine
+    volumes:
+      - ./data/redis:/data
+    command: redis-server --appendonly yes
+
+  nginx:
+    image: nginx:1.13-alpine
+    volumes:
+      - ./data/results:/usr/share/nginx/html:ro
+    networks:
+      - traefik_net
+    labels:
+      - "traefik.enable=true"
+      - "traefik.port=80"
+      - "traefik.docker.network=traefik_net"
+      - "traefik.url.frontend.rule=Host:results.ma.potato.kinf.wiai.uni-bamberg.de"
+
+
+networks:
+  traefik_net:
+    external:
+      name: traefik_net
@@ -1,8 +1,13 @@
 requests==2.18.4
-numpy==1.13.1
+numpy==1.14.2
 matplotlib==2.1.0
-osmnx==0.6
+#osmnx==0.6
 networkx==2.0
-pydot==1.2.3
+#pydot==1.2.3
-scipy==1.0.0
+scipy==1.0.1
-ipython==6.2.1
+#ipython==6.2.1
+
+flask==0.12.2
+
+celery==4.1.0
+redis==2.10.6
@@ -0,0 +1,4 @@
+function validateSettings() {
+    alert(document.getElementById('safety').checked);
+    return false;
+}
@@ -0,0 +1,9 @@
+body {
+    background-color: aqua;
+}
+#data{
+    display: none;
+}
+li{
+    list-style-type: none;
+}
@@ -0,0 +1,133 @@
+KML = """{
+ "logFormat": "zip",
+ "entryType": "@class",
+ "spatials": [
+  "de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
+ ],
+ "actions": [],
+ "boards": [
+  "de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
+ ],
+ "analyzers": {
+  "analysis.analyzers": [
+   "BiogamesCategorizer",
+   "LocationAnalyzer"
+  ]
+ },
+ "sequences": {
+  "start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
+  "end": {
+   "@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
+   "action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
+  }
+ },
+ "custom": {
+  "simulation_rounds": [
+   "de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
+  ],
+  "simu_data": [
+   "de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
+  ],
+  "instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
+  "instance_id": "instance_id",
+  "instance_config_id": "config.@id",
+  "sequences2": {
+   "id_field": "sequence_id",
+   "start": {
+    "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+    "action": "START"
+   },
+   "end": {
+    "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+    "action": "PAUSE"
+   }
+  },
+  "coordinates": "location.coordinates",
+  "metadata": {
+   "timestamp": "timestamp",
+   "gamefield": "instance_id",
+   "user": "player_group_name"
+  }
+ },
+ "source": {
+  "type": "Biogames",
+  "username": "ba",
+  "password": "853451",
+  "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+ },
+ "render": [
+  "KMLRender"
+ ]
+}"""
+
+ACTIVITY = """{
+ "logFormat": "zip",
+ "entryType": "@class",
+ "spatials": [
+  "de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
+ ],
+ "actions": [],
+ "boards": [
+  "de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
+ ],
+ "analyzers": {
+  "analysis.analyzers": [
+   "BiogamesCategorizer",
+   "ActivityMapper"
+  ]
+ },
+ "sequences": {
+  "start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
+  "end": {
+   "@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
+   "action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
+  }
+ },
+ "custom": {
+  "simulation_rounds": [
+   "de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
+  ],
+  "simu_data": [
+   "de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
+  ],
+  "instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
+  "instance_id": "instance_id",
+  "instance_config_id": "config.@id",
+  "sequences2": {
+   "id_field": "sequence_id",
+   "start": {
+    "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+    "action": "START"
+   },
+   "end": {
+    "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+    "action": "PAUSE"
+   }
+  },
+  "coordinates": "location.coordinates",
+  "metadata": {
+   "timestamp": "timestamp",
+   "gamefield": "instance_id",
+   "user": "player_group_name"
+  }
+ },
+ "source": {
+  "type": "Biogames",
+  "username": "ba",
+  "password": "853451",
+  "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+ },
+ "render": [
+  "ActivityMapper"
+ ]
+}"""
+
+CONFIGS = {  # TODO
+    "KML": KML,
+    "ActivityMapper": ACTIVITY,
+}
+
+URLS = {
+    "KML": "/",
+    "ActivityMapper": "#",
+}
@@ -0,0 +1,5 @@
+<!doctype html>
+<title>…</title>
+<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename='style.css') }}">
+<script type="application/javascript" src="{{url_for('static', filename='script.js') }}"></script>
+{% block body %} {% endblock %}
@@ -0,0 +1,25 @@
+{% extends "base.html" %}
+{% block body %}
+<form action="/start" method="post">
+    <div id="data"> {{logs}}</div>
+    <ul>
+        {% for log in logs %}
+        <li>
+            <input type="checkbox" name="logs" value="{{log['@id']}}">
+            {{log.start_date}}: {{log.player_group_name}} (→{{log.end_date}})
+        </li>
+        <!--{{log}}-->
+        {% endfor %}
+    </ul>
+    <!--input type="checkbox" id="safety"><label for="safety">Confirm selection</label-->
+    <input type="text" id="name" maxlength="128" placeholder="name" name="name"/><br>
+    <select name="config">
+        {% for config in configs %}
+        <option>{{config}}</option>
+        {% endfor %}
+    </select>
+    <input type="submit">
+
+</form>
+<a href="/results">show analysis progress/results</a>
+{% endblock %}
@@ -0,0 +1,14 @@
+{% extends "base.html" %}
+{% block body %}
+<form action="/login" method="post">
+    <select name="game">
+        {% for game in clients %}
+        <option>{{ game }}</option>
+        {% endfor %}
+    </select>
+    <input type="text" name="username" placeholder="username"/>
+    <input type="password" name="password" placeholder="passwort"/>
+    <input type="submit">
+</form>
+
+{% endblock %}
@@ -0,0 +1,22 @@
+{% extends "base.html" %}
+{% block body %}
+
+<a href="/games">create new analysis</a>
+
+<div id="results">
+    <ul>
+        {% for job in jobs %}
+        <li> {{jobs[job].status}}: "{{job}}":
+            <ul>
+                {% for r in jobs[job].results %}
+                <li><a href="{{jobs[job] | get_prefix}}{{r | get_name}}">{{r|get_name}} {{jobs[job].start}}</a></li>
+                {% endfor %}
+            </ul>
+
+
+        </li>
+        {% endfor %}
+    </ul>
+</div>
+
+{% endblock %}
@@ -0,0 +1,124 @@
+import json
+import logging
+import typing
+import uuid
+
+import time
+
+from clients.webclients import Client, CLIENTS
+
+from flask import Flask, render_template, request, redirect, session
+
+from tasks import tasks
+from selector.temp_config import CONFIGS, URLS
+
+BIOGAMES_HOST = "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+#BIOGAMES_HOST = "http://www.biodiv2go.de"
+RESULT_HOST = "http://results.ma.potato.kinf.wiai.uni-bamberg.de/"
+
+app = Flask(__name__)
+clients: typing.Dict[str, Client] = {}
+log: logging.Logger = logging.getLogger(__name__)
+
+
+@app.route("/")
+def index():
+    return render_template("index.html", clients=CLIENTS)
+
+
+@app.route("/login", methods=["POST"])
+def login():
+    game = request.form["game"]
+    if not game in CLIENTS:
+        return redirect("/?invalid_game")
+    client = CLIENTS[game](host=BIOGAMES_HOST, username=request.form['username'], password=request.form['password'])
+    if client.login():
+        session['logged_in'] = True
+        session['uid'] = str(uuid.uuid4())
+        session['username'] = request.form['username']
+        session['cookies'] = client.cookies
+        session['game'] = game
+        session['host'] = BIOGAMES_HOST
+        clients[session['uid']] = client
+        return redirect("/results")
+    return redirect("/?fail")
+
+
+@app.route("/results")
+def results():
+    if not ('logged_in' in session and session['logged_in']):
+        return redirect("/")
+    if session['logged_in'] and not session['uid'] in clients:
+        clients[session['uid']] = CLIENTS[session['game']](host=session['host'], **session['cookies'])
+    status = tasks.redis.get(session['username'])
+    if status:
+        job_status = json.loads(status)
+    else:
+        job_status = {}
+    #for job in job_status:
+    #    results = []
+    #    for path in job_status[job]['results']:
+    #        results.append(path.replace(tasks.DATA_PATH, RESULT_HOST))
+    #    print(results) #TODO???
+    return render_template("results.html", jobs=job_status)
+
+
+@app.route("/games")
+def games():
+    if not ('logged_in' in session and session['logged_in']):
+        return redirect("/")
+    if session['logged_in'] and not session['uid'] in clients:
+        clients[session['uid']] = CLIENTS[session['game']](host=session['host'], **session['cookies'])
+    return render_template("games.html", logs=clients[session['uid']].list(), configs=CONFIGS)
+
+
+@app.route("/start", methods=['POST'])
+def start():
+    print(str(request.form['logs']))
+    status = {
+        "status": "PENDING",
+        "submit": time.strftime("%c"),
+        "log_ids": request.form.getlist('logs'),
+        "config": request.form['config'],
+    }
+    params = {
+        "log_ids": request.form.getlist('logs'),
+        "config": CONFIGS[request.form['config']],
+        "username": session['username'],
+        "cookies": session['cookies'],
+        "host": session['host'],
+        "clientName": session['game'],
+        "name": request.form['name'],
+    }
+    tasks.status_update(session['username'], request.form['name'], status)
+    tasks.analyze.delay(**params)
+    return redirect("/results")
+
+
+@app.route("/status")
+def status():
+    return json.dumps(json.loads(tasks.redis.get(session['username'])), indent=2)
+
+
+@app.template_filter('get_url')
+def get_url(path: str):
+    return path.replace(tasks.DATA_PATH, RESULT_HOST)
+
+@app.template_filter('get_name')
+def get_url(path: str):
+    return path.replace(tasks.DATA_PATH, "")
+
+
+@app.template_filter('get_prefix')
+def get_prefix(job):
+    print(job)
+    try:
+        return RESULT_HOST + URLS[job['config']]
+    except:
+        return RESULT_HOST + "#"
+
+
+
+if __name__ == '__main__':
+    app.config.update({"SECRET_KEY": "59765798-2784-11e8-8d05-db4d6f6606c9"})
+    app.run(host="0.0.0.0", debug=True)
@ -1,5 +0,0 @@
|
||||||
from .biogames import Biogames
|
|
||||||
|
|
||||||
SOURCES = {
|
|
||||||
"Biogames": Biogames,
|
|
||||||
}
|
|
||||||
|
|
@ -1,85 +0,0 @@
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import typing
|
|
||||||
from tempfile import TemporaryDirectory
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from sources.source import Source
|
|
||||||
|
|
||||||
import shutil
|
|
||||||
import requests
|
|
||||||
|
|
||||||
log: logging.Logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Biogames(Source):
|
|
||||||
def __init__(self):
|
|
||||||
self.headers: typing.Dict[str, str] = {'Accept': 'application/json'}
|
|
||||||
self.cookies: typing.Dict[str, str] = {}
|
|
||||||
self.id2link: typing.Dict[str, str] = {}
|
|
||||||
self.host: str = None
|
|
||||||
|
|
||||||
def connect(self, **kwargs):
|
|
||||||
for i in ['username', 'password', 'url', 'login_url', 'host']:
|
|
||||||
if not i in kwargs:
|
|
||||||
raise ValueError("missing value " + i)
|
|
||||||
csrf_request = requests.get(kwargs['url'])
|
|
||||||
if csrf_request.status_code != 200:
|
|
||||||
raise ConnectionError("unable to obtain CSRF token (" + str(csrf_request) + ")")
|
|
||||||
self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
|
|
||||||
log.info("obtained CSRF token (" + self.cookies['csrftoken'] + ")")
|
|
||||||
login_payload = {
|
|
||||||
'username': kwargs['username'],
|
|
||||||
'password': kwargs['password'],
|
|
||||||
'next': '',
|
|
||||||
'csrfmiddlewaretoken': 'csrftoken'
|
|
||||||
}
|
|
||||||
login = requests.post(kwargs['login_url'], data=json.dumps(login_payload), cookies=self.cookies)
|
|
||||||
if login.status_code != 200:
|
|
||||||
raise ConnectionError("Unable to authenticate!", login, login.text)
|
|
||||||
self.cookies['sessionid'] = login.cookies['sessionid']
|
|
||||||
log.info("obtained sessionid (" + self.cookies['sessionid'] + ")")
|
|
||||||
self.url = kwargs['url']
|
|
||||||
self.host = kwargs['host']
|
|
||||||
log.info("stored url (" + self.url + ")")
|
|
||||||
|
|
||||||
def list(self):
|
|
||||||
logs = self.get_json(self.url)
|
|
||||||
log.info(len(logs))
|
|
||||||
for i in logs:
|
|
||||||
self.id2link[i["id"]] = i["link"] # TODO
|
|
||||||
return logs
|
|
||||||
|
|
||||||
def get(self, ids: typing.Collection):
|
|
||||||
dir = TemporaryDirectory()
|
|
||||||
files = []
|
|
||||||
for i in ids:
|
|
||||||
url = self.id2link[i]
|
|
||||||
filename = os.path.join(dir.name, url.split("/")[-1])
|
|
||||||
file = self.download_file(url, filename)
|
|
||||||
if file:
|
|
||||||
files.append(file)
|
|
||||||
return dir
|
|
||||||
|
|
||||||
def download_file(self, url, filename):
|
|
||||||
with open(filename, "wb") as out:
|
|
||||||
try:
|
|
||||||
download = self._get(url)
|
|
||||||
shutil.copyfileobj(download.raw, out)
|
|
||||||
return filename
|
|
||||||
except Exception as e:
|
|
||||||
log.exception(e)
|
|
||||||
os.remove(filename)
|
|
||||||
|
|
||||||
def get_json(self, url):
|
|
||||||
http = self._get(url, stream=False)
|
|
||||||
if not http.ok:
|
|
||||||
raise ConnectionError("HTTP status is not OK", http.url)
|
|
||||||
return http.json()
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def _get(self, url, stream=True):
|
|
||||||
return requests.get(self.host + url, cookies=self.cookies, headers=self.headers, stream=stream)
|
|
||||||
|
|
@@ -1,18 +0,0 @@
-import typing
-
-
-class Source:
-    def connect(self, **kwargs):
-        raise NotImplementedError
-
-    def list(self):
-        raise NotImplementedError
-
-    def get(self, ids: typing.Collection):
-        raise NotImplementedError
-
-    def get_json(self, url:str) -> dict:
-        raise NotImplementedError
-
-    def close(self):
-        raise NotImplementedError
@@ -1,8 +1,9 @@
-$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_03b9b6b4-c8ab-4182-8902-1620eebe8889.json", function (data) { //urach
+//$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_03b9b6b4-c8ab-4182-8902-1620eebe8889.json", function (data) { //urach
 //$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_de7df5b5-edd5-4070-840f-68854ffab9aa.json", function (data) { //urach
 //$.getJSON("data/90278021-4c57-464e-90b1-d603799d07eb_07da99c9-398a-424f-99fc-2701763a63e9.json", function (data) { //eichstätt
 //$.getJSON("data/13241906-cdae-441a-aed0-d57ebeb37cac_d33976a6-8a56-4a63-b492-fe5427dbf377.json", function (data) { //stadtökologie
-//$.getJSON("data/5e64ce07-1c16-4d50-ac4e-b3117847ea43_2f664d7b-f0d8-42f5-8731-c034ef86703e.json", function (data) { //filderstadt
+$.getJSON("data/5e64ce07-1c16-4d50-ac4e-b3117847ea43_2f664d7b-f0d8-42f5-8731-c034ef86703e.json", function (data) { //filderstadt
+//$.getJSON("data/17d401a9-de21-49a2-95bc-7dafa53dda64_98edcb70-03db-4465-b185-a9c9574995ce.json", function (data) { //oeb2016
 var images = {};
 var tiles = {
     "openstreetmap": L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
@@ -0,0 +1,65 @@
+from .tasks import analyze
+
+__log__ = ["/app/data/008cad400ab848f729913d034a.zip"]
+
+__config__ = """{
+ "logFormat": "zip",
+ "entryType": "@class",
+ "spatials": [
+  "de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
+ ],
+ "actions": [
+  "...QuestionAnswerEvent",
+  "...SimuAnswerEvent"
+ ],
+ "boards": [
+  "de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
+ ],
+ "analyzers": {
+  "analysis.analyzers": [
+   "BiogamesCategorizer",
+   "LocationAnalyzer"
+  ]
+ },
+ "sequences": {
+  "start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
+  "end": {
+   "@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
+   "action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
+  }
+ },
+ "custom": {
+  "simulation_rounds": [
+   "de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
+  ],
+  "simu_data": [
+   "de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
+  ],
+  "instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
+  "instance_id": "instance_id",
+  "instance_config_id": "config.@id",
+  "sequences2": {
+   "id_field": "sequence_id",
+   "start": {
+    "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+    "action": "START"
+   },
+   "end": {
+    "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+    "action": "PAUSE"
+   }
+  },
+  "coordinates": "location.coordinates",
+  "metadata": {
+   "timestamp": "timestamp",
+   "gamefield": "instance_id",
+   "user": "player_group_name"
+  }
+ },
+ "source": {
+  "type": "Biogames",
+  "username": "ba",
+  "password": "853451",
+  "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+ }
+}"""
@@ -0,0 +1,86 @@
+import json
+import logging
+import shutil
+import uuid
+import os.path
+import os
+
+import redis as redis_lib
+import time
+from celery import Celery
+from analysis import log_analyzer as la
+from analysis.analyzers import KMLRender, ActivityMapperRender
+from clients.webclients import CLIENTS
+
+FLASK_DB = 1
+REDIS_HOST = "redis"
+DATA_PATH = "/app/data/results/"
+
+RENDERERS = {  # TODO
+    "KMLRender": KMLRender,
+    "ActivityMapper": ActivityMapperRender,
+}
+
+app = Celery('tasks', backend='redis://redis', broker='redis://redis')
+redis = redis_lib.StrictRedis(host=REDIS_HOST, db=FLASK_DB)
+log: logging.Logger = logging.getLogger(__name__)
+
+
+def update_status(username, name, state, **kwargs):
+    status = json.loads(redis.get(username))
+    status[name][state[0]] = time.strftime("%c")
+    status[name]['status'] = state[1]
+    for i in kwargs:
+        status[name][i] = kwargs[i]
+    redis.set(username, json.dumps(status))
+
+
+@app.task
+def analyze(config, log_ids, **kwargs):
+    update_status(kwargs['username'], kwargs['name'], ('load', 'LOADING'))
+
+    try:
+        log.info("start analysis")
+        client = CLIENTS[kwargs['clientName']](host=kwargs['host'], **kwargs['cookies'])
+        logs = client.list()
+        id_urls = {str(x['@id']): x['file_url'] for x in logs}
+        urls = [id_urls[i] for i in log_ids]
+        tmpdir = client.download_files(urls)
+        log.info(tmpdir.name, list(os.scandir(tmpdir.name)))
+
+        update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'))
+
+        settings = la.parse_settings(config)
+        store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
+        render = RENDERERS[settings.render[0]]()  # TODO
+        files = render.render(store.get_all())
+
+        uid = str(uuid.uuid4())
+        results = []
+        log.error(files)
+        os.mkdir(os.path.join(DATA_PATH, uid))
+        for file in files:
+            try:
+                head, tail = os.path.split(file)
+                target = os.path.join(DATA_PATH, uid, tail)
+                shutil.move(file, target)
+                results.append(target)
+            except FileNotFoundError as e:
+                log.exception(e)
+        tmpdir.cleanup()
+
+        update_status(kwargs['username'], kwargs['name'], ('done', 'FINISHED'), results=results)
+    except Exception as e:
+        log.exception(e)
+        update_status(kwargs['username'], kwargs['name'], ('abort', 'ERROR'), exception=str(e))
+
+
+def status_update(key, status_key, status):
+    record = redis.get(key)
+    if not record:
+        redis.set(key, json.dumps({status_key: status}))
+    else:
+        data = json.loads(record)
+        data[status_key] = status
+        redis.set(key, json.dumps(data))
+    redis.save()
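For reference, the per-user status record that `status_update` creates and `update_status` mutates appears to have this shape in Redis (one JSON object per user, keyed by analysis name; a reconstruction from the code above, not a documented schema):

```python
# Reconstructed example of json.loads(redis.get(username)) after a finished run:
{
    "my-analysis": {
        "status": "FINISHED",  # PENDING -> LOADING -> RUNNING -> FINISHED | ERROR
        "submit": "Fri Mar 16 10:00:00 2018",
        "load": "Fri Mar 16 10:00:01 2018",   # set by state ('load', 'LOADING')
        "start": "Fri Mar 16 10:00:05 2018",  # set by state ('start', 'RUNNING')
        "done": "Fri Mar 16 10:01:00 2018",   # set by state ('done', 'FINISHED')
        "log_ids": ["008cad400ab848f729913d034a"],
        "config": "KML",
        "results": ["/app/data/results/<uuid>/<log>.kml"],  # or "exception" on ERROR
    }
}
```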