restructured

* add selector frontend
* basic celery backend
* client library

branch: activity_mapper
parent: e254667256
commit: 2c8eea0e6f
@@ -0,0 +1,10 @@
+FROM alpine:edge
+ADD ["requirements.txt", "/"]
+
+RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories && \
+    sed -i 's/numpy/#numpy/' requirements.txt && \
+    sed -i 's/scipy/#scipy/' requirements.txt && \
+    apk add --update libpng libpng-dev freetype freetype-dev g++ python3 py3-numpy python3-dev py-numpy-dev py3-scipy && \
+    pip3 install -r requirements.txt && \
+    apk del libpng-dev freetype-dev g++ python3-dev py-numpy-dev && \
+    rm requirements.txt
@@ -12,7 +12,7 @@ from .render import Render
 from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender, \
     ActivityMapperRender, StoreRender, SimulationOrderRender, SimulationGroupRender
 from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender, LogEntryCountAnalyzerPlot, \
-    LogEntryCountCSV
+    LogEntryCountCSV, KMLRender
 from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
     LocomotionActionRatioRender

@@ -41,6 +41,7 @@ __MAPPING__ = {
     LocationAnalyzer: [
         TrackRender,
         HeatMapRender,
+        KMLRender,
     ],
     ActivityMapper: [
         ActivityMapperRender
@@ -2,7 +2,7 @@ import logging
 from collections import KeysView
 from typing import Type, Sized, Collection

-from analyzers.settings import LogSettings
+from analysis.analyzers.settings import LogSettings

 log: logging.Logger = logging.getLogger(__name__)

@@ -3,8 +3,8 @@ from collections import defaultdict, namedtuple, OrderedDict
 from types import SimpleNamespace
 from typing import List, NamedTuple

-from util import json_path, combinate
-from util.download import download_board, get_board_data
+from analysis.util import json_path, combinate
+from analysis.util.download import download_board, get_board_data
 from . import Result, LogSettings, Analyzer, ResultStore
 from .default import CategorizerStub, Store

@@ -174,13 +174,13 @@ class ActivityMapper(Analyzer):
                 board["image"] = "simu.png"
                 continue
             local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
-                                        self.settings.source)
+                                        self.settings.source)
             if local_file:
                 board['image'] = local_file
             else:
                 board['image'] = "ERROR_FETCHING_FILE"
                 logger.error("error downloading board! %s %s %s", self.instance_config_id, board["sequence_id"],
-                             board["board_id"])
+                             board["board_id"])
         else:
             board["image"] = "map.png"
         store.add(Result(type(self), {
@@ -202,7 +202,7 @@ class ActivityMapper(Analyzer):
             self.add_location(entry)
         elif entry[self.settings.type_field] in self.settings.boards:
             board_data = get_board_data(self.settings.source, self.instance_config_id, entry["sequence_id"],
-                                        entry["board_id"])
+                                        entry["board_id"])
             entry["extra_data"] = board_data
             entry["extra_data"]["activity_type"] = self.last_board_type
             entry['coordinate'] = self.new_coordinate()
@@ -218,14 +218,14 @@ class ActivityMapper(Analyzer):
     def classify_entry(self, entry):
         entry_type = entry[self.settings.type_field]
         if self.filters.end(entry):
-            data = {"extra_data": {"activity_type": "map"},"coordinate": self.new_coordinate()}
+            data = {"extra_data": {"activity_type": "map"}, "coordinate": self.new_coordinate()}
             data.update(entry)
             self.timeline.append(data)
             return "map"
         if not entry_type in self.settings.boards:
             return self.last_board_type
         board_data = get_board_data(self.settings.source, self.instance_config_id, entry["sequence_id"],
-                                    entry["board_id"])
+                                    entry["board_id"])
         for pattern in self.classes:
             if pattern in board_data['class']:
                 return self.classes[pattern]
@@ -1,7 +1,7 @@
 import logging
 from collections import defaultdict, OrderedDict

-from util import json_path
+from analysis.util import json_path
 from . import Result, LogSettings, Analyzer, ResultStore


@@ -16,9 +16,9 @@ class LocationAnalyzer(Analyzer):
         super().__init__(settings)
         self.entries = []

-    def result(self, store: ResultStore) -> None:
+    def result(self, store: ResultStore, **kwargs) -> None:
         self.log.debug(len(self.entries))
-        store.add(Result(type(self), list(self.entries)))
+        store.add(Result(type(self), list(self.entries), name=kwargs['name']))

     def process(self, entry: dict) -> bool:
         if entry[self.settings.type_field] in self.settings.spatials:
@@ -9,8 +9,8 @@ from scipy.interpolate import interp1d
 import networkx as nx
 import itertools

-from analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
-from util.meta_temp import CONFIG_NAMES
+from analysis.analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
+from analysis.util.meta_temp import CONFIG_NAMES
 from . import Render
 from .. import Result, SimulationRoundsAnalyzer, BoardDurationAnalyzer, ActivityMapper

@@ -2,11 +2,13 @@ import json
 import logging
 from typing import List

+import datetime
 import matplotlib.pyplot as plt

-from analyzers import LogEntryCountAnalyzer
+from analysis.analyzers import LogEntryCountAnalyzer
+from analysis.util.meta_temp import KML_PATTERN
 from . import Render, Result
-from .. import LocationAnalyzer
+from analysis.analyzers import LocationAnalyzer

 log = logging.getLogger(__name__)

@@ -39,6 +41,31 @@ class TrackRender(Render):
         return dumps


+def format_time(ts):
+    return datetime.datetime.fromtimestamp(ts/1000).strftime("%Y-%m-%dT%H:%M:%S.%f")
+
+
+class KMLRender(Render):
+    result_types = [LocationAnalyzer]
+
+    def render(self, results: List[Result], name=None):
+        for result in self.filter(results):
+            times = ["<when>{time}</when>".format(time=format_time(entry["timestamp"])) for entry in result.get()]
+            coords = [
+                "<gx:coord>{long} {lat} 0.0</gx:coord>"
+                .format(
+                    lat=entry['location']['coordinates'][1],
+                    long=entry['location']['coordinates'][0])
+                for entry in result.get()
+            ]
+            filename = str(result.name)+".kml"
+            print(filename)
+            with open(filename, "w") as out:
+                out.write(KML_PATTERN.format(name=str(result.name), coordinates="\n".join(coords), when="\n".join(times)))
+
+
+
+
 class HeatMapRender(TrackRender):
     weight = 0.01

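For orientation, a minimal sketch of the per-entry output this renderer produces. The sample entry is hypothetical, but its shape (millisecond epoch timestamp, GeoJSON-style lon/lat coordinate order) follows the field accesses in the hunk above:

    import datetime

    def format_time(ts):
        # millisecond epoch -> the ISO-8601 form used in KML <when> elements
        return datetime.datetime.fromtimestamp(ts / 1000).strftime("%Y-%m-%dT%H:%M:%S.%f")

    entry = {"timestamp": 1521115200000,                       # hypothetical sample
             "location": {"coordinates": [10.8978, 48.4011]}}  # GeoJSON order: lon, lat
    print("<when>{time}</when>".format(time=format_time(entry["timestamp"])))
    print("<gx:coord>{long} {lat} 0.0</gx:coord>".format(
        long=entry["location"]["coordinates"][0],
        lat=entry["location"]["coordinates"][1]))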
@@ -2,8 +2,8 @@ import json

 import numpy as np

-import analyzers
-from util.geo import calc_distance
+import analysis.analyzers
+from analysis.util.geo import calc_distance


 def time_distribution(store):
@@ -97,7 +97,7 @@ def time_distribution(store):

     from collections import defaultdict
     import matplotlib.pyplot as plt
-    from util.meta_temp import CONFIG_NAMES
+    from analysis.util.meta_temp import CONFIG_NAMES

     keys = [
         "simu",
@@ -1,6 +1,6 @@
 import json
 import sys
-from sources import SOURCES
+from analysis.sources import SOURCES


 def load_source(config):
@@ -28,7 +28,7 @@ class LogSettings:
         self.boards = json_dict['boards']
         for mod in json_dict['analyzers']:
             for name in json_dict['analyzers'][mod]:
-                print(mod, name)
+                print(mod, name, getattr(sys.modules[mod], name))
                 self.analyzers.append(getattr(sys.modules[mod], name))
         self.sequences = json_dict['sequences']
         if 'custom' in json_dict:
@@ -51,3 +51,7 @@ class LogSettings:

 def load_settings(file: str) -> LogSettings:
     return LogSettings(json.load(open(file)))
+
+
+def parse_settings(config: str) -> LogSettings:
+    return LogSettings(json.loads(config))
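parse_settings mirrors load_settings but takes the JSON document as a string instead of a path, so a configuration can travel over the wire, for instance as a celery task argument (see the commented-out analyze task near the end of this diff). A quick sketch; the minimal config keys are a hypothetical guess based on LogSettings.__init__ above:

    from analysis.analyzers.settings import load_settings, parse_settings

    # hypothetical minimal config; real configs carry more keys
    config = '{"boards": [], "analyzers": {}, "sequences": []}'
    settings_from_string = parse_settings(config)         # e.g. received over the wire
    settings_from_file = load_settings("biogames2.json")  # same structure, from disk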
@@ -2,16 +2,16 @@ import json
 import logging
 from typing import List

-import analyzers
-from analyzers import get_renderer, render
-from analyzers.analyzer import ResultStore
-from analyzers.analyzer.default import write_logentry_count_csv, write_simulation_flag_csv
-from analyzers.render import wip
-from analyzers.render.default import LogEntryCountCSV
-from analyzers.render.wip import time_distribution, plot_data
-from analyzers.settings import LogSettings, load_settings
-from loaders import LOADERS
-from util.processing import grep, run_analysis, src_file
+from analysis import analyzers
+from analysis.analyzers import get_renderer, render
+from analysis.analyzers.analyzer import ResultStore
+from analysis.analyzers.analyzer.default import write_logentry_count_csv, write_simulation_flag_csv
+from analysis.analyzers.render import wip
+from analysis.analyzers.render.default import LogEntryCountCSV, KMLRender
+from analysis.analyzers.render.wip import time_distribution, plot_data
+from analysis.analyzers.settings import LogSettings, load_settings, parse_settings
+from analysis.loaders import LOADERS
+from analysis.util.processing import grep, run_analysis, src_file

 logging.basicConfig(format='%(levelname)s %(name)s:%(message)s', level=logging.DEBUG)
 log: logging.Logger = logging.getLogger(__name__)
@@ -26,36 +26,39 @@ def urach_logs(log_ids, settings):


 if __name__ == '__main__':
-    settings: LogSettings = load_settings("biogames2.json")
-    log_ids_urach: List[str] = urach_logs([
-        # "34fecf49dbaca3401d745fb467",
-        # "44ea194de594cd8d63ac0314be",
-        # "57c444470dbf88605433ca935c",
-        # "78e0c545b594e82edfad55bd7f",
-        # "91abfd4b31a5562b1c66be37d9",
-        # "597b704fe9ace475316c345903",
-        # "e01a684aa29dff9ddd9705edf8",
-        "597b704fe9ace475316c345903",
-        "e01a684aa29dff9ddd9705edf8",
-        "fbf9d64ae0bdad0de7efa3eec6",
-        # "fbf9d64ae0bdad0de7efa3eec6",
-        "fe1331481f85560681f86827ec",  # urach
-        # "fe1331481f85560681f86827ec"]
-        "fec57041458e6cef98652df625",
-    ]
-        , settings)
-    log_ids_gf = grep(["9d11b749c78a57e786bf5c8d28",  # filderstadt
-                       "a192ff420b8bdd899fd28573e2",  # eichstätt
-                       "3a3d994c04b1b1d87168422309",  # stadtökologie
-                       "fe1331481f85560681f86827ec",  # urach
-                       "96f6d9cc556b42f3b2fec0a2cb7ed36e"  # oberelsbach
-                       ],
-                      "/home/clemens/git/ma/test/src",
-                      settings)
-    log_ids = src_file("/home/clemens/git/ma/test/filtered_5_actions")
+    log_ids = src_file("/home/clemens/git/ma/test/filtered_5_actions")
+    settings = {}
+    log_ids_gf = []
+    # settings: LogSettings = load_settings("biogames2.json")
+    # log_ids_urach: List[str] = urach_logs([
+    #     # "34fecf49dbaca3401d745fb467",
+    #     # "44ea194de594cd8d63ac0314be",
+    #     # "57c444470dbf88605433ca935c",
+    #     # "78e0c545b594e82edfad55bd7f",
+    #     # "91abfd4b31a5562b1c66be37d9",
+    #     # "597b704fe9ace475316c345903",
+    #     # "e01a684aa29dff9ddd9705edf8",
+    #     "597b704fe9ace475316c345903",
+    #     "e01a684aa29dff9ddd9705edf8",
+    #     "fbf9d64ae0bdad0de7efa3eec6",
+    #     # "fbf9d64ae0bdad0de7efa3eec6",
+    #     "fe1331481f85560681f86827ec",  # urach
+    #     # "fe1331481f85560681f86827ec"]
+    #     "fec57041458e6cef98652df625",
+    # ]
+    #     , settings)
+    # log_ids_gf = grep(["9d11b749c78a57e786bf5c8d28",  # filderstadt
+    #                    "a192ff420b8bdd899fd28573e2",  # eichstätt
+    #                    "3a3d994c04b1b1d87168422309",  # stadtökologie
+    #                    "fe1331481f85560681f86827ec",  # urach
+    #                    "96f6d9cc556b42f3b2fec0a2cb7ed36e"  # oberelsbach
+    #                    ],
+    #                   "/home/clemens/git/ma/test/src",
+    #                   settings)
+    # log_ids = src_file("/home/clemens/git/ma/test/filtered_5_actions")

-    #store: ResultStore = run_analysis(log_ids_gf, settings, LOADERS)
-    #store: ResultStore = run_analysis(log_ids, settings, LOADERS)
+    if False:
+        store: ResultStore = run_analysis(log_ids_gf, settings, LOADERS)
+    # store: ResultStore = run_analysis(log_ids, settings, LOADERS)

     if False:
         for r in get_renderer(analyzers.LocomotionActionAnalyzer):
@@ -69,7 +72,7 @@ if __name__ == '__main__':
     # render(analyzers.ProgressAnalyzer, store.get_all())

     if False:
-        from analyzers.postprocessing import graph
+        from analysis.analyzers.postprocessing import graph

         g = graph.Cache(settings)
         g.run(store)
@@ -85,7 +88,7 @@ if __name__ == '__main__':
     if False:
         time_distribution(store)

-    if True:
+    if False:
         # spatial_data = get_data_distance(store,relative_values=False)
         # temporal_data = get_data(store,relative_values=False)
         # spatial_data_rel = get_data_distance(store,relative_values=True)
@@ -104,6 +107,18 @@ if __name__ == '__main__':
         # plot_time_space_rel(combined, keys)
         plot_data(combined, wip.keys)

+    if True:
+        settings: LogSettings = load_settings("../oeb_kml.json")
+        log_ids = src_file("/home/clemens/git/ma/test/oeb_2016_path")
+        log_ids = log_ids[0:2]
+        print(log_ids)
+        store: ResultStore = run_analysis(log_ids, settings, LOADERS)
+        print("render")
+        kml = KMLRender()
+        kml.render(store.get_all())
+        print("done")
+    #for cat in store.get_categories():
+    #    render(analyzers.ActivityMapper, store.get_category(cat), name=cat)

     # for analyzers in analyzers:
     #     if analyzers.name() in ["LogEntryCount", "ActionSequenceAnalyzer"]:
@@ -26,7 +26,7 @@ class Biogames(Source):
             raise ValueError("missing value " + i)
         csrf_request = requests.get(kwargs['url'])
         if csrf_request.status_code != 200:
-            raise ConnectionError("unable to obtain CSRF token (" + str(csrf_request) + ")")
+            raise ConnectionError("unable to obtain CSRF token (" + str(csrf_request) + ")", csrf_request.url)
         self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
         log.info("obtained CSRF token (" + self.cookies['csrftoken'] + ")")
         login_payload = {
@@ -97,3 +97,20 @@ CONFIG_NAMES = {
     'fe43a0f0-3dea-11e6-a065-00199963ac6e': u'Vorlagen',
     'ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771': u'Bad Urach'
 }
+
+
+KML_PATTERN="""<?xml version="1.0" encoding="UTF-8"?>
+<kml xmlns="http://www.opengis.net/kml/2.2" xmlns:gx="http://www.google.com/kml/ext/2.2">
+<Document>
+  <Placemark>
+    <gx:MultiTrack>
+      <gx:Track>
+        {when}
+        {coordinates}
+      </gx:Track>
+    </gx:MultiTrack>
+    {coordinates}
+  </Placemark>
+</Document>
+</kml>
+"""
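Note that KML_PATTERN interpolates {coordinates} twice, once inside <gx:Track> and once directly under <Placemark>; bare <gx:coord> elements outside a track are unlikely to be valid KML, so the second occurrence looks like an oversight. Also, KMLRender passes a name= argument the template never references; str.format silently ignores extra keyword arguments, as this hedged sketch (hypothetical values) shows:

    from analysis.util.meta_temp import KML_PATTERN

    doc = KML_PATTERN.format(
        when="<when>2018-03-15T12:00:00.000000</when>",
        coordinates="<gx:coord>10.8978 48.4011 0.0</gx:coord>",
        name="demo")  # accepted but unused by the template
    print(doc.splitlines()[0])  # <?xml version="1.0" encoding="UTF-8"?>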
@@ -1,8 +1,8 @@
 import logging
 from typing import List

-from analyzers.analyzer import ResultStore, Analyzer
-from analyzers.settings import LogSettings
+from analysis.analyzers.analyzer import ResultStore, Analyzer
+from analysis.analyzers.settings import LogSettings

 log: logging.Logger = logging.getLogger(__name__)

@@ -31,6 +31,7 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
 def run_analysis(log_ids: list, settings, loaders):
     store: ResultStore = ResultStore()
     for log_id in log_ids:
         log.info("LOG_ID: "+ str(log_id))
         for analysis in process_log(log_id, settings, loaders):
             log.info("* Result for " + analysis.name())
+            analysis.result(store, name=log_id)
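This is the other half of the result(store, name=...) signature change above: run_analysis now tags every Result with the id of the log it came from, and KMLRender uses that name as the output filename. A sketch of the flow, assuming settings and LOADERS are set up as in the main script:

    store = ResultStore()
    log_id = "597b704fe9ace475316c345903"   # example id from the main script
    for analysis in process_log(log_id, settings, LOADERS):
        analysis.result(store, name=log_id)  # Result carries name=log_id
    # KMLRender later writes str(result.name) + ".kml" per LocationAnalyzer result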
@@ -0,0 +1,110 @@
+import json
+import logging
+
+import os
+import shutil
+import tempfile
+import typing
+
+import requests
+
+log: logging.Logger = logging.getLogger(__name__)
+
+
+class Client:
+    host: str = ""
+    cookies: typing.Dict[str, str] = {}
+    headers: typing.Dict[str, str] = {}
+
+    def url(self, path):
+        if self.host:
+            return self.host + path
+        return path
+
+    def get(self, url, **kwargs) -> requests.models.Response:
+        return requests.get(self.url(url), cookies=self.cookies, headers=self.headers, **kwargs)
+
+    def post(self, url, data, **kwargs) -> requests.models.Response:
+        return requests.post(self.url(url), data, cookies=self.cookies, headers=self.headers, **kwargs)
+
+    def download_file(self, url, target, **kwargs) -> bool:
+        with open(target, "wb") as out:
+            try:
+                download = self.get(url, stream=True, **kwargs)
+                shutil.copyfileobj(download.raw, out)
+            except Exception as e:
+                log.exception(e)
+                os.remove(target)
+                return False
+        return True
+
+    def download_files(self, urls, **kwargs) -> tempfile.TemporaryDirectory:
+        target = tempfile.TemporaryDirectory()
+        for path in urls:
+            filename = os.path.join(target.name, path.split("/")[-1])
+            self.download_file(path, filename, **kwargs)
+        return target
+
+
+class BiogamesClient(Client):
+    config_fields: typing.Dict[str, typing.List[str]] = {
+        'login': ('username', 'password', 'host'),
+        'session': ('sessionid', 'csrftoken', 'host'),
+    }
+    login_url: str = "/game2/auth/json-login"
+    list_url: str = "/game2/instance/log/list/"
+    headers: typing.Dict[str, str] = {'Accept': 'application/json'}
+
+    def __init__(self, **kwargs):
+        match = {j: all([i in kwargs for i in self.config_fields[j]]) for j in self.config_fields}
+        valid = filter(lambda x: match[x], match)
+        if not valid:
+            raise ValueError("missing parameter (" + str(self.config_fields) + ")")
+        self.config = kwargs
+        self.cookies = {}
+        self.host = self.config['host']
+        if 'session' in valid:
+            self.cookies = kwargs
+
+    def login(self) -> bool:
+        csrf_request = self.get(self.list_url)
+        if not csrf_request.ok:
+            raise ConnectionError("Unable to obtain CSRF token (" + str(csrf_request) + ")")
+        self.cookies['csrftoken'] = csrf_request.cookies['csrftoken']
+        login_payload = {
+            'username': self.config['username'],
+            'password': self.config['password'],
+            'next': '',
+            'csrfmiddlewaretoken': 'csrftoken',
+        }
+        login = self.post(self.login_url, json.dumps(login_payload))
+        if not login.ok:
+            raise ConnectionError("Unable to authenticate", login, login.text)
+        self.cookies['sessionid'] = login.cookies['sessionid']
+        print(self.cookies)
+        return True
+
+    def list(self) -> dict:
+        print(self.cookies)
+        logs = self.get(self.list_url)
+        if not logs.ok:
+            raise ConnectionError("HTTP fail", logs, logs.text)
+        return logs.json()
+
+    def load_all_logs(self) -> tempfile.TemporaryDirectory:
+        return self.download_files([i["file_url"] for i in self.list()])
+
+
+CLIENTS: typing.Dict[str, Client] = {
+    "Biogames": BiogamesClient,
+}
+
+if __name__ == '__main__':
+    # c = BiogamesClient(host="http://biodiv", username="ba", password="853451")
+    # print(c.login())
+    # print(json.dumps(c.list(), indent=1))
+    # print(type(c.load_all_logs()))
+    # print(type(c.get("/")))
+    c = BiogamesClient(host="http://biodiv", **{'csrftoken': 'IgbwP83iEibW6RE7IADIFELYdbx0dvqQ',
+                                                'sessionid': 'zntsj09d92tjos1b6ruqjthlzv60xdin'})
+    print(json.dumps(c.list(), indent=1))
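One caveat in BiogamesClient.__init__: filter() returns a lazy iterator, which is always truthy, so the "missing parameter" ValueError can never fire, and the later 'session' in valid test consumes the iterator. A standalone sketch of the intended check using a list instead, with the same config_fields:

    config_fields = {
        'login': ('username', 'password', 'host'),
        'session': ('sessionid', 'csrftoken', 'host'),
    }

    def valid_modes(kwargs: dict) -> list:
        # every mode whose required fields are all present in kwargs
        return [mode for mode, fields in config_fields.items()
                if all(field in kwargs for field in fields)]

    assert valid_modes({'username': 'u', 'password': 'p', 'host': 'h'}) == ['login']
    assert valid_modes({'host': 'h'}) == []  # empty list is falsy, so `if not valid:` works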
@@ -0,0 +1,41 @@
+version: "3"
+
+services:
+  app:
+    image: docker.clkl.de/ma/celery:0.1
+    build: .
+    volumes:
+      - ./:/app
+    working_dir: /app/selector
+    command: python3 webserver.py
+    environment:
+      - PYTHONPATH=/app
+    networks:
+      - default
+      - traefik_net
+    labels:
+      - "traefik.enable=true"
+      - "traefik.port=5000"
+      - "traefik.docker.network=traefik_net"
+      - "traefik.url.frontend.rule=Host:select.ma.potato.kinf.wiai.uni-bamberg.de"
+
+
+  redis:
+    image: redis:4-alpine
+
+
+  celery:
+    image: docker.clkl.de/ma/celery:0.1
+    build: .
+    environment:
+      - PYTHONPATH=/app
+    volumes:
+      - ./:/app
+    working_dir: /app/tasks
+    command: celery -A tasks worker --loglevel=info
+
+networks:
+  traefik_net:
+    external:
+      name: traefik_net
@@ -1,8 +1,13 @@
 requests==2.18.4
 numpy==1.13.1
 matplotlib==2.1.0
-osmnx==0.6
+#osmnx==0.6
 networkx==2.0
-pydot==1.2.3
+#pydot==1.2.3
 scipy==1.0.0
-ipython==6.2.1
+#ipython==6.2.1
+
+flask==0.12.2
+
+celery==4.1.0
+redis==2.10.6
@@ -0,0 +1,4 @@
+function validateSettings() {
+    alert(document.getElementById('safety').checked);
+    return false;
+}
@@ -0,0 +1,6 @@
+body {
+    background-color: aqua;
+}
+#data{
+    display: none;
+}
@@ -0,0 +1,5 @@
+<!doctype html>
+<title>…</title>
+<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename='style.css') }}">
+<script type="application/javascript" src="{{url_for('static', filename='script.js') }}"></script>
+{% block body %} {% endblock %}
@@ -0,0 +1,19 @@
+{% extends "base.html" %}
+{% block body %}
+<form action="/start" method="post">
+    <div id="data"> {{logs}} </div>
+    <ul>
+        {% for log in logs %}
+        <li>
+            <input type="checkbox" name="logs" value="{{log['@id']}}">
+            {{log.start_date}}: {{log.player_group_name}} (→{{log.end_date}})
+        </li>
+        <!--{{log}}-->
+        {% endfor %}
+    </ul>
+    <input type="checkbox" id="safety"><label for="safety">Confirm selection</label>
+    <input type="submit">
+
+</form>
+
+{% endblock %}
@@ -0,0 +1,14 @@
+{% extends "base.html" %}
+{% block body %}
+<form action="/login" method="post">
+    <select name="game">
+        {% for game in clients %}
+        <option>{{ game }}</option>
+        {% endfor %}
+    </select>
+    <input type="text" name="username" placeholder="username"/>
+    <input type="password" name="password" placeholder="password"/>
+    <input type="submit">
+</form>
+
+{% endblock %}
@@ -0,0 +1,53 @@
+import typing
+import uuid
+
+from clients.webclients import Client, CLIENTS
+
+from flask import Flask, render_template, request, redirect, session
+
+BIOGAMES_HOST = "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+
+app = Flask(__name__)
+clients: typing.Dict[str, Client] = {}
+
+@app.route("/")
+def index():
+    return render_template("index.html", clients=CLIENTS)
+
+
+@app.route("/login", methods=["POST"])
+def login():
+    game = request.form["game"]
+    if not game in CLIENTS:
+        return redirect("/")
+    client = CLIENTS[game](host=BIOGAMES_HOST, username=request.form['username'], password=request.form['password'])
+    if client.login():
+        session['logged_in'] = True
+        session['uid'] = str(uuid.uuid4())
+        session['cookies'] = client.cookies
+        session['game'] = game
+        session['host'] = BIOGAMES_HOST
+        clients[session['uid']] = client
+        return redirect("/games")
+    return redirect("/")
+
+
+@app.route("/games")
+def games():
+    if not session['logged_in']:
+        return redirect("/")
+    if session['logged_in'] and not session['uid'] in clients:
+        clients[session['uid']] = CLIENTS[session['game']](host=session['host'], **session['cookies'])
+    return render_template("games.html", logs=clients[session['uid']].list())
+
+@app.route("/start", methods=['POST'])
+def start():
+    pass
+
+@app.route("/status")
+def status():
+    pass
+
+if __name__ == '__main__':
+    app.config.update({"SECRET_KEY":"59765798-2784-11e8-8d05-db4d6f6606c9"})
+    app.run(host="0.0.0.0", debug=True)
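A quick smoke test for the selector frontend; this sketch assumes the repository layout from this commit (templates/ next to webserver.py, run from /app/selector as the compose service does):

    from webserver import app

    app.config.update(TESTING=True, SECRET_KEY="test")
    with app.test_client() as http:
        resp = http.get("/")            # index: login form listing CLIENTS
        print(resp.status_code)         # expected: 200
        print(b"/login" in resp.data)   # the form posts to /login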
@@ -1,8 +1,9 @@
-$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_03b9b6b4-c8ab-4182-8902-1620eebe8889.json", function (data) { //urach
+//$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_03b9b6b4-c8ab-4182-8902-1620eebe8889.json", function (data) { //urach
 //$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_de7df5b5-edd5-4070-840f-68854ffab9aa.json", function (data) { //urach
 //$.getJSON("data/90278021-4c57-464e-90b1-d603799d07eb_07da99c9-398a-424f-99fc-2701763a63e9.json", function (data) { //eichstätt
 //$.getJSON("data/13241906-cdae-441a-aed0-d57ebeb37cac_d33976a6-8a56-4a63-b492-fe5427dbf377.json", function (data) { //stadtökologie
-//$.getJSON("data/5e64ce07-1c16-4d50-ac4e-b3117847ea43_2f664d7b-f0d8-42f5-8731-c034ef86703e.json", function (data) { //filderstadt
+$.getJSON("data/5e64ce07-1c16-4d50-ac4e-b3117847ea43_2f664d7b-f0d8-42f5-8731-c034ef86703e.json", function (data) { //filderstadt
 //$.getJSON("data/17d401a9-de21-49a2-95bc-7dafa53dda64_98edcb70-03db-4465-b185-a9c9574995ce.json", function (data) { //oeb2016
 var images = {};
 var tiles = {
     "openstreetmap": L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
@@ -0,0 +1,14 @@
+from celery import Celery
+#from analysis import log_analyzer as la
+
+app = Celery('tasks', backend='redis://redis', broker='redis://redis')
+
+
+@app.task
+def add(x, y):
+    return x + y
+
+#@app.task
+#def analyze(config, log_ids):
+#    settings = la.parse_settings(config)
+#    store = la.run_analysis(log_ids, settings, la.LOADERS)
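Exercising the demo task against the compose stack above (assumes the celery worker and redis services are running, and that the hostname "redis" resolves, as it does inside the compose network):

    from tasks import add

    result = add.delay(2, 3)       # enqueue on the redis broker
    print(result.get(timeout=10))  # 5, computed by the celery worker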