activityMapper working with celery
parent 01e2433b8b
commit 412239515d
@@ -40,7 +40,7 @@ def __plot_or_show(name=None):
 
 
 def plot(src_data: List[Tuple[str, List[int]]], ylabel="simulation rounds", title="simulation retries",
-         rotation='vertical', name=None):
+         rotation='vertical', name=None):
     names, datas = list(zip(*src_data))
     # plt.boxplot(datas, labels=names, showfliers=False, showmeans=True, meanline=True)
     rand = np.random.rand(len(datas), len(datas[0]))
@@ -53,7 +53,7 @@ def plot(src_data: List[Tuple[str, List[int]]], ylabel="simulation rounds", titl
 
 
 def graph_plot(src_data: List[Tuple[str, List[int]]], ylabel="simulation rounds", title="sequential simulation retries",
-               rotation='vertical', name=None):
+               rotation='vertical', name=None):
     config_name = CONFIG_NAMES[name] if name in CONFIG_NAMES else "---"
     counts_per_group = [sum(i) for i in src_data]
     label = "{}: n={n}; # of sim runs: ⌀={avg:.2f}, median={median}".format(
@@ -152,12 +152,14 @@ class ActivityMapperRender(Render):
     def render(self, results: List[Result], name=None):
         print(os.getcwd())
+        files = []
         for result in self.filter(results):
             data = result.get()
-            with open(os.path.join("static", "progress", "data", data['instance'] + "_" + str(name) + ".json"),
-                      "w") as out:
+            path = os.path.join("/tmp", data['instance'] + "_" + str(name) + ".json")
+            with open(path, "w") as out:
                 json.dump(data, out, indent=1)
-        return "ok"
+            files.append(path)
+        return files


 class StoreRender(Render):
@@ -186,7 +188,7 @@ class SimulationOrderRender(Render):

 class SimulationGroupRender(Render):
     def render(self, results: List[Result], name=None):
-        #data = [r.get() for r in self.filter(results)]
+        # data = [r.get() for r in self.filter(results)]
         data = []
         for r in self.filter(results):
             raw = r.get()
@@ -196,6 +198,6 @@ class SimulationGroupRender(Render):
         print(name, len(data))
         # graph_fit(list(data), name=name)
         graph_plot(list(data), ylabel="simulation retries", title="sequential simulation retries", rotation=None,
-                   name=name)
+                   name=name)

 result_types = [SimulationOrderAnalyzer]
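
Note on the renderer change above: render() now returns the list of JSON files it wrote instead of "ok". A minimal sketch of the caller side, assuming store comes from la.run_analysis as in the Celery task further down (log_paths and settings are placeholders, not from this diff):

    # Hypothetical caller; mirrors how tasks.py consumes the renderer.
    store = la.run_analysis(log_paths, settings, la.LOADERS)   # log_paths/settings assumed to exist
    files = ActivityMapperRender().render(store.get_all())
    # files is a list like ["/tmp/<instance>_None.json", ...] that the task then moves into DATA_PATH/<uid>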
@@ -6,12 +6,10 @@ from analysis.util import json_path
 logger = logging.getLogger(__name__)


-def download_board(board_id, instance_config_id, sequence_id, source):
-    local_file = "static/progress/images/{config_id}/{sequence_id}/{board_id}".format(
-        config_id=instance_config_id,
-        sequence_id=sequence_id,
-        board_id=board_id)
-    if os.path.exists(local_file):
+def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"):
+    local_file = os.path.join("static", instance_config_id, sequence_id, board_id)
+    abs_path = os.path.join(path, local_file)
+    if os.path.exists(abs_path):
         return local_file
     url = "/game2/editor/config/{config_id}/sequence/{sequence_id}/board/{board_id}/".format(
         config_id=instance_config_id,
@@ -20,12 +18,12 @@ def download_board(board_id, instance_config_id, sequence_id, source):
     )
     board = source.get(url)
     if not board.ok:
-        raise ConnectionError()
+        raise ConnectionError(url, board, board.status_code)
     data = board.json()
     preview_url = json_path(data, "preview_url.medium")
     logger.debug(preview_url)
-    os.makedirs(local_file[:-len(board_id)], exist_ok=True)
-    source.download_file(preview_url, local_file)
+    os.makedirs(abs_path[:-len(board_id)], exist_ok=True)
+    source.download_file(preview_url, abs_path)
     return local_file
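
A short usage sketch for the reworked download_board: it still returns a path relative to the results volume, but now caches the file under the path argument. The client instance and IDs below are invented; only the signature and the /data/results default come from this diff:

    # Hypothetical call; "source" must offer get() and download_file() like the Client class below.
    relative = download_board("board-1", "config-1", "sequence-1", client, path="/data/results/")
    # relative == "static/config-1/sequence-1/board-1"; the file itself lives under /data/results/
    # and can be served by the nginx container added in docker-compose.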
@@ -22,9 +22,11 @@ class Client:
         return path

     def get(self, url, **kwargs) -> requests.models.Response:
+        log.info("GET " + str(url))
         return requests.get(self.url(url), cookies=self.cookies, headers=self.headers, **kwargs)

     def post(self, url, data, **kwargs) -> requests.models.Response:
+        log.info("POST " + str(url))
         return requests.post(self.url(url), data, cookies=self.cookies, headers=self.headers, **kwargs)

     def download_file(self, url, target, **kwargs) -> bool:
@@ -41,6 +41,8 @@ services:
     image: nginx:1.13-alpine
+    volumes:
+      - ./data/results:/usr/share/nginx/html:ro
     networks:
       - traefik_net
     labels:
       - "traefik.enable=true"
       - "traefik.port=80"
@@ -60,4 +60,69 @@ KML = """{
     ]
 }"""

-CONFIGS = {"KML": KML}#TODO
+ACTIVITY = """{
+    "logFormat": "zip",
+    "entryType": "@class",
+    "spatials": [
+        "de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
+    ],
+    "actions": [],
+    "boards": [
+        "de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
+    ],
+    "analyzers": {
+        "analysis.analyzers": [
+            "BiogamesCategorizer",
+            "ActivityMapper"
+        ]
+    },
+    "sequences": {
+        "start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
+        "end": {
+            "@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
+            "action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
+        }
+    },
+    "custom": {
+        "simulation_rounds": [
+            "de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
+        ],
+        "simu_data": [
+            "de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
+        ],
+        "instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
+        "instance_id": "instance_id",
+        "instance_config_id": "config.@id",
+        "sequences2": {
+            "id_field": "sequence_id",
+            "start": {
+                "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+                "action": "START"
+            },
+            "end": {
+                "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+                "action": "PAUSE"
+            }
+        },
+        "coordinates": "location.coordinates",
+        "metadata": {
+            "timestamp": "timestamp",
+            "gamefield": "instance_id",
+            "user": "player_group_name"
+        }
+    },
+    "source": {
+        "type": "Biogames",
+        "username": "ba",
+        "password": "853451",
+        "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+    },
+    "render": [
+        "ActivityMapper"
+    ]
+}"""
+
+CONFIGS = { # TODO
+    "KML": KML,
+    "ActivityMapper": ACTIVITY,
+}
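
A rough sketch of how one of these config strings is consumed (parse_settings and RENDERERS appear in tasks.py below; that the raw JSON string is exactly what the task receives as config is an assumption):

    from selector.temp_config import CONFIGS
    from analysis import log_analyzer as la

    settings = la.parse_settings(CONFIGS["ActivityMapper"])  # same call as in the Celery task
    # settings.render[0] should be "ActivityMapper", which selects ActivityMapperRender via RENDERERS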
@@ -24,17 +24,17 @@

 <div id="results">
     <ul>
-    {% for job in jobs %}
+    {% for job in jobs %}
     <li> {{jobs[job].status}}: "{{job}}":
         <ul>
         {% for r in jobs[job].results %}
-            <li>{{r}}</li>
+            <li><a href="{{r | get_url}}">{{r|get_name}}</a></li>
         {% endfor %}
         </ul>


     </li>
-    {% endfor %}
+    {% endfor %}
     </ul>
 </div>

@@ -13,6 +13,7 @@ from tasks import tasks
 from selector.temp_config import CONFIGS

 BIOGAMES_HOST = "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+RESULT_HOST = "http://results.ma.potato.kinf.wiai.uni-bamberg.de/"

 app = Flask(__name__)
 clients: typing.Dict[str, Client] = {}
@@ -44,11 +45,15 @@ def login():

 @app.route("/games")
 def games():
-    if not session['logged_in']:
+    if not ('logged_in' in session and session['logged_in']):
         return redirect("/")
     if session['logged_in'] and not session['uid'] in clients:
         clients[session['uid']] = CLIENTS[session['game']](host=session['host'], **session['cookies'])
-    job_status = json.loads(tasks.redis.get(session['username']))
+    status = tasks.redis.get(session['username'])
+    if status:
+        job_status = json.loads(status)
+    else:
+        job_status = {}
     return render_template("games.html", logs=clients[session['uid']].list(), configs=CONFIGS, jobs=job_status)


@@ -80,6 +85,15 @@ def status():
     return json.dumps(json.loads(tasks.redis.get(session['username'])), indent=2)


+@app.template_filter('get_url')
+def get_url(path: str):
+    return path.replace(tasks.DATA_PATH, RESULT_HOST)
+
+@app.template_filter('get_name')
+def get_url(path: str):
+    return path.replace(tasks.DATA_PATH, "")
+
+
 if __name__ == '__main__':
     app.config.update({"SECRET_KEY": "59765798-2784-11e8-8d05-db4d6f6606c9"})
     app.run(host="0.0.0.0", debug=True)
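
The two template filters are plain string replacements; a worked example with a made-up result path, using the DATA_PATH and RESULT_HOST values from this commit:

    path = "/app/data/results/1d2c0c6e-uid/instance_7_None.json"  # made-up example path
    path.replace("/app/data/results/", "http://results.ma.potato.kinf.wiai.uni-bamberg.de/")
    # get_url -> "http://results.ma.potato.kinf.wiai.uni-bamberg.de/1d2c0c6e-uid/instance_7_None.json"
    path.replace("/app/data/results/", "")
    # get_name -> "1d2c0c6e-uid/instance_7_None.json"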
@@ -1,4 +1,4 @@
-from .tasks import add, analyze
+from .tasks import analyze

 __log__ = ["/app/data/008cad400ab848f729913d034a.zip"]

@@ -9,15 +9,16 @@ import redis as redis_lib
 import time
 from celery import Celery
 from analysis import log_analyzer as la
-from analysis.analyzers import KMLRender
+from analysis.analyzers import KMLRender, ActivityMapperRender
 from clients.webclients import CLIENTS

 FLASK_DB = 1
 REDIS_HOST = "redis"
-DATA_PATH = "/app/data/results_test/"
+DATA_PATH = "/app/data/results/"

 RENDERERS = { # TODO
-    "KMLRender": KMLRender
+    "KMLRender": KMLRender,
+    "ActivityMapper": ActivityMapperRender,
 }

 app = Celery('tasks', backend='redis://redis', broker='redis://redis')
@@ -38,36 +39,40 @@ def update_status(username, name, state, **kwargs):
 def analyze(config, log_ids, **kwargs):
     update_status(kwargs['username'], kwargs['name'], ('load', 'LOADING'))

-    log.info("start analysis")
-    client = CLIENTS[kwargs['clientName']](host=kwargs['host'], **kwargs['cookies'])
-    logs = client.list()
-    id_urls = {str(x['@id']): x['file_url'] for x in logs}
-    urls = [id_urls[i] for i in log_ids]
-    tmpdir = client.download_files(urls)
-    log.info(tmpdir.name, list(os.scandir(tmpdir.name)))
+    try:
+        log.info("start analysis")
+        client = CLIENTS[kwargs['clientName']](host=kwargs['host'], **kwargs['cookies'])
+        logs = client.list()
+        id_urls = {str(x['@id']): x['file_url'] for x in logs}
+        urls = [id_urls[i] for i in log_ids]
+        tmpdir = client.download_files(urls)
+        log.info(tmpdir.name, list(os.scandir(tmpdir.name)))

-    update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'))
+        update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'))

-    settings = la.parse_settings(config)
-    store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
-    render = RENDERERS[settings.render[0]]()  # TODO
-    files = render.render(store.get_all())
+        settings = la.parse_settings(config)
+        store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
+        render = RENDERERS[settings.render[0]]()  # TODO
+        files = render.render(store.get_all())

-    uid = str(uuid.uuid4())
-    results = []
-    log.error(files)
-    os.mkdir(os.path.join(DATA_PATH, uid))
-    for file in files:
-        try:
-            head, tail = os.path.split(file)
-            target = os.path.join(DATA_PATH, uid, tail)
-            shutil.move(file, target)
-            results.append(target)
-        except FileNotFoundError as e:
-            log.exception(e)
-    tmpdir.cleanup()
+        uid = str(uuid.uuid4())
+        results = []
+        log.error(files)
+        os.mkdir(os.path.join(DATA_PATH, uid))
+        for file in files:
+            try:
+                head, tail = os.path.split(file)
+                target = os.path.join(DATA_PATH, uid, tail)
+                shutil.move(file, target)
+                results.append(target)
+            except FileNotFoundError as e:
+                log.exception(e)
+        tmpdir.cleanup()

-    update_status(kwargs['username'], kwargs['name'], ('done', 'FINISHED'), results=results)
+        update_status(kwargs['username'], kwargs['name'], ('done', 'FINISHED'), results=results)
+    except Exception as e:
+        log.exception(e)
+        update_status(kwargs['username'], kwargs['name'], ('abort', 'ERROR'), exception=str(e))


 def status_update(key, status_key, status):
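
For context, a sketch of how this task would be queued from the web side, assuming analyze is registered as a Celery task. The log IDs and session values are invented; only the positional arguments and keyword names match what analyze reads from kwargs:

    from tasks.tasks import analyze
    from selector.temp_config import CONFIGS

    analyze.delay(
        CONFIGS["ActivityMapper"],  # config JSON string, passed on to la.parse_settings
        ["17", "23"],               # log ids as strings, looked up in id_urls
        username="ba",              # Redis key used by update_status
        name="activity-run-1",      # job name shown in games.html
        clientName="Biogames",      # key into CLIENTS (assumed)
        host="http://biogames.potato.kinf.wiai.uni-bamberg.de",
        cookies={},                 # forwarded to the web client constructor
    )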