Compare commits
8 Commits
laptop_mas ... master

| Author | SHA1 | Date |
|---|---|---|
| | cbe414232c | |
| | bc74cbd19a | |
| | 86adc973a2 | |
| | 29c04f2fdc | |
| | 4a53aa5759 | |
| | 51b5e8f04c | |
| | c4e9d7462b | |
| | 2170a4a3fd | |
@@ -2,7 +2,7 @@ FROM alpine:edge
 ADD ["requirements.txt", "/"]
 RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories && \
-    apk add --update --no-cache libpng freetype python3 libstdc++ libxml2 libxslt openblas && \
+    apk add --update --no-cache libpng freetype python3 libstdc++ libxml2 libxslt openblas geos libc-dev && \
     apk add --update --no-cache --virtual .build-deps libpng-dev freetype-dev g++ python3-dev openblas-dev libxml2-dev libxslt-dev && \
     pip3 --no-cache-dir install -r requirements.txt && \
     apk del .build-deps && \
@@ -3,35 +3,34 @@ from typing import List
 from .analyzer import Analyzer, Result
 from .analyzer.biogames import BoardDurationAnalyzer, SimulationRoundsAnalyzer, ActivationSequenceAnalyzer, \
     BiogamesCategorizer, ActivityMapper, BiogamesStore, InstanceConfig, SimulationOrderAnalyzer, SimulationCategorizer, \
-    SimulationFlagsAnalyzer
+    SimulationFlagsAnalyzer, GameField_InstanceCategorizer, BiogamesTasks, BiogamesDuration, GameFieldInstanceGroup
 from .analyzer.default import LogEntryCountAnalyzer, LocationAnalyzer, LogEntrySequenceAnalyzer, ActionSequenceAnalyzer, \
-    CategorizerStub, Store, ProgressAnalyzer, SimpleCategorizer
+    CategorizerStub, Store, ProgressAnalyzer, SimpleCategorizer, WhitelistAnalyzer
 from .analyzer.locomotion import LocomotionActionAnalyzer, CacheSequenceAnalyzer
 from .analyzer.mask import MaskSpatials
 from .render import Render
 from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender, \
     ActivityMapperRender, StoreRender, SimulationOrderRender, SimulationGroupRender
 from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender, LogEntryCountAnalyzerPlot, \
-    LogEntryCountCSV, KMLRender
+    LogEntryCountCSV, KMLRender, GeoJSON
 from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
-    LocomotionActionRatioRender, LocomotionActionRatioHistRender
+    LocomotionActionRatioRender
 #from .render.wip import get_all_data, plot_time_space_rel

 __FALLBACK__ = PrintRender
 __MAPPING__ = {
     LocomotionActionAnalyzer: [
         LocomotionActionAbsoluteRender,
         LocomotionActionRelativeRender,
-        LocomotionActionRatioRender,
-        LocomotionActionRatioHistRender,
-    ],
+        LocomotionActionRatioRender, ],
     LogEntryCountAnalyzer: [
         # JSONRender,
         LogEntryCountAnalyzerPlot,
         LogEntryCountCSV,
     ],
     SimulationRoundsAnalyzer: [
         JSONRender,
         SimulationRoundsRender,
         SimulationRoundsMeanRender,
     ],
     BoardDurationAnalyzer: [
         BoardDurationHistRender,
@@ -18,6 +18,7 @@ class Result:
         return self.__analysis__

     def get(self):
+        log.debug("get" + str(len(self.result)))
         return self.result

     def __repr__(self):
@@ -28,13 +29,16 @@ class Result:
 class ResultStore:
     """Store Results"""

-    def __init__(self, store_entry: Type[Collection] = list, store_action: callable = list.append) -> None:
+    def __init__(self, store_entry: Type[Collection] = list, store_action: callable = list.append, key_index=None) -> None:
         self.store = {}
         self.category = None
         self.entry: Type[Collection] = store_entry
         self.action: callable = store_action
+        self.key_index = key_index

     def new_category(self, key) -> None:
+        if not self.key_index is None:
+            key = key[self.key_index]
         self.category = key
         if not key in self.store:
             self.store[key] = self.entry()
@@ -28,7 +28,7 @@ class BoardDurationAnalyzer(Analyzer):
             board_id, timestamp = board["id"], board["timestamp"]

             if not last_timestamp is None:
-                result.append(self.save_entry(last_board, last_timestamp, (timestamp - last_timestamp)/1000))
+                result.append(self.save_entry(last_board, last_timestamp, timestamp - last_timestamp))
             last_timestamp = timestamp
             last_board = board_id
         # TODO: last board?
@@ -133,6 +133,48 @@ class BiogamesCategorizer(CategorizerStub):  # TODO: refactor
         return False


+class GameField_InstanceCategorizer(CategorizerStub):  # TODO: refactor
+    __name__ = "BiogamesCategorizer"
+
+    def __init__(self, settings: LogSettings):
+        super().__init__(settings)
+
+    def process(self, entry: dict) -> bool:
+        if self.key is "default":
+            if entry[self.settings.type_field] in self.settings.custom['instance_start']:
+                try:
+                    self.key = json_path(entry, self.settings.custom['instance_config_id']) + "_" + entry[self.settings.custom['instance_id']] + "_" + str(entry["timestamp"])
+                except KeyError as e:
+                    print(entry)
+                    raise e
+        return False
+
+
+class GameFieldInstanceGroup(Analyzer):
+    __name__ = "BiogamesGamefieldInstanceGroupAnalizer"
+
+    def __init__(self, settings: LogSettings):
+        super().__init__(settings)
+        self.metadata = None
+
+    def process(self, entry: dict) -> bool:
+        if not self.metadata:
+            if entry[self.settings.type_field] in self.settings.custom['instance_start']:
+                try:
+                    self.metadata = {"instance_config_id": json_path(entry, self.settings.custom['instance_config_id']),
+                                     "instance_id": entry[self.settings.custom['instance_id']],
+                                     "timestamp": str(entry["timestamp"]),
+                                     "player_group_name": entry['player_group_name']
+                                     }
+                except KeyError as e:
+                    print(entry)
+                    raise e
+        return False
+
+    def result(self, store: ResultStore, **kwargs) -> None:
+        store.add(Result(type(self), self.metadata))
+
+
 class ActivityMapper(Analyzer):
     __name__ = "ActivityMapper"
     classes = {
@@ -158,6 +200,7 @@ class ActivityMapper(Analyzer):
         self.last_board = {}
         self.last_board_type = "other"
         self.last_coordinate = None
         self.last_timestamp = None
         self.tracks = []
         self.track = None
+        self.instance_config_id: str = None
@@ -173,21 +216,28 @@ class ActivityMapper(Analyzer):
             if board["extra_data"]["activity_type"] == "simu":
                 board["image"] = "simu.png"
                 continue
-            local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
-                                        self.settings.source)
-            if local_file:
-                board['image'] = local_file
-            else:
+            try:
+                local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
+                                            self.settings.source)
+                if local_file:
+                    board['image'] = local_file
+                else:
+                    raise ValueError
+            except Exception as e:
+                board['image'] = "ERROR_FETCHING_FILE"
+                logger.error("error downloading board! %s %s %s", self.instance_config_id, board["sequence_id"],
+                             board["board_id"])
+                logger.exception(e)
         else:
             board["image"] = "map.png"
         store.add(Result(type(self), {
-            "instance": self.instance_config_id,
-            "track": self.tracks,
-            "boards": self.timeline,
-            "colors": self.colors,
+            "type": "FeatureCollection",
+            "features": self.tracks,
+            "properties": {
+                "instance": self.instance_config_id,
+                "boards": self.timeline,
+                "colors": self.colors,
+            },
         }))

     def process(self, entry: dict) -> bool:
@@ -242,20 +292,120 @@ class ActivityMapper(Analyzer):

     def add_location(self, entry):
         coordinates = json_path(entry, self.settings.custom['coordinates'])
-        self.track['coordinates'].append(coordinates)
+        self.track["geometry"]['coordinates'].append(coordinates)
+        self.track['properties']['coordTimes'].append(entry['timestamp'])  #FIXME
         self.last_coordinate = coordinates
         self.last_timestamp = entry['timestamp']

     def add_track(self, **props):
         self.track['properties'].update(props)
         if "activity_type" in self.track['properties'] and self.track['properties']['activity_type'] in self.colors:
             if not "stroke" in self.track['properties']:
                 self.track['properties']['stroke'] = self.colors[self.track['properties']['activity_type']]
         self.tracks.append(self.track)
         self.track = self.new_track(props['end_timestamp'])
         if self.last_coordinate:
-            self.track['coordinates'].append(self.last_coordinate)
+            self.track["geometry"]['coordinates'].append(self.last_coordinate)
+            self.track['properties']['coordTimes'].append(self.last_timestamp)

     def new_track(self, timestamp):
-        return {"type": "LineString", "coordinates": [], "properties": {'start_timestamp': timestamp}}
+        return {"type": "Feature", "geometry": {"type": "LineString", "coordinates": []}, "properties": {'start_timestamp': timestamp, 'coordTimes': []}}
+
+
+class BiogamesDuration(Analyzer):
+    __name__ = "BiogamesDuration"
+
+    def __init__(self, settings: LogSettings) -> None:
+        super().__init__(settings)
+        self.first = None
+        self.last = None
+        self.sequences = defaultdict(list)
+        self.filters = SimpleNamespace()
+        self.filters.start = lambda entry: combinate(self.settings.custom["sequences2"]["start"], entry)
+        self.filters.end = lambda entry: combinate(self.settings.custom["sequences2"]["end"], entry)
+        self.sequence = None
+        self.sequence_start = None
+        self.cache = "None"
+
+    def process(self, entry: dict) -> bool:
+        if not self.first:
+            self.first = entry['timestamp']
+        self.last = entry['timestamp']
+        if not self.sequence and self.filters.start(entry):
+            self.sequence = entry['sequence_id']
+            self.sequence_start = entry['timestamp']
+        elif self.sequence and self.filters.end(entry):
+            self.sequences[f"{self.cache}+{self.sequence}"].append((self.sequence_start, entry['timestamp']))
+            self.sequences[f"only+{self.sequence}"].append((self.sequence_start, entry['timestamp']))
+            self.sequence = None
+            self.sequence_start = 0
+            self.cache = "None"
+        if entry['@class'] in self.settings.sequences['start']:
+            if entry['cache']:
+                self.cache = entry['cache']['@id']
+            else:
+                self.cache = "None"
+        return False
+
+    def result(self, store: ResultStore, name=None) -> None:
+        results = {"start": self.first, "end": self.last, "duration": self.last - self.first}
+        for sid in self.sequences:
+            seq = self.sequences[sid]
+            #print([end-start for start,end in seq])
+            results[f"sequence_{sid}_duration"] = sum([end-start for start,end in seq])
+        store.add(Result(type(self), results))
+
+
+class BiogamesTasks(Analyzer):
+    __name__ = "BiogamesTasks"
+    DATA_CLASSES = ("de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion", )
+    BOARD_CLASSES = ("de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry",)
+
+    def __init__(self, settings: LogSettings) -> None:
+        super().__init__(settings)
+        self.settings: LogSettings = settings
+        self.tasks = {}
+        self.first_board_view = {}
+        self.last_board = None
+        self.instance_config_id: str = None
+
+    def process(self, entry: dict) -> bool:
+        if self.instance_config_id is None:
+            if entry[self.settings.type_field] in self.settings.custom['instance_start']:
+                self.instance_config_id = json_path(entry, self.settings.custom['instance_config_id'])
+        if self.is_task(entry):
+            task_id = entry['answers']['@id']
+            if task_id not in self.first_board_view:
+                logger.error("task_id is not in self.first_board_view!", task_id, entry)
+            else:
+                entry['__duration'] = entry['timestamp'] - self.first_board_view[task_id]['timestamp']
+                self.tasks[self.ids(task_id)] = entry
+        if self.is_board(entry):
+            self.first_board_view[entry['board_id']] = entry
+        return False
+
+    def result(self, store: ResultStore, name=None) -> None:
+        results = {}
+        for ids in self.tasks:
+            task = self.tasks[ids]
+            for action in task['selected_actions']:
+                if self.is_dollar_action(action):
+                    results[ids] = {"duration": task['__duration'], "result": action['increment']}
+        store.add(Result(type(self), results))
+
+    def ids(self, task_id):
+        task = self.first_board_view[task_id]
+        return f"{self.instance_config_id}_{task['sequence_id']}_{task['board_id']}"
+
+    def is_task(self, entry) -> bool:
+        return entry['@class'] in self.DATA_CLASSES
+
+    def is_board(self, entry) -> bool:
+        return entry['@class'] in self.BOARD_CLASSES
+
+    def is_dollar_action(self, action):
+        return action['@class'] in ("de.findevielfalt.games.game2.instance.action.IncrementDiversityDollarAction")
+
+
 class BiogamesStore(Store):
     __name__ = "BiogamesStore"
@@ -5,6 +5,26 @@ from analysis.util import json_path
 from . import Result, LogSettings, Analyzer, ResultStore


+class WhitelistAnalyzer(Analyzer):  #TODO
+    __name__ = "WhiteList"
+
+    def __init__(self, settings: LogSettings) -> None:
+        super().__init__(settings)
+        self.key = settings.custom["whitelist"]["key"]
+        self.filter_objs = settings.custom["whitelist"]["objs"]
+        self.valid_entries = settings.custom["whitelist"]["valid"]
+        self.blocked = False
+
+    def result(self, store: ResultStore, name=None) -> None:
+        store.add(Result(type(self), {"blocked": self.blocked}, name=name))
+
+    def process(self, entry: dict) -> bool:
+        if entry[self.settings.type_field] in self.filter_objs:
+            if not json_path(entry, self.key) in self.valid_entries:
+                self.blocked = True
+        return self.blocked
+
+
 class LocationAnalyzer(Analyzer):
     """
     store spatial log entries
@@ -17,7 +37,7 @@ class LocationAnalyzer(Analyzer):
         self.entries = []

     def result(self, store: ResultStore, **kwargs) -> None:
-        self.log.debug(len(self.entries))
+        #self.log.debug(len(self.entries))
         store.add(Result(type(self), list(self.entries), name=kwargs['name']))

     def process(self, entry: dict) -> bool:
@@ -90,7 +110,8 @@ class CategorizerStub(Analyzer):
     __name__ = "Categorizer"

     def result(self, store: ResultStore, name=None) -> None:
-        store.new_category(name if name else self.key)
+        print(name if name else self.key)
+        store.new_category((name, self.key) if name else self.key)

     def __init__(self, settings: LogSettings):
         super().__init__(settings)
@@ -1,9 +1,6 @@
 from analysis import util
 from . import Analyzer, LogSettings, Result, ResultStore

 log: logging.Logger = logging.getLogger(__name__)
-ONE_DAY = 24 * 60 * 60 * 1000
-MIN_DURATION = 5 * 60 * 1000
-
 def init_filter(settings: LogSettings, state: str) -> callable:
     # this implies OR for lists; AND for dicts
@@ -22,8 +19,6 @@ class LocomotionActionAnalyzer(Analyzer):
     """
     __name__ = "LocomotionAction"

-    limit = ONE_DAY
-
     def process(self, entry: dict) -> bool:
         self.last_timestamp = entry["timestamp"]
         if self.instance_start is None:
@@ -46,7 +41,7 @@ class LocomotionActionAnalyzer(Analyzer):
         self.current_cache = None
         self.last = None

-    def result(self, store: ResultStore) -> None:
+    def result(self, store: ResultStore, **kwargs) -> None:
         if self.last is not None:
             if self.current_cache is None:
                 self.locomotion.append(self.last - self.cache_time)
@@ -55,26 +50,24 @@ class LocomotionActionAnalyzer(Analyzer):
         self.last = None
         locomotion = sum(self.locomotion)
         action = sum(self.actions)
-        if not action > 0:
-            action = 1
+        if action == 0:
+            action = 42
+            print("Division by zero")  #FIXME
         total = locomotion + action
-        if total > self.limit:
-            log.error("total duration over limit, skip")
-        elif total < MIN_DURATION:
-            log.error("total duration too short, skip")
-        elif action < MIN_DURATION:
-            log.error("action time too short, skip")
-        else:
-            store.add(Result(type(self), {
-                'locomotion_sum': locomotion/1000,
-                'action_sum': action/1000,
-                'locomotion': self.locomotion,
-                'action': self.actions,
-                'duration': (self.last_timestamp - self.instance_start)/1000,
-                'locomotion_relative': locomotion / total,
-                'action_relative': action / total,
-                'locomotion_action_ratio': locomotion / action,
-            }))
+        if not self.last_timestamp:
+            self.last_timestamp = 0
+        if not self.instance_start:
+            self.instance_start = 0
+        store.add(Result(type(self), {
+            'locomotion_sum': locomotion,
+            'action_sum': action,
+            'locomotion': self.locomotion,
+            'action': self.actions,
+            'duration': (self.last_timestamp - self.instance_start),
+            'locomotion_relative': locomotion / total,
+            'action_relative': action / total,
+            'locomotion_action_ratio': locomotion / action,
+        }))

     def __init__(self, settings: LogSettings):
         super().__init__(settings)
@@ -1,6 +1,10 @@
 import logging
 from typing import List

+from analysis.analyzers.analyzer import ResultStore
 from .. import Result

+logger = logging.getLogger(__name__)
+
 class Render:
     result_types = []
@@ -8,6 +12,11 @@ class Render:
     def render(self, results: List[Result], name=None) -> [str]:
         raise NotImplementedError()

+    def render_store(self, store: ResultStore, name=None) -> str:
+        logging.getLogger(__name__).warning("using default implementation!")
+        for category in store.get_categories():
+            self.render(store.get_category(category), name=name)
+
     def filter(self, results: List[Result]):
         if len(self.result_types) == 0:
             return results
@@ -1,4 +1,5 @@
 import json
 import tempfile
 from collections import defaultdict
 from typing import List, Tuple
@@ -9,8 +10,13 @@ from scipy.interpolate import interp1d
 import networkx as nx
 import itertools

-from analysis.analyzers import Store, BiogamesStore, SimulationOrderAnalyzer
-from analysis.util.meta_temp import CONFIG_NAMES
+from analysis.analyzers import Store, BiogamesStore, SimulationOrderAnalyzer, LocationAnalyzer, BiogamesDuration, \
+    BiogamesTasks, GameFieldInstanceGroup
+from analysis.analyzers.analyzer import ResultStore
+from analysis.analyzers.render.default import GeoJSON
+from analysis.util.geo import calc_distance, calc_distance_simplified
+from analysis.util.meta_temp import CONFIG_NAMES, TASK_NAMES, CACHE_NAMES, SEQUENCE_NAMES
+from analysis.util.output import flat_dict_to_csv, pretty_ts
 from . import Render
 from .. import Result, SimulationRoundsAnalyzer, BoardDurationAnalyzer, ActivityMapper
@@ -155,7 +161,7 @@ class ActivityMapperRender(Render):
         files = []
         for result in self.filter(results):
             data = result.get()
-            path = os.path.join("/tmp", data['instance'] + "_" + str(name) + ".json")
+            path = os.path.join("/tmp", data["properties"]['instance'] + "_" + str(name) + ".json")
             with open(path, "w") as out:
                 json.dump(data, out, indent=1)
             files.append(path)
@@ -201,3 +207,60 @@ class SimulationGroupRender(Render):
                     name=name)

     result_types = [SimulationOrderAnalyzer]
+
+
+class OEBRender(Render):
+    result_types = [LocationAnalyzer, BiogamesTasks, BiogamesDuration, GameFieldInstanceGroup]
+    timestamp_fields = ("timestamp", "start", "end")
+
+    def render(self, results: List[Result], name=None) -> [str]:
+        data = {}
+        for r in self.filter(results):
+            if r.analysis() is LocationAnalyzer:
+                geojson = GeoJSON()
+                json = geojson.make_geojson(r.get())
+                data[f"{r.analysis().__name__}__distance"] = calc_distance(json, "features.0.geometry.coordinates", load=False)
+                data[f"{r.analysis().__name__}__distance_simplified"] = calc_distance_simplified(json, "features.0.geometry.coordinates", load=False)
+            else:
+                for i in r.get():
+                    a = r.analysis().__name__
+                    value = r.get()[i]
+                    if i in self.timestamp_fields:
+                        value = pretty_ts(value)
+                    key = f"{a}__{i}"
+                    key = self.replace(key, i)
+                    if type(value) is dict:
+                        for j in value:
+                            data[key + "__" + j] = value[j]
+                    else:
+                        data[key] = value
+        return data
+
+    def render_store(self, store: ResultStore, name=None) -> str:
+        data = []
+        for category in store.get_categories():
+            data.append(self.render(store.get_category(category)))
+        #import json
+        #print(json.dumps(data, indent=1))
+        csv = flat_dict_to_csv(data)
+        #print(csv)
+        if name:
+            filename = str(name) + ".csv"
+        else:
+            filename = "/tmp/biogames" + ".csv"
+        try:
+            with open(filename, "w") as out:
+                out.write(csv)
+        except PermissionError as e:
+            raise PermissionError(e, filename)
+        return filename
+
+    def replace(self, key, i):
+        if i in TASK_NAMES:
+            key = f"{TASK_NAMES[i]} ({key})"
+        if "sequence_" in i:
+            sid = i.split("_")[1]
+            cache, seq = sid.split("+")
+            cache = CACHE_NAMES.get(cache, cache)
+            seq = SEQUENCE_NAMES.get(seq, seq)
+            key = f"{cache}->{seq} {sid} duration"
+        return key
@@ -1,3 +1,4 @@
+import copy
 import json
 import logging
 from typing import List
@@ -6,7 +7,7 @@ import datetime
 import matplotlib.pyplot as plt

 from analysis.analyzers import LogEntryCountAnalyzer
-from analysis.util.meta_temp import KML_PATTERN
+from analysis.util.meta_temp import GEOJSON_COORDINATES, GEOJSON_PATTERN, KML_PATTERN
 from . import Render, Result
 from analysis.analyzers import LocationAnalyzer
@@ -23,9 +24,12 @@ class JSONRender(Render):
         print(json.dumps([r.get() for r in self.filter(results)], indent=1))


-class TrackRender(Render):
+class SpatialRender:
     result_types = [LocationAnalyzer]
+
+
+class TrackRender(SpatialRender, Render):

     def render(self, results: List[Result], name=None):
         data = []
         log.debug(results)
@@ -42,11 +46,10 @@ class TrackRender(SpatialRender, Render):


 def format_time(ts):
-    return datetime.datetime.fromtimestamp(ts/1000).strftime("%Y-%m-%dT%H:%M:%S.%f")
+    return datetime.datetime.fromtimestamp(ts / 1000).strftime("%Y-%m-%dT%H:%M:%S.%f")


-class KMLRender(Render):
-    result_types = [LocationAnalyzer]
+class KMLRender(SpatialRender, Render):

     def render(self, results: List[Result], name=None):
         files = []
@@ -59,14 +62,59 @@ class KMLRender(SpatialRender, Render):
                           long=entry['location']['coordinates'][0])
                      for entry in result.get()
                      ]
-            filename = str(result.name)+".kml"
+            if name:
+                filename = str(name) + ".kml"
+            else:
+                filename = str(result.name) + ".kml"
+            print(filename)
             with open(filename, "w") as out:
-                out.write(KML_PATTERN.format(name=str(result.name), coordinates="\n".join(coords), when="\n".join(times)))
+                out.write(
+                    KML_PATTERN.format(name=str(result.name), coordinates="\n".join(coords), when="\n".join(times)))
+            #with open(filename + ".json", "w") as out:  #FIXME: why am I here??
+            #    json.dump(result.get(), out, indent=1)
             files.append(filename)
         return files


+class GeoJSON(SpatialRender, Render):
+    template = {
+        "type": "FeatureCollection",
+        "features": [
+            {
+                "type": "Feature",
+                "properties": {},
+                "geometry": {
+                    "type": "LineString",
+                    "coordinates": []
+                }
+            }
+        ]
+    }
+
+    def make_geojson(self, src):
+        coordinates = []
+        times = []
+        for location in src:
+            #print(location)
+            coordinates.append(location["location"]["coordinates"])
+            times.append(location["timestamp"])
+        template = copy.deepcopy(self.template)
+        template["features"][0]["properties"] = {"times": times}
+        template["features"][0]["geometry"]["coordinates"] = coordinates
+        return template
+
+    def render(self, results: List[Result], name=None) -> [str]:
+        files = []
+        for result in self.filter(results):
+            if name:
+                filename = str(name) + ".geojson"
+            else:
+                filename = str(result.name) + ".geojson"
+            json = self.make_geojson(result.get())
+            with open(filename, "w") as out:
+                json.dump(self.template, out, indent=1)
+            files.append(filename)
+        return files
+
+
 class HeatMapRender(TrackRender):
@@ -104,6 +152,7 @@ class LogEntryCountAnalyzerPlot(Render):
         plt.clf()
         plt.close()


 class LogEntryCountCSV(Render):
     result_types = [LogEntryCountAnalyzer]
+    summary = None
@@ -113,4 +162,4 @@ class LogEntryCountCSV(Render):
             return
         for result in self.filter(results):
             raw_data = result.get()
-            self.summary[name] = raw_data
+        self.summary[name] = raw_data
@@ -1,5 +1,3 @@
-import json
 import logging
 from typing import List

 import matplotlib.pyplot as plt
@@ -8,14 +6,6 @@ import numpy as np
 from . import Render
 from .. import Result, LocomotionActionAnalyzer

 log = logging.getLogger(__name__)

-def default(item):
-    return item
-def sort(item):
-    return item[0]
-def sort_sum(item):
-    return sum(item)
-
 def plot(results: [[int]], ylabel: str, title: str, legend: (str,) = ("Locomotion", "Action")):
     size = len(results)
@ -48,12 +38,11 @@ def plot_line(results: [[int]], ylabel="Ratio", title="Locomotion/Action "):
|
|||
plt.show()
|
||||
|
||||
|
||||
def filter_results(raw_results: [Result], keys, sort=default) -> [[int]]:
|
||||
def filter_results(raw_results: [Result], keys) -> [[int]]:
|
||||
results = []
|
||||
for result in raw_results:
|
||||
raw = result.get()
|
||||
results.append([raw[k] for k in keys])
|
||||
results = sorted(results,key=sort)
|
||||
return results
|
||||
|
||||
|
||||
|
|
@@ -77,12 +66,3 @@ class LocomotionActionRatioRender(LocomotionActionRender):
     def render(self, results: List[Result], name=None):
         results = filter_results(self.filter(results), ['locomotion_action_ratio'])
         plot_line(results, ylabel="Ratio", title="Locomotion/Action Ratio")
-
-class LocomotionActionRatioHistRender(LocomotionActionRender):
-    def render(self, results: List[Result]):
-        results = filter_results(self.filter(results), ['locomotion_action_ratio'])
-        plt.title("locomotion/action")
-        plt.xlabel("ratio")
-        plt.ylabel("frequency")
-        n, bins, patches = plt.hist([results], bins=len(results))
-        plt.show()
@@ -3,6 +3,7 @@ import json
 import numpy as np

+import analysis.analyzers
 from analysis import analyzers
 from analysis.util.geo import calc_distance
@@ -188,10 +189,11 @@ def get_all_data(store, sort=False, relative=True):
         if not log.analysis() == analyzers.ActivityMapper:
             continue
         result = defaultdict(lambda: defaultdict(lambda: 0))
-        for i in log.get()['track']:
-            coords = i['coordinates']
+        for i in log.get()['features']:
+            coords = i["geometry"]['coordinates']
+            print(coords)
             if len(coords) > 1:
-                distance = calc_distance(coords)
+                distance = calc_distance(json.dumps(i["geometry"]))
             else:
                 distance = 0.0
             result["space"][i['properties']['activity_type']] += distance
@@ -219,9 +221,9 @@ def get_all_data(store, sort=False, relative=True):
             value = percentage
         else:
             value = total
-        places[log.get()['instance']].append(value)
+        places[log.get()["properties"]['instance']].append(value)
         simus = defaultdict(lambda: 0)
-        for item in log.get()['boards']:
+        for item in log.get()["properties"]['boards']:
             if item["extra_data"]["activity_type"] == "simu":
                 simus[item["board_id"]] += 1
         simu_distribution[len(simus)] += 1
@@ -381,7 +383,7 @@ def plot_time_space_rel(combined, keys):
     print([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in ids])
     # plt.show()
     dpi = 200
-    plt.savefig("speed2.png", dpi=dpi)
+    plt.savefig("speed2_2019.png", dpi=dpi)
@@ -27,7 +27,6 @@
         "SimulationCategorizer",
         "InstanceConfig"],
     "disabled_analyzers": [
-        "ActivityMapper",
         "LocomotionActionAnalyzer",
         "LogEntryCountAnalyzer",
         "LocationAnalyzer",
@@ -7,17 +7,19 @@ from analysis.analyzers import get_renderer, render
 from analysis.analyzers.analyzer import ResultStore
 from analysis.analyzers.analyzer.default import write_logentry_count_csv, write_simulation_flag_csv
 from analysis.analyzers.render import wip
-from analysis.analyzers.render.default import LogEntryCountCSV, KMLRender
+from analysis.analyzers.render.default import LogEntryCountCSV, KMLRender, GeoJSON
 from analysis.analyzers.render.wip import time_distribution, plot_data
 from analysis.analyzers.settings import LogSettings, load_settings, parse_settings
 from analysis.loaders import LOADERS
 from analysis.util.processing import grep, run_analysis, src_file
+from analysis.util.meta_temp import CONFIG_NAMES

 logging.basicConfig(format='%(levelname)s %(name)s:%(message)s', level=logging.DEBUG)
 log: logging.Logger = logging.getLogger(__name__)

 logging.getLogger('requests').setLevel(logging.WARN)
-logging.getLogger("urllib3").setLevel(logging.WARNING)
+logging.getLogger("urllib3").setLevel(logging.WARN)
+logging.getLogger("shapely").setLevel(logging.WARN)


 def urach_logs(log_ids, settings):
@@ -107,16 +109,127 @@ if __name__ == '__main__':
     # plot_time_space_rel(combined, keys)
     plot_data(combined, wip.keys)

-    if True:
+    if False:
+
         def store(x):
             pass
         settings: LogSettings = load_settings("../oeb_kml.json")
-        log_ids = src_file("/home/clemens/git/ma/test/oeb_2016_path")
-        log_ids = log_ids[0:2]
+        #log_ids = src_file("/app/log_data/oeb/oeb_2016_path")
+        log_ids = src_file("/app/log_data/oeb/oeb_paths")
+        #log_ids = log_ids[0:10]
         print(log_ids)
-        store: ResultStore = run_analysis(log_ids, settings, LOADERS)
+        store: ResultStore = run_analysis(log_ids, settings, LOADERS, ResultStore(key_index=1))
         print("render")
-        kml = KMLRender()
-        kml.render(store.get_all())
+        kml = GeoJSON()
+        fields = store.get_categories()
+        artifacts = {key: kml.render(store.get_category(key)) for key in fields}
+        print(artifacts)
         print("done")
+
+        def atrifact_to_length(filename):
+            g = json.load(open(filename))
+            from analysis.util.geo import calc_distance
+            return calc_distance(json.dumps(g), "features.0.geometry.coordinates")
+
+        def simplified_length(filename):
+            from analysis.util.geo import json_to_track, distance
+            g = json.load(open(filename))
+            track = json_to_track(json.dumps(g), "features.0.geometry.coordinates")
+            simplified = track.simplify(0.0002, preserve_topology=True)
+            from shapely.geometry import mapping
+            json.dump(mapping(simplified), open(f"{filename}.simplified.geojson", "w"), indent=1)
+            return distance(simplified)
+
+        from collections import defaultdict
+
+        def get_lengths(artifacts, atrifact_to_length=atrifact_to_length):
+            stats = defaultdict(list)
+            for field in artifacts:
+                print(field, CONFIG_NAMES[field])
+                for i in artifacts[field]:
+                    distance = atrifact_to_length(i)
+                    warn = "\tLONG!" if distance > 10000 else "\tSHORT!" if distance < 1000 else ""
+                    print(f"\t{i}\t{distance}{warn}")
+                    stats[field].append(distance)
+            return stats
+
+        stats = get_lengths(artifacts)
+        import numpy as np
+
+        def quart_1(x):
+            return np.percentile(x, 25)
+        def quart_2(x):
+            return np.percentile(x, 50)
+        def quart_3(x):
+            return np.percentile(x, 75)
+        def quart_4(x):
+            return np.percentile(x, 100)
+
+        def print_stats(stats):
+            fns = [np.size, np.min, np.max, np.mean, np.median]  #, quart_1, quart_2, quart_3, quart_4]
+            names = "\t".join([x.__name__ for x in fns] + ["id", "name"])
+            print(names)
+            for i in stats:
+                stat = [f"{fn(stats[i]):.2f}" for fn in fns]
+                print("\t".join(stat + [i, CONFIG_NAMES[i]]))
+
+        def plot_stats(stats, filtered_stats, suffix=""):
+            import matplotlib.pyplot as plt
+            keys = sorted(stats.keys())
+            names = [CONFIG_NAMES[i] for i in keys]
+            values = [stats[i] for i in keys]
+            values_filtered = [filtered_stats[i] for i in keys]
+            fig, ax = plt.subplots()
+            ax.boxplot(values, labels=names, showfliers=False, showmeans=True, meanline=True)
+            fig.savefig(f"/app/log_data/oeb/plots/plot_raw{suffix}.png")
+            fig, ax = plt.subplots()
+            ax.boxplot(values_filtered, labels=names, showfliers=False, showmeans=True, meanline=True)
+            fig.savefig(f"/app/log_data/oeb/plots/plot_filtered{suffix}.png")
+            fig, ax = plt.subplots()
+            agg_data = values + values_filtered
+            agg_labels = names + [f"filtered(…{i[-4:]})" for i in names]
+            ax.boxplot(agg_data, labels=agg_labels, showfliers=False, showmeans=True, meanline=True)
+            fig.savefig(f"/app/log_data/oeb/plots/plot_combined{suffix}.png")
+
+        MIN = 1000
+        MAX = 100000
+
+        def filter(stats):
+            stats_filtered = defaultdict(list)
+            for i in stats:
+                stats_filtered[i] = [x for x in stats[i] if MIN < x < MAX]
+            return stats_filtered
+        stats_filtered = filter(stats)
+
+        stats_simple = get_lengths(artifacts, atrifact_to_length=simplified_length)
+        stats_filtered_simple = filter(stats_simple)
+
+        def summary(stats, stats_filtered, title):
+            print_stats(stats)
+            print(f"filter {MIN} < x < {MAX}")
+            print_stats(stats_filtered)
+            plot_stats(stats, stats_filtered, suffix=f"_{title}")
+
+        summary(stats, stats_filtered, "raw")
+        print("\nsimplified\n")
+        summary(stats_simple, stats_filtered_simple, "simplified")
+
+    if True:
+        settings: LogSettings = load_settings("time.json")
+        # log_ids = src_file("/app/log_data/oeb/oeb_2016_path")
+        log_ids = src_file("log_data/oeb/oeb_paths_host")
+        log_ids = src_file("/home/clemens/git/ma/test/src")
+        log_ids = src_file("/home/clemens/git/ma/project/log_data/neocartographer/index")
+        # log_ids = log_ids[0:10]
+        print(log_ids)
+        store: ResultStore = run_analysis(log_ids, settings, LOADERS, ResultStore(key_index=1))
+        results = {}
+        for cat in store.get_categories():
+            results[cat] = [result.get() for result in store.get_category(cat)]
+        with open("times_neo.json", "w") as out:
+            json.dump(results, out, indent=1)

     ####################
     #for cat in store.get_categories():
     #    render(analyzers.ActivityMapper, store.get_category(cat), name=cat)
@@ -6,7 +6,8 @@ from analysis.util import json_path
 logger = logging.getLogger(__name__)


-def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"):
+#def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"):  #FIXME: I was changed
+def download_board(board_id, instance_config_id, sequence_id, source, path="activity/data/results/"):
     local_file = os.path.join("static", instance_config_id, sequence_id, board_id)
     abs_path = os.path.join(path, local_file)
     if os.path.exists(abs_path):
@@ -1,12 +1,33 @@
-def calc_distance(geojson: str):
-    from shapely.geometry import LineString
-    from shapely.ops import transform
-    from functools import partial
-    import pyproj
-    import json
-    track = LineString(json.loads(geojson)['coordinates'])
+import json
+
+import pyproj
+from shapely.geometry import LineString, mapping
+from shapely.ops import transform
+from functools import partial
+
+from analysis.util import json_path
+
+
+def distance(track):
     project = partial(
         pyproj.transform,
         pyproj.Proj(init='EPSG:4326'),
         pyproj.Proj(init='EPSG:32633'))
-    return transform(project, track).length
+    return transform(project, track).length
+
+
+def json_to_track(geojson, path, load=True):
+    if load:
+        geojson = json.loads(geojson)
+    return LineString(json_path(geojson, path))
+
+
+def calc_distance(geojson: str, path="coordinates", load=True):
+    track = json_to_track(geojson, path, load)
+    return distance(track)
+
+
+def calc_distance_simplified(geojson, path="coordinates", load=True):
+    track = json_to_track(geojson, path, load)
+    simplified = track.simplify(0.0002, preserve_topology=True)
+    return distance(simplified)
@@ -1,14 +1,21 @@
-def json_path(obj: dict, key: str):
+def json_path(obj: dict, key: str):  # TODO: test me!
     """Query a nested dict with a dot-separated path"""
-    if not type(obj) is dict:
-        return None
+    #if type(obj) is list and not "." in key:
+    #    return obj[int(key)]
+    if type(obj) not in (dict, list):
+        raise ValueError("obj is no object (no list, too)")
     if "." not in key:
         if key not in obj:
-            return None
+            return KeyError("key not in object", key)
         return obj[key]
     child_key = key.split(".")
     if child_key[0] not in obj:
-        return None
+        try:
+            index = int(child_key[0])
+            return json_path(obj[index], ".".join(child_key[1:]))
+        except:
+            raise KeyError("key not in object", key)
+        raise KeyError("key not in object", key)
     return json_path(obj[child_key[0]], ".".join(child_key[1:]))
@@ -99,7 +99,7 @@ CONFIG_NAMES = {
 }


-KML_PATTERN="""<?xml version="1.0" encoding="UTF-8"?>
+KML_PATTERN = """<?xml version="1.0" encoding="UTF-8"?>
 <kml xmlns="http://www.opengis.net/kml/2.2" xmlns:gx="http://www.google.com/kml/ext/2.2">
 <Document>
 <Placemark>
@@ -113,4 +113,92 @@ KML_PATTERN="""<?xml version="1.0" encoding="UTF-8"?>
 </Placemark>
 </Document>
 </kml>
-"""
+"""
+
+GEOJSON_PATTERN = """{
+  "type": "FeatureCollection",
+  "features": [
+    {
+      "type": "Feature",
+      "properties": {properties},
+      "geometry": {
+        "type": "LineString",
+        "coordinates": {coordinates}
+      }
+    }
+  ]
+}
+"""  # TODO: fix me
+
+GEOJSON_COORDINATES = "[{lon},{lat}]"
+
+TASK_NAMES = {
+    "16fc3117-61db-4f50-b84f-81de6310206f_13127209-103c-4aed-9cce-b8a2cd9f7663_32e93082-1aa7-11e5-9827-74d43509b03a": "Lebensraum",
+    "16fc3117-61db-4f50-b84f-81de6310206f_13127209-103c-4aed-9cce-b8a2cd9f7663_3a27040f-1a9c-11e5-9827-74d43509b03a": "Simulation",
+    "16fc3117-61db-4f50-b84f-81de6310206f_2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075_096093b0-d1ca-49f3-8d51-f32fa8874db5": "Biosphärenreservat",
+    "16fc3117-61db-4f50-b84f-81de6310206f_2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075_b10951a5-1a8a-11e5-b1a2-74d43509b03a": "Simulation",
+    "16fc3117-61db-4f50-b84f-81de6310206f_41abfe17-aef3-41ee-b1e5-eedc8208680f_255d9c6d-1aa0-11e5-9827-74d43509b03a": "Simulation",
+    "16fc3117-61db-4f50-b84f-81de6310206f_41abfe17-aef3-41ee-b1e5-eedc8208680f_e0d2dee8-1a9f-11e5-9827-74d43509b03a": "Fellbestimmung",
+    "16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_10c3329f-7a88-4aa4-9567-14c811d2a6bc": "Lockstock-Code",
+    "16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_5732fe6c-1a9e-11e5-9827-74d43509b03a": "Lockstock-Nachweis",
+    "16fc3117-61db-4f50-b84f-81de6310206f_b9571a6b-c537-4a92-8618-2d73415dec87_e4bbaf4c-1a9d-11e5-9827-74d43509b03a": "Simulation",
+    "16fc3117-61db-4f50-b84f-81de6310206f_e08ffe7c-b24d-4fcd-9355-8a459a2c07b7_597c651a-1a8c-11e5-b1a2-74d43509b03a": "Simulation",
+    "16fc3117-61db-4f50-b84f-81de6310206f_e08ffe7c-b24d-4fcd-9355-8a459a2c07b7_da49b7be-bc13-11e4-a0de-6364e0bfe983": "Holzbedarf",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_027dcc39-d642-4900-91c4-abbd9c317cb8_610e91d9-0a1c-4a38-9399-deb0ff8dcb05": "Fellbestimmung",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_027dcc39-d642-4900-91c4-abbd9c317cb8_6a03c92d-9e23-4c67-9e76-6a5e28224371": "Simulation",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_25a3a482-a119-4db4-8c4e-235ea9d8dab7_90a01be2-dc8a-4733-b302-de5554969453": "Simulation",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_25a3a482-a119-4db4-8c4e-235ea9d8dab7_914778bc-f7e9-4327-a78b-71b6fa8762b1": "Biosphärenreservat",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_7a8ff4c4-7976-45e0-8ef5-cb386d536cb3_3ae4452e-ed67-4687-849d-e9341fca2900": "Simulation",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_7a8ff4c4-7976-45e0-8ef5-cb386d536cb3_f6f5c087-487c-43d8-9409-648a59684a09": "Lebensraum",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_97b86d4e-4724-4431-9c94-d2f57696fe2e_26e6558e-8069-45a1-961d-ab1ec9c5aa83": "Holzbedarf",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_97b86d4e-4724-4431-9c94-d2f57696fe2e_970ff4e0-16bd-4380-8e69-91a324a59523": "Simulation",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_3366d6a3-684f-423a-bd7f-5c0107d4b972": "Simulation",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_a7188b81-e25b-456d-9742-5f11adb7d461": "Lockstock-Nachweis",
+    "17d401a9-de21-49a2-95bc-7dafa53dda64_a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05_d29537b9-de0b-42c2-b3da-27a3dbc57988": "Lockstock-Code",
+}
+
+CACHE_NAMES = {
+    "043ab9fe-64e8-4e76-8bf6-8cc9db35eba1": "1a_Infozentrum",
+    "37f326ed-9732-44b5-9ba7-e666d31cc4e7": "2a_Holzlager",
+    "bf96eee0-4c92-43d8-aced-a95e4eedae9f": "2b_Holzstapel",
+    "a5723715-7ba7-4431-9d0b-c91c351a3ccc": "3a_Lebensraum_Hecke",
+    "dd68ba57-a081-46be-9a76-e49cd5209383": "3b_Lebensraum_Hecke",
+    "bb21628e-d039-4c16-9fe1-68de7f448fa4": "4a_Lockstock_finden",
+    "8291c397-b3a9-4564-9365-bd660ab1abcc": "4b_Lockstock_finden",
+    "e92d8175-a65f-40de-ae76-3cbde55dfd4d": "5a_Straße",
+    "30451de3-2d5d-44c7-84b2-2abddbc8adcc": "5b_Straße",
+    "22fcc44c-64d4-4f84-ad05-8107542a04d2": "6a_Jaegerstand",
+    "1740e151-cd75-45c0-a06e-d724b9d69729": "6a_Jaegerstand",
+    "6d97d48a-7ac1-4e3a-b797-c2b4aa681a10": "5a_Straße",
+    "98e60f51-c4d5-4833-bc3b-2820e1bdd09d": "4b_Lockstock_finden",
+    "61d6dc12-11b5-4a9c-b0d8-7a38a29d772a": "5b_Straße",
+    "f4762feb-addb-4e82-b923-78f8c7b6aff9": "2b_Holzstapel",
+    "25b2cc3b-f8fd-4a21-9350-d175d837f6b6": "3a_Lebensraum_Hecke",
+    "5ba5046f-c956-4c21-aea5-a0a6055ed7e4": "1a_Infozentrum",
+    "fb60b94b-3f82-4ba9-98ac-f52105bd26f1": "2a_Holzlager",
+    "12b9584a-14b4-40c6-aa13-9fb11062e917": "4a_Lockstock_finden",
+    "19908306-8c70-4861-bec8-49e849e94722": "3b_Lebensraum_Hecke",
+    "None": "initial",
+    "only": "",
+}
+
+SEQUENCE_NAMES = {
+    "89b769f8-2c98-4f55-b741-1dfa022c3286": "1_Videoerklaerung",
+    "286cab41-6a81-4dfe-9bef-e86923ca8c97": "A_Einleitung",
+    "2119b80e-74d3-4f3b-b0e0-e6a1f2c1d075": "B",
+    "25a3a482-a119-4db4-8c4e-235ea9d8dab7": "B",
+    "97b86d4e-4724-4431-9c94-d2f57696fe2e": "C_mit_Dilemma",
+    "e08ffe7c-b24d-4fcd-9355-8a459a2c07b7": "C_mit_Dilemma",
+    "5f644fb4-5cc7-43a2-afb4-191dce80c875": "D_Dilemmasequenz",
+    "847ab5ff-7c98-4cdc-bc9e-bb619a0a98bb": "D_Dilemmasequenz",
+    "13127209-103c-4aed-9cce-b8a2cd9f7663": "E",
+    "7a8ff4c4-7976-45e0-8ef5-cb386d536cb3": "E",
+    "a4f5c5d8-43b0-45f3-b3cf-3749fcb81f05": "F",
+    "b9571a6b-c537-4a92-8618-2d73415dec87": "F",
+    "027dcc39-d642-4900-91c4-abbd9c317cb8": "G",
+    "41abfe17-aef3-41ee-b1e5-eedc8208680f": "G",
+    "be59a20a-69ce-471b-8f70-76ce200e32c9": "H_Abschlusserzaehlung",
+    "d4073563-da42-4ad2-9a9b-20ef29da6309": "H_Abschlusserzaehlung",
+    "54e03082-1a6b-11e5-aa26-00199963ac6e": "seq_score",
+    "95d82cd3-5bda-465a-8757-7179cdafe590": "seq_score",
+}
@@ -0,0 +1,28 @@
+from datetime import datetime as dt
+
+SEP = "\",\""
+LS = "\""
+LE = "\""
+NL = LS + "\n" + LE
+
+
+def flat_dict_to_csv(data):
+    keys = set()
+    for i in data:
+        keys = keys.union(set(i.keys()))
+    keys = sorted(keys)
+    out = SEP.join(keys)
+    for i in data:
+        out += NL + SEP.join([escape(i.get(j, "")) for j in keys])
+    return LS + out + LE
+
+
+def escape(value):
+    val = str(value)
+    val = val.replace(".", ",")
+    return val
+
+
+def pretty_ts(timestamp, fmt="%Y-%m-%d %H:%M:%S"):
+    d = dt.fromtimestamp(int(timestamp)/1000.0)
+    return d.strftime(fmt)
@ -12,7 +12,7 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
|
|||
try:
|
||||
loader.load(logfile)
|
||||
except BaseException as e:
|
||||
raise RuntimeError(e)
|
||||
raise RuntimeError(e, logfile)
|
||||
analyzers: List[Analyzer] = []
|
||||
log.debug("build analyzers")
|
||||
for analyzer in settings.analyzers:
|
||||
|
|
@ -28,8 +28,10 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
|
|||
return analyzers
|
||||
|
||||
|
||||
def run_analysis(log_ids: list, settings, loaders):
|
||||
store: ResultStore = ResultStore()
|
||||
def run_analysis(log_ids: list, settings, loaders, result_store=None):
|
||||
if not result_store:
|
||||
result_store = ResultStore()
|
||||
store: ResultStore = result_store
|
||||
for log_id in log_ids:
|
||||
log.info("LOG_ID: "+ str(log_id))
|
||||
for analysis in process_log(log_id, settings, loaders):
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,19 @@
+<script
+        src="https://code.jquery.com/jquery-3.2.1.min.js"></script>
+<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"/>
+<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"></script>
+
+<script src="https://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>
+
+<script src="my.js"></script>
+<link href="style.css" rel="stylesheet"/>
+<main>
+    <div class="mapDiv" id="mainMap"></div>
+    <div class="sequenceContainer">
+        <div class="sequence"></div>
+    </div>
+</main>
+
+<!--div style="font-size:0.1px;position:absolute;bottom:0;">OSM Logo: CC-BY-SA
+http://wiki.openstreetmap.org/wiki/File:Mag_map-120x120.png
+</div-->

Binary file not shown. (After: 191 KiB image)
@@ -0,0 +1,110 @@
+function loadData() {
+    console.log($(location).attr('hash').substr(1));
+    $.getJSON($(location).attr('hash').substr(1), function (data) {
+        var images = {};
+        var tiles = {
+            "openstreetmap": L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
+                maxNativeZoom: 19,
+                maxZoom: 24,
+                attribution: '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
+            }),
+            "esri sat": L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
+                maxNativeZoom: 19,
+                maxZoom: 24,
+                attribution: 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
+            }),
+            "google sat": L.tileLayer('https://{s}.google.com/vt/lyrs=s&x={x}&y={y}&z={z}', {
+                maxNativeZoom: 20,
+                maxZoom: 24,
+                subdomains: ['mt0', 'mt1', 'mt2', 'mt3']
+            })
+        };
+        var map = L.map("mainMap", {layers: [tiles.openstreetmap]});
+
+        function styleTrack(feature) {
+            var styles = {};
+            styles.color = data.properties.colors[feature.properties.activity_type];
+            return styles;
+        }
+
+        var highlighted = null;
+
+        function onClick(e) {
+            var start = e.target.feature.geometry.properties.start_timestamp;
+            var end = e.target.feature.geometry.properties.end_timestamp;
+            var changed = highlighted !== e.target.feature;
+            $.each(images, function (timestamp, board) {
+                    if ((timestamp >= start && timestamp < end) && changed) {
+                        board.image.first().addClass("highlight");
+                    } else {
+                        board.image.removeClass("highlight");
+                        highlighted = null;
+                    }
+                }
+            );
+            if (changed) {
+                highlighted = e.target.feature;
+            }
+        }
+
+        var coords = [];
+
+        function onEachFeature(feature, layer) {
+            layer.setStyle(styleTrack(feature));
+            layer.on('click', onClick);
+            if (feature.geometry.coordinates.length > 1) {
+                coords = coords.concat(feature.geometry.coordinates.map(function (p) {
+                    return [p[1], p[0], 0.1];
+                }));
+            }
+        }
+
+        var track = L.geoJSON(data['features'], {
+            //style: styleTrack,
+            onEachFeature: onEachFeature
+        }).addTo(map);
+
+        map.fitBounds(track.getBounds());
+
+        //var heat = L.heatLayer(coords);
+        //L.control.layers(tiles, {"heatmap": heat}).addTo(map);
+
+        var list = $("<ul />");
+        var current = {
+            "pos": data.properties["boards"][0].coordinate.coordinates
+        };
+        var i = 0;
+        while (current.pos == undefined) {
+            i += 1;
+            current.pos = data.properties["boards"][i].coordinate.coordinates;
+        }
+        console.log(current);
+        var marker = L.marker([current.pos[1], current.pos[0]]).addTo(map);
+        $.each(data.properties["boards"], function (index, entry) {
+            //console.log(index, entry);
+            var item = $("<li>", {class: entry.extra_data.activity_type});
+            var container = $("<div>", {class: "board"});
+            var image = $("<img>", {src: entry.image.replace("static/progress/", "")});
+            image.attr("data-time", entry.timestamp);
+            image.hover(function () {
+                marker.setLatLng([entry.coordinate.coordinates[1], entry.coordinate.coordinates[0]]);
+            }, function () {
+                marker.setLatLng(current.pos.coordinates[1], current.pos.coordinates[0]);
+            });
+            image.click(function (e) {
+                current.board = image;
+                current.pos = entry.coordinate;
+            });
+            images[entry.timestamp] = {image: image, coordinate: entry.coordinate};
+            image.appendTo(container);
+            container.appendTo(item);
+            item.appendTo(list);
+        });
+        current.board = images[data.properties["boards"][1].timestamp];
+        list.appendTo(".sequence");
+    });
+}
+
+$(document).ready(function () {
+    loadData();
+});

Binary file not shown. (After: 181 KiB image)
@ -2,7 +2,7 @@ version: "3"
|
|||
|
||||
services:
|
||||
app:
|
||||
image: docker.clkl.de/ma/celery:0.4.1
|
||||
image: docker.clkl.de/ma/celery:0.4.2
|
||||
build: .
|
||||
volumes:
|
||||
- ./:/app
|
||||
|
|
@@ -21,7 +21,7 @@ services:
       - "traefik.url.frontend.rule=Host:select.ma.potato.kinf.wiai.uni-bamberg.de"

   celery:
-    image: docker.clkl.de/ma/celery:0.4.1
+    image: docker.clkl.de/ma/celery:0.4.2
     environment:
       - PYTHONPATH=/app
       - PYTHONUNBUFFERED=1
@@ -0,0 +1,66 @@
+{
+  "logFormat": "zip",
+  "entryType": "@class",
+  "spatials": [
+    "de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
+  ],
+  "actions": [
+    "...QuestionAnswerEvent",
+    "...SimuAnswerEvent"
+  ],
+  "boards": [
+    "de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
+  ],
+  "analyzers": {
+    "analysis.analyzers": [
+      "SimulationCategorizer",
+      "LocationAnalyzer"
+    ]
+  },
+  "sequences": {
+    "start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
+    "end": {
+      "@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
+      "action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
+    }
+  },
+  "custom": {
+    "simulation_rounds": [
+      "de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
+    ],
+    "simu_data": [
+      "de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
+    ],
+    "instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
+    "instance_id": "instance_id",
+    "instance_config_id": "config.@id",
+    "sequences2": {
+      "id_field": "sequence_id",
+      "start": {
+        "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+        "action": "START"
+      },
+      "end": {
+        "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
+        "action": "PAUSE"
+      }
+    },
+    "coordinates": "location.coordinates",
+    "metadata": {
+      "timestamp": "timestamp",
+      "gamefield": "instance_id",
+      "user": "player_group_name"
+    }
+  },
+  "source": {
+    "type": "Biogames",
+    "username": "ba",
+    "password": "853451",
+    "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+  },
+  "render": [
+    "KMLRender"
+  ]
+}
@@ -12,4 +12,7 @@ flask==0.12.2
 celery==4.1.1
 redis==2.10.6

-lxml==4.2.1
+lxml==4.2.1
+
+shapely==1.6.4
+pyproj==1.9.5.1
@@ -53,7 +53,7 @@ KML = """{
     "type": "Biogames",
     "username": "ba",
    "password": "853451",
-    "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+    "host": "http://biodiv2govm.kinf.wiai.uni-bamberg.de"
  },
  "render": [
    "KMLRender"
@ -115,25 +115,137 @@ ACTIVITY = """{
        "type": "Biogames",
        "username": "ba",
        "password": "853451",
        "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
        "host": "http://biodiv2govm.kinf.wiai.uni-bamberg.de"
    },
    "render": [
        "ActivityMapper"
    ]
}"""

CONFIGS = { # TODO: more
    "KML": KML,
    "ActivityMapper": ACTIVITY,
KML_geo = """{
    "logFormat": "neocartographer",
    "entryType": "type",
    "spatials": [
        "location"
    ],
    "actions": [],
    "boards": [],
    "analyzers": {
        "analysis.analyzers": [
            "SimpleCategorizer",
            "LocationAnalyzer"
        ]
    },
    "sequences": {},
    "custom": {
        "coordinates": "location.coordinates",
        "metadata": {
            "timestamp": "timestamp",
            "gamefield": "instance_id",
            "user": "player_group_name"
        }
    },
    "source": {
        "type": "Geogames",
        "host": "http://log_data/",
        "path": "neocartographer"
    },
    "render": [
        "KMLRender"
    ]
}"""

OEB = """{
    "logFormat": "zip",
    "entryType": "@class",
    "spatials": [
        "de.findevielfalt.games.game2.instance.log.entry.LogEntryLocation"
    ],
    "actions": [
        "...QuestionAnswerEvent",
        "...SimuAnswerEvent"
    ],
    "boards": [
        "de.findevielfalt.games.game2.instance.log.entry.ShowBoardLogEntry"
    ],
    "analyzers": {
        "analysis.analyzers": [
            "BiogamesCategorizer",
            "LocationAnalyzer",
            "BiogamesDuration",
            "BiogamesTasks",
            "GameFieldInstanceGroup"
        ]
    },
    "sequences": {
        "start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryCache",
        "end": {
            "@class": "de.findevielfalt.games.game2.instance.log.entry.LogEntryInstanceAction",
            "action.@class": "de.findevielfalt.games.game2.instance.action.CacheEnableAction"
        }
    },
    "custom": {
        "simulation_rounds": [
            "de.findevielfalt.games.game2.instance.log.entry.LogEntryQuestion"
        ],
        "simu_data": [
            "de.findevielfalt.games.game2.instance.data.sequence.simulation.SimulationBoardData"
        ],
        "instance_start": "de.findevielfalt.games.game2.instance.log.entry.LogEntryStartInstance",
        "instance_id": "instance_id",
        "instance_config_id": "config.@id",
        "sequences2": {
            "id_field": "sequence_id",
            "start": {
                "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
                "action": "START"
            },
            "end": {
                "@class": "de.findevielfalt.games.game2.instance.log.entry.ShowSequenceLogEntry",
                "action": "PAUSE"
            }
        },
        "coordinates": "location.coordinates",
        "metadata": {
            "timestamp": "timestamp",
            "gamefield": "instance_id",
            "user": "player_group_name"
        }
    },
    "source": {
        "type": "Biogames",
        "username": "ba",
        "password": "853451",
        "host": "https://biogames.kinf.wiai.uni-bamberg.de"
    },
    "render": [
        "OEBRender"
    ]
}

"""
CONFIGS = {
    "Biogames": {
        "ActivityMapper": ACTIVITY,
        "KML": KML,
        "DauerEntfernungPunkteZeit": OEB,
    },
    "Geogames": {
        "KML": KML_geo,
    },
}

URLS = {
    "KML": "/",
    "DauerEntfernungPunkteZeit": "/",
    "ActivityMapper": "#",
}

HOSTS = {
    "Biogames": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
    #"Biogames": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
    #"Biogames": "http://www.biodiv2go.de",
    "Biogames": "http://biogames.kinf.wiai.uni-bamberg.de/",
    "Geogames": "http://log_data/",
}
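CONFIGS and HOSTS are now nested one level deeper, keyed by game type first and config name second. A runnable sketch of the lookup the request handlers below perform, with abbreviated stand-in values:

    # Stand-ins for the module-level tables above (values abbreviated).
    CONFIGS = {"Biogames": {"KML": "{...}"}, "Geogames": {"KML": "{...}"}}
    HOSTS = {"Biogames": "http://biogames.kinf.wiai.uni-bamberg.de/",
             "Geogames": "http://log_data/"}

    game, name = "Biogames", "KML"   # session['game'] and the posted form value
    host = HOSTS[game]               # backend host for this game type
    config = CONFIGS[game][name]     # config string handed to the analysis task
    print(host, config)

An unknown combination raises KeyError; `CONFIGS[game].get(name)` would make that failure mode explicit if preferred.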
@ -1,5 +1,5 @@
body {
    background-color: aqua;
    /* background-color: limegreen;*/
}
#data{
    display: none;
@ -34,7 +34,7 @@ def login():
        session['username'] = request.form['username']
        session['cookies'] = client.cookies
        session['game'] = game
        session['host'] = BIOGAMES_HOST
        session['host'] = HOSTS[game]
        clients[session['uid']] = client
        return redirect("/results")
    return redirect("/?fail")
@ -65,7 +65,7 @@ def games():
        return redirect("/")
    if session['logged_in'] and not session['uid'] in clients:
        clients[session['uid']] = CLIENTS[session['game']](host=session['host'], **session['cookies'])
    return render_template("games.html", logs=clients[session['uid']].list(), configs=CONFIGS)
    return render_template("games.html", logs=clients[session['uid']].list(), configs=CONFIGS[session['game']])


@app.route("/start", methods=['POST'])
@ -79,7 +79,7 @@ def start():
    }
    params = {
        "log_ids": request.form.getlist('logs'),
        "config": CONFIGS[request.form['config']],
        "config": CONFIGS[session['game']][request.form['config']],
        "username": session['username'],
        "cookies": session['cookies'],
        "host": session['host'],
@ -1,45 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <title>Leaflet.heat demo</title>
    <link rel="stylesheet" href="http://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.css" />
    <script src="http://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.js"></script>
    <style>
        #map { width: 1024px; height: 768px; }
        body { font: 16px/1.4 "Helvetica Neue", Arial, sans-serif; }
        .ghbtns { position: relative; top: 4px; margin-left: 5px; }
        a { color: #0077ff; }
    </style>
</head>
<body>

<div id="map"></div>

<!-- <script src="../node_modules/simpleheat/simpleheat.js"></script>
<script src="../src/HeatLayer.js"></script> -->

<script src="https://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>

<script src="./heat_data.js"></script>
<script>
    var options = {maxZoom:22};
    var map = L.map('map', options).setView(coords[0], 17);
    L.control.scale().addTo(map);
    var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
        attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
    }).addTo(map);

    function addHeat(coords){
        var heat = L.heatLayer(coords).addTo(map);
    }
    //coords = coords.map(function (p) { return [p[1], p[0], 0.05]; });

    //var heat = L.heatLayer(coords).addTo(map);
    addHeat(coords);
</script>
<!--script src="./coord.js"></script>
<script>
    //addHeat(coords);
</script-->
</body>
</html>
@ -1,8 +0,0 @@
version: "2"
services:
  http:
    image: httpd:alpine
    volumes:
      - ./:/usr/local/apache2/htdocs
    ports:
      - 5001:80
@ -1,25 +0,0 @@
<script
        src="https://code.jquery.com/jquery-3.2.1.min.js"
        integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
        crossorigin="anonymous"></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
      integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
      crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
        integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
        crossorigin=""></script>

<script src="https://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>

<script src="my.js"></script>
<link href="style.css" rel="stylesheet"/>
<main>
    <div class="mapDiv" id="mainMap"></div>
    <div class="sequenceContainer">
        <div class="sequence"></div>
    </div>
</main>

<!--div style="font-size:0.1px;position:absolute;bottom:0;">OSM Logo: CC-BY-SA
http://wiki.openstreetmap.org/wiki/File:Mag_map-120x120.png
</div-->
@ -1,15 +0,0 @@
<script
        src="https://code.jquery.com/jquery-3.2.1.min.js"
        integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
        crossorigin="anonymous"></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
      integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
      crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
        integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
        crossorigin=""></script>

<script src="log.js"></script>
<style>
    .map { width: 512px; height: 512px; }
</style>
@ -1,72 +0,0 @@
$.getJSON("data/fooo", function (data) {
    var list = $("<ul />");
    var mapC = $("<div />", {class: "map", id: "map"});
    mapC.appendTo("body");
    var track = [];
    var times = [];
    $.each(data.spatials, function (i, elem) {
        track.push([elem.coordinates[1], elem.coordinates[0]]);
        times.push(i);
    });
    var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
        attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
    });
    var map = L.map("map", {layers: [tiles]});
    L.control.scale().addTo(map);
    var layer = L.polyline(track).addTo(map);
    map.fitBounds(layer.getBounds());
    $.each(data, function (key, value) {
        //console.log(key, value);
        //key: instance_id, value: AnlysisResult
        //value.result.instance: InstanceConfig_id
        // console.log(key, value[0].result.store[0].timestamp);
        /*$.each(value[0].result.store, function (index, entry) {
            //console.log(entry);
            var time = new Date(entry.timestamp);
            var item = $("<li>", {html: entry.sequence + " @ " + time.toLocaleDateString() + " "+ time.toLocaleTimeString()});
            var container = $("<p />");
            if (entry.track.length > 0) {
                var mapName = "map" + index;
                //console.log(mapName, entry.track.length);
                var mapContainer = $("<div />", {id: mapName, class: "map"});
                var track = [];
                $.each(entry.track, function (i, elem) {
                    track.push([elem.coordinates[1], elem.coordinates[0]]);
                });
                maps[mapName] = track;

                mapContainer.appendTo(container);
            }
            $.each(entry.events, function (i, event) {
                if ("image" in event) {
                    $("<img />", {src: event.image, height: 200}).appendTo(container);
                }
            });
            container.appendTo(item);
            item.appendTo(list);
        });*/
    });
    list.appendTo("body");
    var slider = $("<input />", {type: "range", start:0,end:100});
    slider.appendTo("body");
/*});

$(window).on("load", function () {*/
    // setTimeout(function () {

    //console.log(maps);
    /*$.each(maps, function (mapName, track) {
        //console.log("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAa");
        var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
            attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
        });
        var map = L.map(mapName, {layers: [tiles]});
        L.control.scale().addTo(map);
        // console.log(mapName, track);
        var layer = L.polyline(track, {color: "green"}).addTo(map);
        map.fitBounds(layer.getBounds());
        //console.log(layer)
        //L.control.layers({"osm":tiles}, {layer]).addTo(map);
    });*/
    // }, 2000);
});
@ -1,103 +0,0 @@
//$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_03b9b6b4-c8ab-4182-8902-1620eebe8889.json", function (data) { //urach
//$.getJSON("data/ff8f1e8f-6cf5-4a7b-835b-5e2226c1e771_de7df5b5-edd5-4070-840f-68854ffab9aa.json", function (data) { //urach
//$.getJSON("data/90278021-4c57-464e-90b1-d603799d07eb_07da99c9-398a-424f-99fc-2701763a63e9.json", function (data) { //eichstätt
//$.getJSON("data/13241906-cdae-441a-aed0-d57ebeb37cac_d33976a6-8a56-4a63-b492-fe5427dbf377.json", function (data) { //stadtökologie
$.getJSON("data/5e64ce07-1c16-4d50-ac4e-b3117847ea43_2f664d7b-f0d8-42f5-8731-c034ef86703e.json", function (data) { //filderstadt
//$.getJSON("data/17d401a9-de21-49a2-95bc-7dafa53dda64_98edcb70-03db-4465-b185-a9c9574995ce.json", function (data) { //oeb2016
    var images = {};
    var tiles = {
        "openstreetmap": L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
            maxNativeZoom: 19,
            maxZoom: 24,
            attribution: '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
        }),
        "esri sat": L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
            maxNativeZoom: 19,
            maxZoom: 24,
            attribution: 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
        }),
        "google sat": L.tileLayer('https://{s}.google.com/vt/lyrs=s&x={x}&y={y}&z={z}', {
            maxNativeZoom: 20,
            maxZoom: 24,
            subdomains: ['mt0', 'mt1', 'mt2', 'mt3']
        })
    };
    var map = L.map("mainMap", {layers: [tiles.openstreetmap]});

    function styleTrack(feature) {
        var styles = {};
        styles.color = data.colors[feature.properties.activity_type];
        return styles;
    }

    var highlighted = null;

    function onClick(e) {
        var start = e.target.feature.geometry.properties.start_timestamp;
        var end = e.target.feature.geometry.properties.end_timestamp;
        var changed = highlighted !== e.target.feature;
        $.each(images, function (timestamp, board) {
            if ((timestamp >= start && timestamp < end) && changed) {
                board.image.first().addClass("highlight");
            } else {
                board.image.removeClass("highlight");
                highlighted = null;
            }
        });
        if (changed) {
            highlighted = e.target.feature;
        }
    }

    var coords = [];

    function onEachFeature(feature, layer) {
        layer.setStyle(styleTrack(feature));
        layer.on('click', onClick);
        if (feature.coordinates.length > 1) {
            coords = coords.concat(feature.coordinates.map(function (p) {
                return [p[1], p[0], 0.1];
            }));
        }
    }

    var track = L.geoJSON(data['track'], {
        //style: styleTrack,
        onEachFeature: onEachFeature
    }).addTo(map);

    map.fitBounds(track.getBounds());

    var heat = L.heatLayer(coords);
    L.control.layers(tiles, {"heatmap": heat}).addTo(map);

    var list = $("<ul />");
    var current = {
        "pos": data["boards"][1].coordinate.coordinates
    };
    console.log(current);
    var marker = L.marker([current.pos[1], current.pos[0]]).addTo(map);
    $.each(data["boards"], function (index, entry) {
        //console.log(index, entry);
        var item = $("<li>", {class: entry.extra_data.activity_type});
        var container = $("<div>", {class: "board"});
        var image = $("<img>", {src: entry.image.replace("static/progress/", "")});
        image.attr("data-time", entry.timestamp);
        image.hover(function () {
            marker.setLatLng([entry.coordinate.coordinates[1], entry.coordinate.coordinates[0]]);
        }, function () {
            marker.setLatLng(current.pos.coordinates[1], current.pos.coordinates[0]);
        });
        image.click(function (e) {
            current.board = image;
            current.pos = entry.coordinate;
        });
        images[entry.timestamp] = {image: image, coordinate: entry.coordinate};
        image.appendTo(container);
        container.appendTo(item);
        item.appendTo(list);
    });
    current.board = images[data["boards"][1].timestamp];
    list.appendTo(".sequence");
});
@ -1,68 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <title>Leaflet.heat demo</title>
    <link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
          integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
          crossorigin=""/>
    <script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
            integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
            crossorigin=""></script>
    <style>
        #map { width: 1024px; height: 768px; }
        body { font: 16px/1.4 "Helvetica Neue", Arial, sans-serif; }
        .ghbtns { position: relative; top: 4px; margin-left: 5px; }
        a { color: #0077ff; }
    </style>
</head>
<body>

<div id="map"></div>

<!-- <script src="../node_modules/simpleheat/simpleheat.js"></script>
<script src="../src/HeatLayer.js"></script> -->

<script src="http://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>

<script src="./track_data.js"></script>
<script>
    var options = {maxZoom:22};
    //var map = L.map('map', options).setView([49.90299388, 10.87004638], 17);
    var map = L.map('map', options);
    L.control.scale().addTo(map);
    var tiles = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
        attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors',
    }).addTo(map);

    function addHeat(coords){
        //var transformed = coords.map(function (p) { return [p[1], p[0], 0.25]; });
        var heat = L.heatLayer(coords).addTo(map);
    }
    var layers=[];
    function addTrack(tracks, i){
        var elem = L.polyline(tracks[i], {color:"green"});
        // layers[i] =L.LayerGroup([elem]);
        layers[i] = elem;
        map.fitBounds(elem.getBounds());
        layers[i].on('mouseover', function (e) {
            e.target.setStyle({'color':'red'});
        });
        layers[i].on('mouseout', function (e) {
            e.target.setStyle({'color':'green'});
        });
    }
    //coords = coords.map(function (p) { return [p[1], p[0], 0.05]; });

    //var heat = L.heatLayer(coords).addTo(map);
    //addHeat(coords);
    for (var i in tracks) {
        addTrack(tracks, i);
    }
    L.control.layers(null, layers).addTo(map);
</script>
<!--script src="./heat_data.js"></script>
<script>
    addHeat(coords);
</script-->
</body>
</html>
@ -10,15 +10,17 @@ import time
from celery import Celery

from analysis import log_analyzer as la
from analysis.analyzers import KMLRender, ActivityMapperRender
from analysis.analyzers.render.biogames import OEBRender
from clients.webclients import CLIENTS

FLASK_DB = 1
REDIS_HOST = "redis"
DATA_PATH = "/app/data/results/"
DATA_PATH = "/data/results/"

RENDERERS = { # TODO
    "KMLRender": KMLRender,
    "ActivityMapper": ActivityMapperRender,
    "OEBRender": OEBRender
}

app = Celery('tasks', backend='redis://redis', broker='redis://redis')
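RENDERERS maps the render name from a parsed config onto a renderer class, which the task instantiates by string. A small sketch of that dispatch, with a stub class standing in for the real renderers:

    class StubKMLRender:
        """Stands in for analysis.analyzers.KMLRender in this sketch."""
        def render(self, data, name=None):
            return ["/tmp/%s.kml" % (name or "out")]

    RENDERERS = {"KMLRender": StubKMLRender}

    render_name = "KMLRender"          # settings.render[0] in the task below
    render = RENDERERS[render_name]()  # look up the class by name, instantiate
    print(render.render([], name="demo"))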
@ -48,22 +50,29 @@ def analyze(config, log_ids, **kwargs):
    tmpdir = client.download_files(urls)
    log.info(tmpdir.name, list(os.scandir(tmpdir.name)))

    update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'))
    uid = str(uuid.uuid4())
    update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'), uid=uid)

    results = []
    settings = la.parse_settings(config)
    store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
    render = RENDERERS[settings.render[0]]()  # TODO
    files = render.render(store.get_all())

    uid = str(uuid.uuid4())
    results = []
    log.error(files)
    os.mkdir(os.path.join(DATA_PATH, uid))
    render = RENDERERS[settings.render[0]]()  # TODO
    files = []
    if settings.render[0] == "OEBRender":
        files.append(render.render_store(store))
    else:
        for category in store.get_categories():
            data = store.get_category(category)
            print(category, type(category))
            files = render.render(data, name=category[1])
    log.error(files)
    for file in files:
        try:
            head, tail = os.path.split(file)
            target = os.path.join(DATA_PATH, uid, tail)
            shutil.move(file, target)
            log.error(target)
            log.error(shutil.move(file, target))
            results.append(target)
        except FileNotFoundError as e:
            log.exception(e)
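One detail worth flagging in the new per-category branch: `files = render.render(data, name=category[1])` rebinds `files` on every loop pass, so only the last category's output ever reaches the move loop. If outputs from all categories should be kept, an accumulating variant would look like this sketch (same names as the code above, assuming `render.render` returns a list of paths, as the move loop implies):

    files = []
    if settings.render[0] == "OEBRender":
        files.append(render.render_store(store))
    else:
        for category in store.get_categories():
            data = store.get_category(category)
            # extend instead of rebinding, so earlier categories survive
            files.extend(render.render(data, name=category[1]))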