commit uncommited changes

parent 4a53aa5759
commit 29c04f2fdc

@@ -3,18 +3,19 @@ from typing import List
 from .analyzer import Analyzer, Result
 from .analyzer.biogames import BoardDurationAnalyzer, SimulationRoundsAnalyzer, ActivationSequenceAnalyzer, \
     BiogamesCategorizer, ActivityMapper, BiogamesStore, InstanceConfig, SimulationOrderAnalyzer, SimulationCategorizer, \
-    SimulationFlagsAnalyzer
+    SimulationFlagsAnalyzer, GameField_InstanceCategorizer
 from .analyzer.default import LogEntryCountAnalyzer, LocationAnalyzer, LogEntrySequenceAnalyzer, ActionSequenceAnalyzer, \
-    CategorizerStub, Store, ProgressAnalyzer, SimpleCategorizer
+    CategorizerStub, Store, ProgressAnalyzer, SimpleCategorizer, WhitelistAnalyzer
 from .analyzer.locomotion import LocomotionActionAnalyzer, CacheSequenceAnalyzer
 from .analyzer.mask import MaskSpatials
 from .render import Render
 from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender, \
     ActivityMapperRender, StoreRender, SimulationOrderRender, SimulationGroupRender
 from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender, LogEntryCountAnalyzerPlot, \
-    LogEntryCountCSV, KMLRender
+    LogEntryCountCSV, KMLRender, GeoJSON
 from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
     LocomotionActionRatioRender
+from .render.wip import get_all_data, plot_time_space_rel


 __FALLBACK__ = PrintRender
 __MAPPING__ = {

@@ -133,6 +133,23 @@ class BiogamesCategorizer(CategorizerStub):  # TODO: refactor
         return False


+class GameField_InstanceCategorizer(CategorizerStub):  # TODO: refactor
+    __name__ = "BiogamesCategorizer"
+
+    def __init__(self, settings: LogSettings):
+        super().__init__(settings)
+
+    def process(self, entry: dict) -> bool:
+        if self.key == "default":
+            if entry[self.settings.type_field] in self.settings.custom['instance_start']:
+                try:
+                    self.key = json_path(entry, self.settings.custom['instance_config_id']) + "_" + entry[self.settings.custom['instance_id']] + "_" + str(entry["timestamp"])
+                except KeyError as e:
+                    print(entry)
+                    raise e
+        return False
+
+
 class ActivityMapper(Analyzer):
     __name__ = "ActivityMapper"
     classes = {

@@ -158,6 +175,7 @@ class ActivityMapper(Analyzer):
         self.last_board = {}
         self.last_board_type = "other"
         self.last_coordinate = None
+        self.last_timestamp = None
         self.tracks = []
         self.track = None
         self.instance_config_id: str = None

@@ -173,21 +191,28 @@ class ActivityMapper(Analyzer):
             if board["extra_data"]["activity_type"] == "simu":
                 board["image"] = "simu.png"
                 continue
-            local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
+            try:
+                local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
                                             self.settings.source)
                 if local_file:
                     board['image'] = local_file
                 else:
+                    raise ValueError
+            except Exception as e:
                 board['image'] = "ERROR_FETCHING_FILE"
                 logger.error("error downloading board! %s %s %s", self.instance_config_id, board["sequence_id"],
                              board["board_id"])
+                logger.exception(e)
         else:
             board["image"] = "map.png"
         store.add(Result(type(self), {
-            "instance": self.instance_config_id,
-            "track": self.tracks,
-            "boards": self.timeline,
-            "colors": self.colors,
+            "type": "FeatureCollection",
+            "features": self.tracks,
+            "properties": {
+                "instance": self.instance_config_id,
+                "boards": self.timeline,
+                "colors": self.colors,
+            },
         }))

     def process(self, entry: dict) -> bool:

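Note: after this hunk the analyzer emits a proper GeoJSON FeatureCollection instead of the previous ad-hoc dict; instance, boards, and colors move under properties, and the renderers and JS viewer changed below are updated to match. A minimal sketch of the new payload shape (values illustrative, reconstructed from the hunk above):

    result_payload = {
        "type": "FeatureCollection",
        "features": [],  # LineString features collected in self.tracks
        "properties": {
            "instance": "config-id",  # self.instance_config_id
            "boards": [],             # self.timeline
            "colors": {},             # self.colors
        },
    }
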
@@ -242,18 +267,24 @@ class ActivityMapper(Analyzer):

     def add_location(self, entry):
         coordinates = json_path(entry, self.settings.custom['coordinates'])
-        self.track['coordinates'].append(coordinates)
+        self.track["geometry"]['coordinates'].append(coordinates)
+        self.track['properties']['coordTimes'].append(entry['timestamp'])  # FIXME
         self.last_coordinate = coordinates
+        self.last_timestamp = entry['timestamp']

     def add_track(self, **props):
         self.track['properties'].update(props)
+        if "activity_type" in self.track['properties'] and self.track['properties']['activity_type'] in self.colors:
+            if "stroke" not in self.track['properties']:
+                self.track['properties']['stroke'] = self.colors[self.track['properties']['activity_type']]
         self.tracks.append(self.track)
         self.track = self.new_track(props['end_timestamp'])
         if self.last_coordinate:
-            self.track['coordinates'].append(self.last_coordinate)
+            self.track["geometry"]['coordinates'].append(self.last_coordinate)
+            self.track['properties']['coordTimes'].append(self.last_timestamp)

     def new_track(self, timestamp):
-        return {"type": "LineString", "coordinates": [], "properties": {'start_timestamp': timestamp}}
+        return {"type": "Feature", "geometry": {"type": "LineString", "coordinates": []}, "properties": {'start_timestamp': timestamp, 'coordTimes': []}}


 class BiogamesStore(Store):

@@ -5,6 +5,26 @@ from analysis.util import json_path
 from . import Result, LogSettings, Analyzer, ResultStore


+class WhitelistAnalyzer(Analyzer):  # TODO
+    __name__ = "WhiteList"
+
+    def __init__(self, settings: LogSettings) -> None:
+        super().__init__(settings)
+        self.key = settings.custom["whitelist"]["key"]
+        self.filter_objs = settings.custom["whitelist"]["objs"]
+        self.valid_entries = settings.custom["whitelist"]["valid"]
+        self.blocked = False
+
+    def result(self, store: ResultStore, name=None) -> None:
+        store.add(Result(type(self), {"blocked": self.blocked}, name=name))
+
+    def process(self, entry: dict) -> bool:
+        if entry[self.settings.type_field] in self.filter_objs:
+            if json_path(entry, self.key) not in self.valid_entries:
+                self.blocked = True
+        return self.blocked
+
+
 class LocationAnalyzer(Analyzer):
     """
     store spatial log entries

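The new WhitelistAnalyzer pulls its filter rules from settings.custom. A hypothetical settings fragment matching the keys its constructor reads (the concrete values here are invented for illustration):

    custom = {
        "whitelist": {
            "key": "player/id",        # json_path into each matching entry
            "objs": ["SessionStart"],  # entry types the filter inspects
            "valid": ["alice", "bob"], # whitelisted key values
        }
    }

Once process() sees a filtered entry whose key value is not whitelisted it sets blocked and keeps returning True; the flag is never reset for the remainder of the log.
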
@@ -41,7 +41,7 @@ class LocomotionActionAnalyzer(Analyzer):
         self.current_cache = None
         self.last = None

-    def result(self, store: ResultStore) -> None:
+    def result(self, store: ResultStore, **kwargs) -> None:
         if self.last is not None:
             if self.current_cache is None:
                 self.locomotion.append(self.last - self.cache_time)

@@ -50,7 +50,14 @@ class LocomotionActionAnalyzer(Analyzer):
         self.last = None
         locomotion = sum(self.locomotion)
         action = sum(self.actions)
+        if action == 0:
+            action = 42
+            print("Division by zero")  # FIXME
         total = locomotion + action
+        if not self.last_timestamp:
+            self.last_timestamp = 0
+        if not self.instance_start:
+            self.instance_start = 0
         store.add(Result(type(self), {
             'locomotion_sum': locomotion,
             'action_sum': action,

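The action == 0 branch above substitutes an arbitrary sentinel (42) so a later division does not blow up; the FIXME marks it as a stopgap. A more conventional guard, assuming a locomotion/action ratio is what needs protecting (a sketch only — the actual divisor is not visible in this hunk):

    ratio = locomotion / action if action else float("nan")  # avoid ZeroDivisionError
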
@@ -155,7 +155,7 @@ class ActivityMapperRender(Render):
         files = []
         for result in self.filter(results):
             data = result.get()
-            path = os.path.join("/tmp", data['instance'] + "_" + str(name) + ".json")
+            path = os.path.join("/tmp", data["properties"]['instance'] + "_" + str(name) + ".json")
             with open(path, "w") as out:
                 json.dump(data, out, indent=1)
             files.append(path)

@@ -61,13 +61,16 @@ class KMLRender(SpatialRender, Render):
                          long=entry['location']['coordinates'][0])
                 for entry in result.get()
             ]
-            filename = str(result.name) + ".kml"
+            if name:
+                filename = str(name) + ".kml"
+            else:
+                filename = str(result.name) + ".kml"
             print(filename)
             with open(filename, "w") as out:
                 out.write(
                     KML_PATTERN.format(name=str(result.name), coordinates="\n".join(coords), when="\n".join(times)))
-            with open(filename + ".json", "w") as out:
-                json.dump(result.get(), out, indent=1)
+            #with open(filename + ".json", "w") as out:  #FIXME: why am I here??
+            #    json.dump(result.get(), out, indent=1)
             files.append(filename)
         return files

@@ -96,7 +99,10 @@ class GeoJSON(SpatialRender, Render):
                 #print(location)
                 coordinates.append(location["location"]["coordinates"])
                 times.append(location["timestamp"])
-            filename = str(result.name) + ".geojson"
+            if name:
+                filename = str(name) + ".geojson"
+            else:
+                filename = str(result.name) + ".geojson"
             with open(filename, "w") as out:
                 self.template["features"][0]["properties"] = {"times": times}
                 self.template["features"][0]["geometry"]["coordinates"] = coordinates

@@ -3,6 +3,7 @@ import json
 import numpy as np

 import analysis.analyzers
+from analysis import analyzers
 from analysis.util.geo import calc_distance


@@ -188,10 +189,11 @@ def get_all_data(store, sort=False, relative=True):
         if not log.analysis() == analyzers.ActivityMapper:
             continue
         result = defaultdict(lambda: defaultdict(lambda: 0))
-        for i in log.get()['track']:
-            coords = i['coordinates']
+        for i in log.get()['features']:
+            coords = i["geometry"]['coordinates']
+            print(coords)
             if len(coords) > 1:
-                distance = calc_distance(coords)
+                distance = calc_distance(json.dumps(i["geometry"]))
             else:
                 distance = 0.0
             result["space"][i['properties']['activity_type']] += distance

@@ -219,9 +221,9 @@ def get_all_data(store, sort=False, relative=True):
             value = percentage
         else:
             value = total
-        places[log.get()['instance']].append(value)
+        places[log.get()["properties"]['instance']].append(value)
         simus = defaultdict(lambda: 0)
-        for item in log.get()['boards']:
+        for item in log.get()["properties"]['boards']:
             if item["extra_data"]["activity_type"] == "simu":
                 simus[item["board_id"]] += 1
         simu_distribution[len(simus)] += 1

@@ -381,7 +383,7 @@ def plot_time_space_rel(combined, keys):
     print([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in ids])
     # plt.show()
     dpi = 200
-    plt.savefig("speed2.png", dpi=dpi)
+    plt.savefig("speed2_2019.png", dpi=dpi)


@@ -109,7 +109,7 @@ if __name__ == '__main__':
     # plot_time_space_rel(combined, keys)
     plot_data(combined, wip.keys)

-    if True:
+    if False:

         def store(x):
             pass

@@ -214,6 +214,21 @@ if __name__ == '__main__':
     print("\nsimplified\n")
     summary(stats_simple, stats_filtered_simple, "simplified")

+    if True:
+        settings: LogSettings = load_settings("time.json")
+        # log_ids = src_file("/app/log_data/oeb/oeb_2016_path")
+        log_ids = src_file("log_data/oeb/oeb_paths_host")
+        log_ids = src_file("/home/clemens/git/ma/test/src")
+        log_ids = src_file("/home/clemens/git/ma/project/log_data/neocartographer/index")
+        # log_ids = log_ids[0:10]
+        print(log_ids)
+        store: ResultStore = run_analysis(log_ids, settings, LOADERS, ResultStore(key_index=1))
+        results = {}
+        for cat in store.get_categories():
+            results[cat] = [result.get() for result in store.get_category(cat)]
+        with open("times_neo.json", "w") as out:
+            json.dump(results, out, indent=1)
+
     ####################
     #for cat in store.get_categories():
     #    render(analyzers.ActivityMapper, store.get_category(cat), name=cat)

@@ -6,7 +6,8 @@ from analysis.util import json_path
 logger = logging.getLogger(__name__)


-def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"):
+#def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"):  #FIXME: I was changed
+def download_board(board_id, instance_config_id, sequence_id, source, path="activity/data/results/"):
     local_file = os.path.join("static", instance_config_id, sequence_id, board_id)
     abs_path = os.path.join(path, local_file)
     if os.path.exists(abs_path):

@@ -12,7 +12,7 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
     try:
         loader.load(logfile)
     except BaseException as e:
-        raise RuntimeError(e)
+        raise RuntimeError(e, logfile)
     analyzers: List[Analyzer] = []
     log.debug("build analyzers")
     for analyzer in settings.analyzers:

@@ -28,7 +28,9 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
     return analyzers


-def run_analysis(log_ids: list, settings, loaders, result_store=ResultStore()):
+def run_analysis(log_ids: list, settings, loaders, result_store=None):
+    if not result_store:
+        result_store = ResultStore()
     store: ResultStore = result_store
     for log_id in log_ids:
         log.info("LOG_ID: "+ str(log_id))

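The signature change above fixes a classic Python pitfall: a default argument like result_store=ResultStore() is evaluated once at function-definition time, so every call to run_analysis() without an explicit store silently shared — and accumulated results into — the same ResultStore. A minimal demonstration of the pattern and the fix (sketch, not project code):

    def leaky(store=[]):    # default list created once, at def time
        store.append(1)
        return store

    leaky()  # [1]
    leaky()  # [1, 1] -- state leaks across calls

    def fixed(store=None):  # None default; the hunk above uses a falsy check
        if store is None:
            store = []
        store.append(1)
        return store
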
@@ -1,13 +1,7 @@
 <script
-        src="https://code.jquery.com/jquery-3.2.1.min.js"
-        integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
-        crossorigin="anonymous"></script>
-<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
-      integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
-      crossorigin=""/>
-<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
-        integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
-        crossorigin=""></script>
+        src="https://code.jquery.com/jquery-3.2.1.min.js"></script>
+<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"/>
+<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"></script>

 <script src="https://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>

@@ -22,4 +16,4 @@

 <!--div style="font-size:0.1px;position:absolute;bottom:0;">OSM Logo: CC-BY-SA
 http://wiki.openstreetmap.org/wiki/File:Mag_map-120x120.png
 </div-->

@@ -23,7 +23,7 @@ function loadData() {

     function styleTrack(feature) {
         var styles = {};
-        styles.color = data.colors[feature.properties.activity_type];
+        styles.color = data.properties.colors[feature.properties.activity_type];
         return styles;
     }

@@ -52,30 +52,35 @@ function loadData() {
     function onEachFeature(feature, layer) {
         layer.setStyle(styleTrack(feature));
         layer.on('click', onClick);
-        if (feature.coordinates.length > 1) {
-            coords = coords.concat(feature.coordinates.map(function (p) {
+        if (feature.geometry.coordinates.length > 1) {
+            coords = coords.concat(feature.geometry.coordinates.map(function (p) {
                 return [p[1], p[0], 0.1];
             }));
         }
     }

-    var track = L.geoJSON(data['track'], {
+    var track = L.geoJSON(data['features'], {
         //style: styleTrack,
         onEachFeature: onEachFeature
     }).addTo(map);

     map.fitBounds(track.getBounds());

-    var heat = L.heatLayer(coords);
-    L.control.layers(tiles, {"heatmap": heat}).addTo(map);
+    //var heat = L.heatLayer(coords);
+    //L.control.layers(tiles, {"heatmap": heat}).addTo(map);

     var list = $("<ul />");
     var current = {
-        "pos": data["boards"][1].coordinate.coordinates
+        "pos": data.properties["boards"][0].coordinate.coordinates
     };
+    var i = 0;
+    while (current.pos == undefined) {
+        i += 1;
+        current.pos = data.properties["boards"][i].coordinate.coordinates;
+    }
     console.log(current);
     var marker = L.marker([current.pos[1], current.pos[0]]).addTo(map);
-    $.each(data["boards"], function (index, entry) {
+    $.each(data.properties["boards"], function (index, entry) {
         //console.log(index, entry);
         var item = $("<li>", {class: entry.extra_data.activity_type});
         var container = $("<div>", {class: "board"});

@@ -95,11 +100,11 @@ function loadData() {
         container.appendTo(item);
         item.appendTo(list);
     });
-    current.board = images[data["boards"][1].timestamp];
+    current.board = images[data.properties["boards"][1].timestamp];
     list.appendTo(".sequence");
 });
 }

 $(document).ready(function () {
     loadData();
 });

@@ -53,7 +53,7 @@ KML = """{
     "type": "Biogames",
     "username": "ba",
     "password": "853451",
-    "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+    "host": "http://biodiv2govm.kinf.wiai.uni-bamberg.de"
   },
   "render": [
     "KMLRender"

@@ -115,7 +115,7 @@ ACTIVITY = """{
     "type": "Biogames",
     "username": "ba",
     "password": "853451",
-    "host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
+    "host": "http://biodiv2govm.kinf.wiai.uni-bamberg.de"
   },
   "render": [
     "ActivityMapper"

@@ -157,8 +157,8 @@ KML_geo = """{

 CONFIGS = {
     "Biogames": {
-        "KML": KML,
-        "ActivityMapper": ACTIVITY
+        "ActivityMapper": ACTIVITY,
+        "KML": KML
     },
     "Geogames": {
         "KML": KML_geo,

@@ -171,7 +171,9 @@ URLS = {
 }

 HOSTS = {
-    "Biogames": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
+    #"Biogames": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
+    #"Biogames": "http://www.biodiv2go.de",
+    "Biogames": "http://biodiv2govm.kinf.wiai.uni-bamberg.de/",
     "Geogames": "http://log_data/",
 }

@@ -1,5 +1,5 @@
 body {
-    background-color: aqua;
+/*    background-color: limegreen;*/
 }
 #data{
     display: none;

@@ -48,25 +48,27 @@ def analyze(config, log_ids, **kwargs):
     tmpdir = client.download_files(urls)
     log.info(tmpdir.name, list(os.scandir(tmpdir.name)))

-    update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'))
+    uid = str(uuid.uuid4())
+    update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'), uid=uid)

+    results = []
     settings = la.parse_settings(config)
     store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
-    render = RENDERERS[settings.render[0]]()  # TODO
-    files = render.render(store.get_all())
-
-    uid = str(uuid.uuid4())
-    results = []
-    log.error(files)
     os.mkdir(os.path.join(DATA_PATH, uid))
-    for file in files:
-        try:
-            head, tail = os.path.split(file)
-            target = os.path.join(DATA_PATH, uid, tail)
-            shutil.move(file, target)
-            results.append(target)
-        except FileNotFoundError as e:
-            log.exception(e)
+    for category in store.get_categories():
+        data = store.get_category(category)
+        render = RENDERERS[settings.render[0]]()  # TODO
+        print(category, type(category))
+        files = render.render(data, name=category[1])
+        log.error(files)
+        for file in files:
+            try:
+                head, tail = os.path.split(file)
+                target = os.path.join(DATA_PATH, uid, tail)
+                shutil.move(file, target)
+                results.append(target)
+            except FileNotFoundError as e:
+                log.exception(e)
     tmpdir.cleanup()

     update_status(kwargs['username'], kwargs['name'], ('done', 'FINISHED'), results=results)