commit uncommitted changes

master
Clemens Klug 2019-10-02 13:20:51 +02:00
parent 4a53aa5759
commit 29c04f2fdc
15 changed files with 159 additions and 71 deletions

View File

@ -3,18 +3,19 @@ from typing import List
from .analyzer import Analyzer, Result
from .analyzer.biogames import BoardDurationAnalyzer, SimulationRoundsAnalyzer, ActivationSequenceAnalyzer, \
BiogamesCategorizer, ActivityMapper, BiogamesStore, InstanceConfig, SimulationOrderAnalyzer, SimulationCategorizer, \
SimulationFlagsAnalyzer
SimulationFlagsAnalyzer, GameField_InstanceCategorizer
from .analyzer.default import LogEntryCountAnalyzer, LocationAnalyzer, LogEntrySequenceAnalyzer, ActionSequenceAnalyzer, \
CategorizerStub, Store, ProgressAnalyzer, SimpleCategorizer
CategorizerStub, Store, ProgressAnalyzer, SimpleCategorizer, WhitelistAnalyzer
from .analyzer.locomotion import LocomotionActionAnalyzer, CacheSequenceAnalyzer
from .analyzer.mask import MaskSpatials
from .render import Render
from .render.biogames import SimulationRoundsRender, BoardDurationHistRender, BoardDurationBoxRender, \
ActivityMapperRender, StoreRender, SimulationOrderRender, SimulationGroupRender
from .render.default import PrintRender, JSONRender, TrackRender, HeatMapRender, LogEntryCountAnalyzerPlot, \
LogEntryCountCSV, KMLRender
LogEntryCountCSV, KMLRender, GeoJSON
from .render.locomotion import LocomotionActionRelativeRender, LocomotionActionAbsoluteRender, \
LocomotionActionRatioRender
from .render.wip import get_all_data, plot_time_space_rel
__FALLBACK__ = PrintRender
__MAPPING__ = {

View File

@ -133,6 +133,23 @@ class BiogamesCategorizer(CategorizerStub): # TODO: refactor
return False
class GameField_InstanceCategorizer(CategorizerStub):  # TODO: refactor
    """Categorize GameField log entries into per-instance groups.

    While the categorizer key is still the fallback ``"default"``, the first
    entry whose type appears in ``settings.custom['instance_start']`` fixes
    the key to ``<instance_config_id>_<instance_id>_<timestamp>``; every
    subsequent entry of the log falls into that category.
    """

    # NOTE(review): looks like a copy-paste leftover from BiogamesCategorizer,
    # but downstream mappings may key on this name — confirm before renaming.
    __name__ = "BiogamesCategorizer"

    def __init__(self, settings: LogSettings):
        super().__init__(settings)

    def process(self, entry: dict) -> bool:
        """Derive the category key from the instance-start entry.

        Always returns ``False`` (this analyzer never blocks an entry).
        """
        # BUG FIX: original compared `self.key is "default"` — identity
        # comparison against a str literal only works by CPython interning
        # accident; use equality instead.
        if self.key == "default":
            if entry[self.settings.type_field] in self.settings.custom['instance_start']:
                try:
                    self.key = (json_path(entry, self.settings.custom['instance_config_id'])
                                + "_" + entry[self.settings.custom['instance_id']]
                                + "_" + str(entry["timestamp"]))
                except KeyError as e:
                    # Dump the offending entry for debugging before re-raising.
                    print(entry)
                    raise e
        return False
class ActivityMapper(Analyzer):
__name__ = "ActivityMapper"
classes = {
@ -158,6 +175,7 @@ class ActivityMapper(Analyzer):
self.last_board = {}
self.last_board_type = "other"
self.last_coordinate = None
self.last_timestamp = None
self.tracks = []
self.track = None
self.instance_config_id: str = None
@ -173,21 +191,28 @@ class ActivityMapper(Analyzer):
if board["extra_data"]["activity_type"] == "simu":
board["image"] = "simu.png"
continue
try:
local_file = download_board(board["board_id"], self.instance_config_id, board["sequence_id"],
self.settings.source)
if local_file:
board['image'] = local_file
else:
raise ValueError
except Exception as e:
board['image'] = "ERROR_FETCHING_FILE"
logger.error("error downloading board! %s %s %s", self.instance_config_id, board["sequence_id"],
board["board_id"])
logger.exception(e)
else:
board["image"] = "map.png"
store.add(Result(type(self), {
"type": "FeatureCollection",
"features": self.tracks,
"properties": {
"instance": self.instance_config_id,
"track": self.tracks,
"boards": self.timeline,
"colors": self.colors,
},
}))
def process(self, entry: dict) -> bool:
@ -242,18 +267,24 @@ class ActivityMapper(Analyzer):
def add_location(self, entry):
coordinates = json_path(entry, self.settings.custom['coordinates'])
self.track['coordinates'].append(coordinates)
self.track["geometry"]['coordinates'].append(coordinates)
self.track['properties']['coordTimes'].append(entry['timestamp']) #FIXME
self.last_coordinate = coordinates
self.last_timestamp = entry['timestamp']
def add_track(self, **props):
self.track['properties'].update(props)
if "activity_type" in self.track['properties'] and self.track['properties']['activity_type'] in self.colors:
if not "stroke" in self.track['properties']:
self.track['properties']['stroke'] = self.colors[self.track['properties']['activity_type']]
self.tracks.append(self.track)
self.track = self.new_track(props['end_timestamp'])
if self.last_coordinate:
self.track['coordinates'].append(self.last_coordinate)
self.track["geometry"]['coordinates'].append(self.last_coordinate)
self.track['properties']['coordTimes'].append(self.last_timestamp)
def new_track(self, timestamp):
    """Start a fresh GeoJSON Feature (LineString) beginning at *timestamp*.

    ``properties.coordTimes`` collects one timestamp per appended coordinate
    so renderers can animate the track over time.
    """
    # The diff left the superseded bare-LineString return above the new one,
    # making the Feature form unreachable; keep only the GeoJSON Feature so
    # the output is a valid FeatureCollection member (RFC 7946).
    return {
        "type": "Feature",
        "geometry": {"type": "LineString", "coordinates": []},
        "properties": {"start_timestamp": timestamp, "coordTimes": []},
    }
class BiogamesStore(Store):

View File

@ -5,6 +5,26 @@ from analysis.util import json_path
from . import Result, LogSettings, Analyzer, ResultStore
class WhitelistAnalyzer(Analyzer):  # TODO
    """Mark a log as blocked when a checked field holds a non-whitelisted value.

    Configuration is read from ``settings.custom["whitelist"]``:
    ``key`` (JSON path of the field to check), ``objs`` (entry types the
    check applies to) and ``valid`` (allowed values for that field).
    """
    __name__ = "WhiteList"

    def __init__(self, settings: LogSettings) -> None:
        super().__init__(settings)
        # JSON path of the field to validate inside matching entries.
        self.key = settings.custom["whitelist"]["key"]
        # Entry types this filter applies to.
        self.filter_objs = settings.custom["whitelist"]["objs"]
        # Whitelisted values for the checked field.
        self.valid_entries = settings.custom["whitelist"]["valid"]
        # Sticky flag: once one bad value is seen, the log stays blocked.
        self.blocked = False

    def result(self, store: ResultStore, name=None) -> None:
        """Store the final blocked/allowed verdict for this log."""
        store.add(Result(type(self), {"blocked": self.blocked}, name=name))

    def process(self, entry: dict) -> bool:
        """Check one entry; return the (sticky) blocked state."""
        if entry[self.settings.type_field] in self.filter_objs:
            # Idiom fix: `x not in y` instead of `not x in y`.
            if json_path(entry, self.key) not in self.valid_entries:
                self.blocked = True
        return self.blocked
class LocationAnalyzer(Analyzer):
"""
store spatial log entries

View File

@ -41,7 +41,7 @@ class LocomotionActionAnalyzer(Analyzer):
self.current_cache = None
self.last = None
def result(self, store: ResultStore) -> None:
def result(self, store: ResultStore, **kwargs) -> None:
if self.last is not None:
if self.current_cache is None:
self.locomotion.append(self.last - self.cache_time)
@ -50,7 +50,14 @@ class LocomotionActionAnalyzer(Analyzer):
self.last = None
locomotion = sum(self.locomotion)
action = sum(self.actions)
if action== 0:
action = 42
print("Division by zero") #FIXME
total = locomotion + action
if not self.last_timestamp:
self.last_timestamp = 0
if not self.instance_start:
self.instance_start = 0
store.add(Result(type(self), {
'locomotion_sum': locomotion,
'action_sum': action,

View File

@ -155,7 +155,7 @@ class ActivityMapperRender(Render):
files = []
for result in self.filter(results):
data = result.get()
path = os.path.join("/tmp", data['instance'] + "_" + str(name) + ".json")
path = os.path.join("/tmp", data["properties"]['instance'] + "_" + str(name) + ".json")
with open(path, "w") as out:
json.dump(data, out, indent=1)
files.append(path)

View File

@ -61,13 +61,16 @@ class KMLRender(SpatialRender, Render):
long=entry['location']['coordinates'][0])
for entry in result.get()
]
if name:
filename = str(name) + ".kml"
else:
filename = str(result.name) + ".kml"
print(filename)
with open(filename, "w") as out:
out.write(
KML_PATTERN.format(name=str(result.name), coordinates="\n".join(coords), when="\n".join(times)))
with open(filename + ".json", "w") as out:
json.dump(result.get(), out, indent=1)
#with open(filename + ".json", "w") as out: #FIXME: why am I here??
# json.dump(result.get(), out, indent=1)
files.append(filename)
return files
@ -96,6 +99,9 @@ class GeoJSON(SpatialRender, Render):
#print(location)
coordinates.append(location["location"]["coordinates"])
times.append(location["timestamp"])
if name:
filename = str(name) + ".geojson"
else:
filename = str(result.name) + ".geojson"
with open(filename, "w") as out:
self.template["features"][0]["properties"] = {"times": times}

View File

@ -3,6 +3,7 @@ import json
import numpy as np
import analysis.analyzers
from analysis import analyzers
from analysis.util.geo import calc_distance
@ -188,10 +189,11 @@ def get_all_data(store, sort=False, relative=True):
if not log.analysis() == analyzers.ActivityMapper:
continue
result = defaultdict(lambda: defaultdict(lambda: 0))
for i in log.get()['track']:
coords = i['coordinates']
for i in log.get()['features']:
coords = i["geometry"]['coordinates']
print(coords)
if len(coords) > 1:
distance = calc_distance(coords)
distance = calc_distance(json.dumps(i["geometry"]))
else:
distance = 0.0
result["space"][i['properties']['activity_type']] += distance
@ -219,9 +221,9 @@ def get_all_data(store, sort=False, relative=True):
value = percentage
else:
value = total
places[log.get()['instance']].append(value)
places[log.get()["properties"]['instance']].append(value)
simus = defaultdict(lambda: 0)
for item in log.get()['boards']:
for item in log.get()["properties"]['boards']:
if item["extra_data"]["activity_type"] == "simu":
simus[item["board_id"]] += 1
simu_distribution[len(simus)] += 1
@ -381,7 +383,7 @@ def plot_time_space_rel(combined, keys):
print([CONFIG_NAMES[i] if i in CONFIG_NAMES else "---" for i in ids])
# plt.show()
dpi = 200
plt.savefig("speed2.png", dpi=dpi)
plt.savefig("speed2_2019.png", dpi=dpi)

View File

@ -109,7 +109,7 @@ if __name__ == '__main__':
# plot_time_space_rel(combined, keys)
plot_data(combined, wip.keys)
if True:
if False:
def store(x):
pass
@ -214,6 +214,21 @@ if __name__ == '__main__':
print("\nsimplified\n")
summary(stats_simple, stats_filtered_simple, "simplified")
if True:
settings: LogSettings = load_settings("time.json")
# log_ids = src_file("/app/log_data/oeb/oeb_2016_path")
log_ids = src_file("log_data/oeb/oeb_paths_host")
log_ids = src_file("/home/clemens/git/ma/test/src")
log_ids = src_file("/home/clemens/git/ma/project/log_data/neocartographer/index")
# log_ids = log_ids[0:10]
print(log_ids)
store: ResultStore = run_analysis(log_ids, settings, LOADERS, ResultStore(key_index=1))
results = {}
for cat in store.get_categories():
results[cat] = [result.get() for result in store.get_category(cat)]
with open("times_neo.json", "w") as out:
json.dump(results, out, indent=1)
####################
#for cat in store.get_categories():
# render(analyzers.ActivityMapper, store.get_category(cat), name=cat)

View File

@ -6,7 +6,8 @@ from analysis.util import json_path
logger = logging.getLogger(__name__)
def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"):
#def download_board(board_id, instance_config_id, sequence_id, source, path="/data/results/"): #FIXME: I was changed
def download_board(board_id, instance_config_id, sequence_id, source, path="activity/data/results/"):
local_file = os.path.join("static", instance_config_id, sequence_id, board_id)
abs_path = os.path.join(path, local_file)
if os.path.exists(abs_path):

View File

@ -12,7 +12,7 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
try:
loader.load(logfile)
except BaseException as e:
raise RuntimeError(e)
raise RuntimeError(e, logfile)
analyzers: List[Analyzer] = []
log.debug("build analyzers")
for analyzer in settings.analyzers:
@ -28,7 +28,9 @@ def process_log(logfile: str, settings: LogSettings, loaders) -> List[Analyzer]:
return analyzers
def run_analysis(log_ids: list, settings, loaders, result_store=ResultStore()):
def run_analysis(log_ids: list, settings, loaders, result_store=None):
if not result_store:
result_store = ResultStore()
store: ResultStore = result_store
for log_id in log_ids:
log.info("LOG_ID: "+ str(log_id))

View File

@ -1,13 +1,7 @@
<script
src="https://code.jquery.com/jquery-3.2.1.min.js"
integrity="sha256-hwg4gsxgFZhOsEEamdOYGBf13FyQuiTwlAQgxVSNgt4="
crossorigin="anonymous"></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"
integrity="sha512-M2wvCLH6DSRazYeZRIm1JnYyh22purTM+FDB5CsyxtQJYeKq83arPe5wgbNmcFXGqiSH2XR8dT/fJISVA1r/zQ=="
crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"
integrity="sha512-lInM/apFSqyy1o6s89K4iQUKg6ppXEgsVxT35HbzUupEVRh2Eu9Wdl4tHj7dZO0s1uvplcYGmt3498TtHq+log=="
crossorigin=""></script>
src="https://code.jquery.com/jquery-3.2.1.min.js"></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"/>
<script src="https://unpkg.com/leaflet@1.2.0/dist/leaflet.js"></script>
<script src="https://rawgit.com/Leaflet/Leaflet.heat/gh-pages/dist/leaflet-heat.js"></script>

View File

@ -23,7 +23,7 @@ function loadData() {
function styleTrack(feature) {
    // Build the Leaflet path style for one track feature. Colors now live
    // under the FeatureCollection's top-level "properties.colors" map,
    // keyed by the feature's activity type. (The superseded lookup of the
    // pre-GeoJSON `data.colors` was a dead assignment and is removed.)
    var styles = {};
    styles.color = data.properties.colors[feature.properties.activity_type];
    return styles;
}
@ -52,30 +52,35 @@ function loadData() {
function onEachFeature(feature, layer) {
    // Style and wire up each rendered track, and collect its points for the
    // heatmap layer. Coordinates moved under feature.geometry when the data
    // became proper GeoJSON Features; the interleaved pre-GeoJSON lines from
    // the diff are dropped so the braces balance again.
    layer.setStyle(styleTrack(feature));
    layer.on('click', onClick);
    if (feature.geometry.coordinates.length > 1) {
        coords = coords.concat(feature.geometry.coordinates.map(function (p) {
            // Heat points are [lat, lng, intensity]; GeoJSON stores [lng, lat].
            return [p[1], p[0], 0.1];
        }));
    }
}
var track = L.geoJSON(data['track'], {
var track = L.geoJSON(data['features'], {
//style: styleTrack,
onEachFeature: onEachFeature
}).addTo(map);
map.fitBounds(track.getBounds());
var heat = L.heatLayer(coords);
L.control.layers(tiles, {"heatmap": heat}).addTo(map);
//var heat = L.heatLayer(coords);
//L.control.layers(tiles, {"heatmap": heat}).addTo(map);
var list = $("<ul />");
var current = {
"pos": data["boards"][1].coordinate.coordinates
"pos": data.properties["boards"][0].coordinate.coordinates
};
var i = 0;
while (current.pos == undefined) {
i+=1;
current.pos = data.properties["boards"][i].coordinate.coordinates;
}
console.log(current);
var marker = L.marker([current.pos[1], current.pos[0]]).addTo(map);
$.each(data["boards"], function (index, entry) {
$.each(data.properties["boards"], function (index, entry) {
//console.log(index, entry);
var item = $("<li>", {class: entry.extra_data.activity_type});
var container = $("<div>", {class: "board"});
@ -95,7 +100,7 @@ function loadData() {
container.appendTo(item);
item.appendTo(list);
});
current.board = images[data["boards"][1].timestamp];
current.board = images[data.properties["boards"][1].timestamp];
list.appendTo(".sequence");
});
}

View File

@ -53,7 +53,7 @@ KML = """{
"type": "Biogames",
"username": "ba",
"password": "853451",
"host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
"host": "http://biodiv2govm.kinf.wiai.uni-bamberg.de"
},
"render": [
"KMLRender"
@ -115,7 +115,7 @@ ACTIVITY = """{
"type": "Biogames",
"username": "ba",
"password": "853451",
"host": "http://biogames.potato.kinf.wiai.uni-bamberg.de"
"host": "http://biodiv2govm.kinf.wiai.uni-bamberg.de"
},
"render": [
"ActivityMapper"
@ -157,8 +157,8 @@ KML_geo = """{
CONFIGS = {
"Biogames": {
"KML": KML,
"ActivityMapper": ACTIVITY
"ActivityMapper": ACTIVITY,
"KML": KML
},
"Geogames": {
"KML": KML_geo,
@ -171,7 +171,9 @@ URLS = {
}
HOSTS = {
"Biogames": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
#"Biogames": "http://biogames.potato.kinf.wiai.uni-bamberg.de",
#"Biogames": "http://www.biodiv2go.de",
"Biogames": "http://biodiv2govm.kinf.wiai.uni-bamberg.de/",
"Geogames": "http://log_data/",
}

View File

@ -1,5 +1,5 @@
body {
background-color: aqua;
/* background-color: limegreen;*/
}
#data{
display: none;

View File

@ -48,17 +48,19 @@ def analyze(config, log_ids, **kwargs):
tmpdir = client.download_files(urls)
log.info(tmpdir.name, list(os.scandir(tmpdir.name)))
update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'))
uid = str(uuid.uuid4())
update_status(kwargs['username'], kwargs['name'], ('start', 'RUNNING'), uid=uid)
results = []
settings = la.parse_settings(config)
store = la.run_analysis([p.path for p in os.scandir(tmpdir.name)], settings, la.LOADERS)
render = RENDERERS[settings.render[0]]() # TODO
files = render.render(store.get_all())
uid = str(uuid.uuid4())
results = []
log.error(files)
os.mkdir(os.path.join(DATA_PATH, uid))
for category in store.get_categories():
data = store.get_category(category)
render = RENDERERS[settings.render[0]]() # TODO
print(category, type(category))
files = render.render(data, name=category[1])
log.error(files)
for file in files:
try:
head, tail = os.path.split(file)