diff --git a/.gitignore b/.gitignore
index 7f6756c..d91951a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
-alfred.json
-logs/*
\ No newline at end of file
+*.json
+logs/*
+www/logs/*
diff --git a/README.md b/README.md
index 3921163..36e09e3 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,11 @@
gluon-alfred-vis
================
-Lists all nodes in Freifunk by analysing alfred.json
+Lists all nodes in Freifunk by analysing `alfred.json`
-there has to be a file `alfred.json` in the root folder of this krepository
+The file `alfred.json` is downloaded regularly into the `www` folder of this repository by a `cron` job that calls the script `update-json.sh`. To set this up, copy the `crontab` file into your `/etc/cron.d/` folder and adapt it to your needs.
+
+The scripts `alfred-log.py`, `nodes-log.py` and `update-json.sh` from the `bin` folder have to stay in the same folder, for example `/opt/ff/gluon-alfred-vis/bin/`.
+
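+You can also run the update script manually; the paths and URL below are only examples, adapt them to your setup:
+
+    /opt/ff/gluon-alfred-vis/bin/update-json.sh /opt/ff/gluon-alfred-vis/bin /opt/ff/gluon-alfred-vis/www http://freifunk.in-kiel.de/alfred.json
+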
+ + *git repository:* https://github.com/ffnord/gluon-alfred-vis
+ + *online version Kiel:* http://freifunk.discovibration.de/gluon-alfred-vis/www/alfred.html
+ + *online version Hamburg:* http://freifunk.discovibration.de/ffhh/www/alfred.html
\ No newline at end of file
diff --git a/alfred.html b/alfred.html
deleted file mode 100644
index bc7547a..0000000
--- a/alfred.html
+++ /dev/null
@@ -1,185 +0,0 @@
-
-
-
-
-
diff --git a/bin/alfred-log.py b/bin/alfred-log.py
new file mode 100755
index 0000000..0ed58ad
--- /dev/null
+++ b/bin/alfred-log.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 -*-
+
+"""
+This script splits all nodes into single files under logs/nodes and creates an alfred_offline.json file
+
+Syntax: python alfred-log.py < alfred.json
+"""
+
+import json, sys, os, datetime, time
+
+# timestamp to be added in alfred_offline.json
+timestamp = time.time()
+h_datetime = datetime.datetime.fromtimestamp(timestamp).strftime('%d.%m %H:%M:%S')
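+# h_datetime is a human-readable form of the timestamp, e.g. "24.06 13:37:05" (day.month hour:minute:second)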
+
+try:
+    nodes=json.load(sys.stdin)
+except ValueError:
+    sys.exit('No JSON object could be decoded')
+
+folder='logs/nodes/'
+hostnames=[]
+# write all nodes in separate files under logs/nodes/
+for i in nodes:
+    nodes[i]['last_seen_h']=h_datetime
+    nodes[i]['last_seen']=timestamp
+    with open(folder+nodes[i]['hostname'], 'w') as fp:
+        json.dump(nodes[i], fp)
+    hostnames.append(nodes[i]['hostname'])
+
+ons=[]
+
+# look for files not in the nodes array
+for on_hostname in os.listdir('logs/nodes'):
+    if on_hostname not in hostnames:
+        with open(folder+on_hostname, 'r') as on_fp:
+            on_json=on_fp.read()
+        on=json.loads(on_json)
+        if 'distribution' not in on:
+            try:
+                ons.append('"%s":%s' % (on["network"]["mac"], on_json))
+            except:
+                print(on)
+                sys.exit('ERROR: ["network"]["mac"] not in JSON object')
+
+
+# offline nodes (on):
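+# the resulting alfred_offline.json roughly looks like (sketch):
+#   {"additional_data": {"datetime": "...", "timestamp": "..."}, "<mac>": {<node data>}, ...}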
+offline=open('alfred_offline.json', 'w')
+joined_ons=",".join(ons)
+# if there are no offline nodes the join is empty, so insert a dummy entry to keep the JSON valid
+if joined_ons=='':
+    joined_ons='"0":{}'
+offline.write('{"additional_data": {"datetime": "%s", "timestamp": "%s"}, %s}' % (h_datetime, timestamp, joined_ons))
+offline.close()
diff --git a/bin/nodes-log.py b/bin/nodes-log.py
new file mode 100755
index 0000000..42a991f
--- /dev/null
+++ b/bin/nodes-log.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 -*-
+
+"""
+This script splits all legacy nodes into single files under logs/nodes_legacy and creates a nodes_offline.json file
+
+Syntax: python nodes-log.py < nodes.json
+"""
+
+import json, sys, os, datetime, time
+
+# timestamp to be added in nodes_offline.json
+timestamp = time.time()
+h_datetime = datetime.datetime.fromtimestamp(timestamp).strftime('%d.%m %H:%M:%S')
+
+try:
+    nodes_data=json.load(sys.stdin)
+except ValueError:
+    sys.exit('No JSON object could be decoded')
+
+nodes=nodes_data['nodes']
+
+folder='logs/nodes_legacy/'
+hostnames=[]
+# write all online nodes in separate files under logs/nodes_legacy/
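+# only nodes that are online and explicitly flagged as legacy are written out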
+for i in range(0, len(nodes)):
+    if 'flags' in nodes[i] and nodes[i]['flags']['online'] and 'legacy' in nodes[i]['flags'] and nodes[i]['flags']['legacy']:
+        if nodes[i]['name']=='':
+            nodes[i]['name']=nodes[i]['id']
+        nodes[i]['last_seen_h']=h_datetime
+        nodes[i]['last_seen']=timestamp
+        with open(folder+nodes[i]['name'], 'w') as fp:
+            json.dump(nodes[i], fp)
+        hostnames.append(nodes[i]['name'])
+
+ons=[]
+
diff --git a/bin/update-json.sh b/bin/update-json.sh
new file mode 100755
index 0000000..7588db3
--- /dev/null
+++ b/bin/update-json.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+# This script downloads the latest alfred.json (and optionally the legacy nodes.json), keeps dated copies under logs/ in the www folder of your application,
+# and calls alfred-log.py, which splits all nodes into single files under logs/nodes and creates an alfred_offline.json file.
+
+BINPATH="$1"
+WWWPATH="$2"
+URL="$3"
+LEGACY_URL="$4"
+
+# call this script regularly as a cron job to get the latest alfred.json file
+
+if [ $# -ne 3 -a $# -ne 4 ] ; then
+    if [ "$1" != "--help" ]; then
+        echo "ERROR: wrong number of parameters"
+        echo
+    fi
+    echo "Syntax:"
+    echo "$0 /path/to/bin /path/to/www http://freifunk.path/to/.../alfred.json [http://freifunk.path/to/.../nodes.json]"
+    echo
+    echo "The fourth parameter for the legacy nodes.json is optional."
+    echo
+    exit 1
+fi
+
+cd "${WWWPATH}" || exit 1
+
+# get the current json file from $URL
+curl -o alfred.json "$URL"
+
+mkdir -p logs/nodes
+
+# keep a dated backup copy of the new file for possible later analysis
+cp alfred.json logs/alfred_$(date +%y%m%d-%H%M%S).json
+
+# call alfred-log.py to split all single nodes under logs/nodes
+${BINPATH}/alfred-log.py < alfred.json
+
+# optionally download the legacy nodes.json
+if [ "$LEGACY_URL" != "" ]; then
+    curl -o nodes.json "$LEGACY_URL"
+    mkdir -p logs/nodes_legacy  # nodes-log.py writes its per-node files into this folder
+    cp nodes.json logs/nodes_$(date +%y%m%d-%H%M%S).json
+    ${BINPATH}/nodes-log.py < nodes.json
+fi
\ No newline at end of file
diff --git a/crontab b/crontab
new file mode 100644
index 0000000..67f8aa8
--- /dev/null
+++ b/crontab
@@ -0,0 +1,2 @@
+# to download the alfred json file every 10 minutes:
+*/10 * * * * www-data /opt/ff/gluon-alfred-vis/bin/update-json.sh /opt/ff/gluon-alfred-vis/bin /opt/ff/gluon-alfred-vis/www http://.../alfred.json > /dev/null
diff --git a/update-json.sh b/update-json.sh
deleted file mode 100644
index 207eabf..0000000
--- a/update-json.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-# call this script regularly as a cron job to get the latest alfred.json file
-
-wget http://freifunk.in-kiel.de/alfred.json -O alfred.json
-mkdir -p logs
-cp alfred.json logs/alfred_`date "+%y%m%d-%H%M%S"`.json
-
-# to download the alfred json file every 10 minutes add this to your crontab:
-#*/10 * * * * some_user cd /path/to/gluon-alfred-vis; bash update-json.sh > /dev/null
-
diff --git a/www/alfred.html b/www/alfred.html
new file mode 100644
index 0000000..6c91936
--- /dev/null
+++ b/www/alfred.html
@@ -0,0 +1,380 @@
+
+
+
+ FFKI - Alfred JSON Status
+
+
+
+
+
+
+
+
+
+
+