#!/bin/bash
#
# Copy every measurement of the InfluxDB bucket "iobroker/global" into
# "iobroker2/global", one measurement at a time, split into time-sliced
# batches and throttled by available memory (see the loop below).
#
# Requires the influx 2.x CLI with a configured default org/token.

declare -i counter batch_counter

# Target number of data points per copy batch; the time window of each
# measurement is divided into (count / batch_size + 1) slices.
batch_size=10000

# The measurement list is comma-separated (annotated CSV); the read
# loop below splits its lines on commas.
IFS=","

# Work file holding the raw measurement list.
datei=/tmp/measurements_list
counter=0

# Dump all measurement names of the source bucket into the work file.
# Abort early if the CLI call fails, otherwise the loop would silently
# process an empty/stale file.
influx query 'import "influxdata/influxdb/schema" schema.measurements(bucket: "iobroker/global")' --raw > "$datei" \
    || { echo "influx query fuer die Measurement-Liste fehlgeschlagen" >&2; exit 1; }
# Drop the last line of the raw output (not a measurement row).
sed -i '$d' "$datei"

# --- helpers -----------------------------------------------------------

# meta_field AGGREGATE COLUMN MEASUREMENT
# Run a metadata Flux query (AGGREGATE is count/first/last) against the
# source bucket for MEASUREMENT, take line 5 of the raw annotated-CSV
# reply (the first data row), and print whitespace-field COLUMN after
# mapping commas to spaces.  awk's default FS collapses blank runs, so
# this yields the same fields as the original unquoted-IFS splitting.
# Trailing tab/CR/LF (influx emits CRLF) is stripped from the result.
meta_field() {
    local aggregate=$1 column=$2 m=$3 q
    q="from(bucket:\"iobroker/global\") |> range(start:-20y, stop: now()) |> filter(fn: (r) => r._measurement == \"$m\") |> filter(fn: (r) => r._field == \"value\") |> drop(columns: [\"ack\", \"from\", \"q\"]) |> sort(columns: [\"_time\"]) |> ${aggregate}() |> drop(columns: [\"_start\", \"_stop\", \"_field\"])"
    /usr/bin/influx query "$q" --raw \
        | head -n 5 | tail -n 1 \
        | tr ',' ' ' \
        | awk -v c="$column" '{print $c}' \
        | tr -d '\t\r\n'
}

# Block until `free` reports at least 700000 KiB available (column 7 of
# the "Mem:" row) so a large copy batch cannot exhaust RAM.
wait_for_memory() {
    local avail
    avail=$(/usr/bin/free | awk 'NR == 2 { print $7 }')
    while [ "$avail" -lt 700000 ]; do
        echo "Nur $avail KiB frei. Warte 10 Sekunden"
        sleep 10
        avail=$(/usr/bin/free | awk 'NR == 2 { print $7 }')
    done
}

# --- main loop ---------------------------------------------------------
# Process the work file line by line.  The first four lines (counter
# 0..3 — presumably the CSV annotations and header, no measurement
# name) are skipped; the measurement name is the fourth comma field.
while IFS=, read -r datenbank result table measurement; do
        if [ "$counter" -gt 3 ]; then
            # Strip stray tabs/CR/LF the CSV field may carry.
            meas=$(printf '%s' "$measurement" | tr -d '\t\r\n')
            echo "$measurement"

            # Determine point count and first/last timestamp.
            meas_count=$(meta_field count 3 "$meas")
            meas_first=$(meta_field first 2 "$meas")
            meas_last=$(meta_field last 2 "$meas")

            if ! [[ "$meas_count" =~ ^[0-9]+$ ]] || [ "$meas_count" -eq 0 ] \
               || [ -z "$meas_first" ] || [ -z "$meas_last" ]; then
                # Nothing usable to copy — skip instead of feeding empty
                # strings into the arithmetic below.
                echo "Keine Datenpunkte fuer $meas gefunden - uebersprungen" >&2
            else
                echo "Kopiere Daten vom $meas_first bis $meas_last"
                # Widen the window by one second on each side so the
                # boundary points are definitely inside the range.
                meas_first_sec=$(( $(date +%s --date="$meas_first") - 1 ))
                meas_last_sec=$(( $(date +%s --date="$meas_last") + 1 ))
                diff_sec=$(( meas_last_sec - meas_first_sec ))
                echo "Gesamt-Zeitraum: $diff_sec Sekunden"
                # +1 keeps the integer division from yielding 0 batches.
                number_batches=$(( meas_count / batch_size + 1 ))
                time_incr_sec=$(( diff_sec / number_batches ))
                echo "Anzahl Datenpunkte: $meas_count"
                echo "Anzahl der Batches: $number_batches"
                echo "Zeitschritt pro Batch: $time_incr_sec Sekunden"

                start_sec=$meas_first_sec
                end_sec=$(( start_sec + time_incr_sec ))
                batch_counter=1
                while [ "$start_sec" -lt "$meas_last_sec" ]; do
                    # Wait for enough free memory before each batch.
                    wait_for_memory
                    # Copy one time slice into the target bucket.  The
                    # pivot/map/fill chain normalises ack/q/from to
                    # strings with defaults before wideTo() writes them.
                    query_copy_string="from(bucket: \"iobroker/global\") |> range(start: ${start_sec}, stop: ${end_sec}) |> filter(fn: (r) => r._measurement == \"$meas\") |> pivot(rowKey: [\"_time\"], columnKey: [\"_field\"], valueColumn: \"_value\") |> map(fn: (r) => ({r with ack: string(v: r.ack)})) |> map(fn: (r) => ({r with q: string(v: r.q)})) |> map(fn: (r) => ({r with from: string(v: r.from)})) |> fill(column: \"ack\", value: \"false\") |> fill(column: \"q\", value: \"0\") |> fill(column: \"from\", value: \"manual\") |> group(columns: [\"_measurement\", \"ack\", \"from\", \"q\"]) |> set(key: \"_measurement\", value: \"$meas\") |> wideTo(bucket: \"iobroker2/global\")"
                    /usr/bin/influx query "$query_copy_string" --raw > /dev/null

                    # Progress message every 10th batch.
                    if [ $(( batch_counter % 10 )) -eq 0 ]; then
                        echo "Batches bis $(date --date="@$end_sec") erledigt"
                    fi
                    # Advance to the next time slice.
                    start_sec=$end_sec
                    end_sec=$(( start_sec + time_incr_sec ))
                    batch_counter=$(( batch_counter + 1 ))
                done
            fi
        fi
        counter+=1
done < "$datei"

