Initial commit: gather scripts from different hosts

commit 0d3b5a9c19

.gitignore (vendored, Normal file, 1 line)
@@ -0,0 +1 @@
*/

Makefile (Normal file, 37 lines)
@@ -0,0 +1,37 @@
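# Render each logs/<date>.csv spectrum dump into imgs/<date>-avg.webp and
# imgs/<date>-peak.webp: plot_wf.py writes a temporary PNG, which ImageMagick's
# convert then re-encodes as the final webp.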
PYTHON=python

SRCS:=$(wildcard logs/*.csv)
IMGS1:=$(patsubst %.csv, %-avg.webp, $(SRCS))
IMGS2:=$(patsubst %.csv, %-peak.webp, $(SRCS))
IMGS:=$(patsubst logs/%, imgs/%, $(IMGS1) $(IMGS2))

.PHONY: all
all: imgs/ $(IMGS)

imgs/:
	mkdir -p $@

imgs/%-avg.webp: logs/%.csv
	tmpfile=$$(mktemp /tmp/spec.XXXXX.png) && \
	$(PYTHON) ./plot_wf.py avg 60 $< $$tmpfile && \
	convert $$tmpfile -quality 100 $@ && \
	rm $$tmpfile

imgs/%-peak.webp: logs/%.csv
	tmpfile=$$(mktemp /tmp/spec.XXXXX.png) && \
	$(PYTHON) ./plot_wf.py peak 60 $< $$tmpfile && \
	convert $$tmpfile -quality 100 $@ && \
	rm $$tmpfile

# high-resolution (in time) images, not generated by default

IMGS_HIGHRES:=$(patsubst %.csv, %-highres.webp, $(SRCS))

.PHONY: highres
highres: imgs/ $(IMGS_HIGHRES)

imgs/%-highres.webp: logs/%.csv
	tmpfile=$$(mktemp /tmp/spec.XXXXX.png) && \
	$(PYTHON) ./plot_wf.py avg 10 $< $$tmpfile && \
	convert $$tmpfile -quality 100 $@ && \
	rm $$tmpfile

plot_wf.py (Executable file, 107 lines)
@@ -0,0 +1,107 @@
#!/usr/bin/env python3

import sys

from datetime import datetime

import numpy as np
import matplotlib.pyplot as pp
from matplotlib.dates import MinuteLocator, HourLocator, DateFormatter, drange, date2num
from matplotlib.ticker import MultipleLocator

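# Usage: plot_wf.py {avg|peak} <resolution_seconds> <input_log.csv> <output_image>
# e.g.:  plot_wf.py avg 60 logs/2021-01-01.csv /tmp/spec.png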
accumulation = sys.argv[1]
resolution = int(sys.argv[2])
logname = sys.argv[3]
outname = sys.argv[4]

print(f"Accumulation: {accumulation}")
print(f"Resolution: {resolution} seconds")
print(f"Input Log: {logname}")
print(f"Output Image: {outname}")

accumulate = {
    "avg": np.mean,
    "peak": np.max,
}[accumulation]

data = -1000 * np.ones((86400//resolution, 1024), dtype=float)
counts = np.zeros(86400//resolution, dtype=int)

basetime = None
last_lineidx = 0
accumulated_data = []

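# Each log line is "unix_timestamp;bin0;bin1;...;bin1023" (1024 dBm values covering
# 0-30 MHz). Rows of `data` are consecutive `resolution`-second bins of one UTC day;
# the -1000 dBm prefill marks bins for which no spectra were recorded.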
with open(logname, 'r') as logfile:
    for line in logfile:
        parts = line.split(";", maxsplit=1)
        timestamp = float(parts[0])

        if not basetime:
            basetime = np.floor(timestamp / 86400) * 86400

        lineidx = int((timestamp - basetime) // resolution)
        if lineidx != last_lineidx:
            if len(accumulated_data) != 0:
                # apply the accumulation function
                data[last_lineidx] = accumulate(np.array(accumulated_data).astype(float), 0)

            accumulated_data = []
            last_lineidx = lineidx

        tmp_data = np.fromstring(parts[1], sep=";", dtype=int)
        if tmp_data.size == 1024:
            accumulated_data.append(tmp_data)
        else:
            print(f"Warning: line ignored due to wrong number of elements: has {tmp_data.size}, expected 1024.")

x = np.linspace(0, 30, data.shape[1])
y = np.linspace(basetime, basetime + resolution * data.shape[0])

extent = [
    0, # MHz
    30, # MHz
    date2num(datetime.utcfromtimestamp(basetime + resolution * data.shape[0])),
    date2num(datetime.utcfromtimestamp(basetime))]

fig_x_scale = 0.85
fig_y_scale = 0.90
fig_x_off = 0.05
fig_y_off = 0.05

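# Size the figure so the spectrogram maps roughly 1:1 onto output pixels, leaving the
# remaining fraction of the canvas (the *_scale / *_off values) for axes and labels.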
dpi = pp.rcParams['figure.dpi'] # get the default dpi value
fig_size = (data.shape[1]/dpi/fig_x_scale, data.shape[0]/dpi/fig_y_scale) # convert pixels to DPI

fig, ax = pp.subplots(figsize=fig_size)

image = ax.imshow(data, vmin=-100, vmax=0, cmap='inferno',
                  extent=extent)

ax.set_position(pos=[fig_x_off, fig_y_off, fig_x_scale, fig_y_scale])
ax.set_title('Spektrogramm vom ' + datetime.fromtimestamp(basetime).strftime('%Y-%m-%d'))

xrange = extent[1] - extent[0]
yrange = extent[2] - extent[3]

ax.set_aspect((data.shape[0] / yrange) / (data.shape[1] / xrange))

ax.yaxis_date()

ax.yaxis.set_major_locator(HourLocator(range(0, 25, 1)))
ax.yaxis.set_minor_locator(MinuteLocator(range(0, 60, 20)))
ax.yaxis.set_major_formatter(DateFormatter('%H:%M'))
ax.yaxis.set_minor_formatter(DateFormatter('%M'))

ax.xaxis.set_major_locator(MultipleLocator(1.0))
ax.set_xlabel('Frequenz [MHz]')

ax_top = ax.twiny()
ax_top.xaxis.set_major_locator(MultipleLocator(1.0))
ax_top.set_xlabel('Frequenz [MHz]')
ax_top.set_xlim(ax.get_xlim())
ax_top.set_position(pos=[fig_x_off, fig_y_off, fig_x_scale, fig_y_scale])

cax = pp.axes([fig_x_scale+fig_x_off+0.02, fig_y_off, 0.03, fig_y_scale])
pp.colorbar(cax=cax, mappable=image)
cax.set_title('dBm')

pp.savefig(outname)

plot_wf_longterm.py (Executable file, 197 lines)
@@ -0,0 +1,197 @@
#!/usr/bin/env python3

import sys

import argparse

from datetime import datetime

from lzma import LZMAFile

import numpy as np
import matplotlib.pyplot as pp
from matplotlib.dates import MinuteLocator, HourLocator, DayLocator, DateFormatter, AutoDateLocator, drange, date2num
from matplotlib.ticker import MultipleLocator

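# Unlike plot_wf.py, this script accepts any number of daily dumps (optionally
# xz-compressed) and accumulates them into one long-term waterfall.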
parser = argparse.ArgumentParser(description="Plots long term waterfalls from KiwiSDR spectrum recordings.")

parser.add_argument('-a', '--accumulation', choices=['avg_db', 'avg_pwr', 'peak'], default='avg_pwr', help='Accumulation method.')
parser.add_argument('-r', '--resolution', type=int, default=600, help='Spectrogram time resolution in seconds.')
parser.add_argument('-o', '--output', type=argparse.FileType('wb'), required=True, help='The image output name.')
parser.add_argument('input', nargs='+', type=str, help='KiwiSDR spectrum dumps to load (uncompressed or xz-compressed)')

args = parser.parse_args()
print(args)

print(f"Accumulation: {args.accumulation}")
print(f"Resolution: {args.resolution} seconds")
print(f"Input Logs: {args.input}")
print(f"Output Image: {args.output}")

loaded_data = {} # format: base_timestamp => {count, spectrum data as np.array}

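# Spectra are grouped into blocks of args.resolution seconds; each block is keyed by
# its start time (the timestamp rounded down to a multiple of the resolution) and is
# accumulated on the fly while the input files are streamed.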
for inputfilename in args.input:
    try:
        if inputfilename[-2:].lower() == "xz":
            print(f"Loading '{inputfilename}' as XZ stream...")
            stream = LZMAFile(filename=inputfilename, mode='r')
        else:
            print(f"Loading '{inputfilename}' as uncompressed stream...")
            stream = open(inputfilename, 'rb') # binary mode, so line.decode() below works for both cases

        for line in stream:
            # extract the timestamp
            parts = line.decode('ascii').split(";", maxsplit=1)
            timestamp = float(parts[0])

            # calculate which accumulation block this spectrum fits into
            basetime = (timestamp // args.resolution) * args.resolution

            # split the spectrum data line
            tmp_data = np.fromstring(parts[1], sep=";", dtype=int)
            if tmp_data.size != 1024:
                print(f"Warning: line ignored due to wrong number of elements: has {tmp_data.size}, expected 1024.")
                continue

            # get the block data loaded so far or initialize it
            if basetime not in loaded_data.keys():
                if args.accumulation == 'peak':
                    loaded_data[basetime] = {'data': -1000 * np.ones(1024)}
                else:
                    loaded_data[basetime] = {'count': 0, 'data': np.zeros(1024)}

            block_data = loaded_data[basetime]

            # on the fly accumulation
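            # peak keeps the element-wise maximum seen so far; avg_db sums raw dB values;
            # avg_pwr sums linear power (mW), so the later average is taken over power
            # rather than over dB.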
            if args.accumulation == 'peak':
                block_data['data'] = np.max(np.stack([tmp_data, block_data['data']]), axis=0)
            elif args.accumulation == 'avg_db':
                block_data['data'] += tmp_data
                block_data['count'] += 1
            else: # avg_pwr
                block_data['data'] += 10**(tmp_data/10) # convert dBm to mW
                block_data['count'] += 1

            loaded_data[basetime] = block_data

        stream.close()
    except Exception as e:
        print(f"Error while processing '{inputfilename}': {e}")
        print("This file’s (remaining) data is skipped.")

print("Post-processing accumulation...")
|
||||||
|
|
||||||
|
if args.accumulation == 'avg_db':
|
||||||
|
for v in loaded_data.values():
|
||||||
|
v['data'] /= v['count']
|
||||||
|
elif args.accumulation == 'avg_pwr':
|
||||||
|
for v in loaded_data.values():
|
||||||
|
v['count'] = max(1, v['count']) # prevent division by 0
|
||||||
|
v['data'] = 10 * np.log10(v['data'] / v['count']) # convert averaged value back to dBm
|
||||||
|
# peak accumulation does not need finalization
|
||||||
|
|
||||||
|
print(f"Loading completed. {len(loaded_data)} blocks accumulated.")
|
||||||
|
|
||||||
|
# create a contiguous array from the spectrum lines
|
||||||
|
timestamps = loaded_data.keys()
|
||||||
|
|
||||||
|
first_timestamp = min(timestamps)
|
||||||
|
last_timestamp = max(timestamps)
|
||||||
|
|
||||||
|
num_spectrums = int((last_timestamp - first_timestamp) // args.resolution) + 1
|
||||||
|
shape = (num_spectrums, loaded_data[first_timestamp]['data'].size)
|
||||||
|
|
||||||
|
print(f"Reserving an array with shape {shape}...")
|
||||||
|
|
||||||
|
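# Blocks with no data keep the -1000 dBm prefill, so gaps in the recordings show up
# as dark rows in the rendered spectrogram.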
spectrogram = -1000.0 * np.ones(shape, dtype=float)

print(f"Copying the data in...")

for k, v in loaded_data.items():
    idx = int((k - first_timestamp) // args.resolution)

    spectrogram[idx, :] = v['data']

del loaded_data # not needed anymore

print("Plotting image...")

basetime = first_timestamp

x = np.linspace(0, 30, spectrogram.shape[1])
y = np.linspace(basetime, basetime + args.resolution * spectrogram.shape[0])

extent = [
    0, # MHz
    30, # MHz
    date2num(datetime.utcfromtimestamp(basetime + args.resolution * spectrogram.shape[0])),
    date2num(datetime.utcfromtimestamp(basetime))]

pic_w_px = 1024 + 127 + 150

fig_x_scale = 1024 / pic_w_px
fig_x_off = 127 / pic_w_px # from left side

pic_h_px = spectrogram.shape[0] + 70 + 100

fig_y_scale = spectrogram.shape[0] / pic_h_px
fig_y_off = 70 / pic_h_px # from bottom

dpi = pp.rcParams['figure.dpi'] # get the default dpi value
fig_size = (spectrogram.shape[1]/dpi/fig_x_scale, spectrogram.shape[0]/dpi/fig_y_scale) # convert pixels to DPI

fig, ax = pp.subplots(figsize=fig_size)

image = ax.imshow(spectrogram, vmin=-100, vmax=0, cmap='inferno',
                  extent=extent)

ax.set_position(pos=[fig_x_off, fig_y_off, fig_x_scale, fig_y_scale])
ax.set_title('Spektrogramm von '
             + datetime.utcfromtimestamp(first_timestamp).strftime('%Y-%m-%d %H:%M:%S')
             + ' bis '
             + datetime.utcfromtimestamp(last_timestamp).strftime('%Y-%m-%d %H:%M:%S')
             + ' UTC')

xrange = extent[1] - extent[0]
yrange = extent[2] - extent[3]

ax.set_aspect((spectrogram.shape[0] / yrange) / (spectrogram.shape[1] / xrange))

print("Formatting...")

ax.yaxis_date()

#ax.yaxis.set_major_locator(HourLocator(range(0, 25, 1)))
#ax.yaxis.set_minor_locator(MinuteLocator(range(0, 60, 20)))
#ax.yaxis.set_major_formatter(DateFormatter('%H:%M'))
#ax.yaxis.set_minor_formatter(DateFormatter('%M'))

#loc = AutoDateLocator()
#ax.yaxis.set_major_locator(loc)
#ax.yaxis.set_major_formatter(DateFormatter('%Y-%m-%d %H:%M'))

ax.yaxis.set_major_locator(DayLocator(interval=max(1, args.resolution // 600)))
ax.yaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))

if args.resolution <= 600:
    ax.yaxis.set_minor_locator(HourLocator(interval=max(1, args.resolution // 100)))
    ax.yaxis.set_minor_formatter(DateFormatter('%H'))

ax.xaxis.set_major_locator(MultipleLocator(1.0))
ax.set_xlabel('Frequenz [MHz]')

ax_top = ax.twiny()
ax_top.xaxis.set_major_locator(MultipleLocator(1.0))
ax_top.set_xlabel('Frequenz [MHz]')
ax_top.set_xlim(ax.get_xlim())
ax_top.set_position(pos=[fig_x_off, fig_y_off, fig_x_scale, fig_y_scale])

cax = pp.axes([fig_x_scale+fig_x_off+0.02, fig_y_off, 0.03, fig_y_scale])
pp.colorbar(cax=cax, mappable=image)
cax.set_title('dBm')

print("Saving...")

pp.savefig(args.output)

print("All done!")

record_wf.py (Executable file, 82 lines)
@@ -0,0 +1,82 @@
#!/usr/bin/env python3

import websocket
import time
import numpy as np
import datetime

import os

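# Connects to a KiwiSDR waterfall websocket and appends every received spectrum as
# one CSV line to logs/YYYY-MM-DD.csv, opening a new file whenever the date changes.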
last_date = None
logfile = None

os.makedirs("logs", exist_ok=True)

def write_log(dt, line):
    global last_date, logfile

    date = dt.date()

    # reopen log file if day has changed
    if last_date != date:
        if logfile:
            logfile.close()

        logname = os.path.join("logs", date.strftime("%Y-%m-%d.csv"))
        print(f"(Re-)opening logfile: {logname}")
        logfile = open(logname, "a")

        last_date = date

    if line[-1] != '\n':
        line += '\n'

    logfile.write(line)


last_keepalive = datetime.datetime.now()

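# The messages below mimic the KiwiSDR/OpenWebRX waterfall client handshake.
# zoom=0 start=0 selects the full band (the plotting scripts assume 0-30 MHz);
# wf_speed=1 presumably selects the slowest waterfall rate, around one frame per second.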
def on_open(wsapp):
    wsapp.send("SET auth t=kiwi p=#")
    wsapp.send("SERVER DE CLIENT openwebrx.js W/F")
    wsapp.send("SET send_dB=1")
    wsapp.send("SET zoom=0 start=0")
    wsapp.send("SET maxdb=0 mindb=-100")
    wsapp.send("SET interp=13")
    wsapp.send("SET wf_comp=0")
    wsapp.send("SET wf_speed=1")
    #wsapp.send("SET MARKER min=0.000 max=30000.000 zoom=0 width=1904")
    #wsapp.send("SET aper=1 algo=3 param=0.00")

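# W/F frames: the first 3 bytes are the type tag, byte 4 is skipped, the next 12 bytes
# are a header that is ignored, and the rest is one unsigned byte per frequency bin;
# subtracting 255 converts those bytes to dBm.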
def on_message(wsapp, message):
    global last_keepalive

    msgtype = message[:3].decode('ascii')
    msgdata = message[4:]
    now = datetime.datetime.now()

    if msgtype == 'MSG':
        msgstr = msgdata.decode('utf-8')
        print(f"Status message: {msgstr}")
    elif msgtype == 'W/F':
        #print(msgdata)
        print(f"Received waterfall data: {len(msgdata)} bytes.")
        #print(f"Header: {msgdata[:12]}")
        specdata = msgdata[12:]
        spectrum = np.frombuffer(specdata, dtype=np.uint8).astype(int)
        spectrum = spectrum - 255 # converts to dBm
        logline = f"{now.timestamp():.3f};" + ";".join([f"{dBm:d}" for dBm in spectrum]) + "\n"

        write_log(now, logline)

        if now - last_keepalive > datetime.timedelta(seconds=10):
            wsapp.send("SET keepalive")
            last_keepalive = now
    else:
        print(f"Unknown type: {msgtype} – data: {msgdata}")

url = f"ws://192.168.75.14:8073/kiwi/{int(time.time())}/W/F"
wsapp = websocket.WebSocketApp(url,
                               on_message=on_message,
                               on_open=on_open)
wsapp.run_forever()

requirements.txt (Normal file, 1 line)
@@ -0,0 +1 @@
websocket-client