First run reducing number of problems

This commit is contained in:
DJ2LS 2022-05-23 09:37:24 +02:00
parent 9f8f17b633
commit b6face744b
5 changed files with 259 additions and 249 deletions

View file

@@ -1,20 +1,18 @@
import atexit
-import json
import multiprocessing
-import sys
import sounddevice as sd
atexit.register(sd._terminate)
def get_audio_devices():
"""
return list of input and output audio devices in own process to avoid crashes of portaudio on raspberry pi
also uses a process data manager
"""
-# we need to run this on windows for multiprocessing support
+# we need to run this on Windows for multiprocessing support
# multiprocessing.freeze_support()
# multiprocessing.get_context('spawn')
@@ -33,34 +31,34 @@ def get_audio_devices():
return list(proxy_input_devices), list(proxy_output_devices)
def fetch_audio_devices(input_devices, output_devices):
"""
get audio devices from portaudio
Args:
input_devices: proxy variable for input devices
-output_devices: proxy variable for outout devices
+output_devices: proxy variable for output devices
Returns:
"""
devices = sd.query_devices(device=None, kind=None)
for index, device in enumerate(devices):
-#for i in range(0, p.get_device_count()):
-# we need to do a try exception, beacuse for windows theres no audio device range
+# we need to do a try exception, because for windows there's no audio device range
try:
name = device["name"]
-maxOutputChannels = device["max_output_channels"]
-maxInputChannels = device["max_input_channels"]
+max_output_channels = device["max_output_channels"]
+max_input_channels = device["max_input_channels"]
except Exception as e:
print(e)
-maxInputChannels = 0
-maxOutputChannels = 0
+max_input_channels = 0
+max_output_channels = 0
name = ''
-if maxInputChannels > 0:
+if max_input_channels > 0:
input_devices.append({"id": index, "name": name})
-if maxOutputChannels > 0:
+if max_output_channels > 0:
output_devices.append({"id": index, "name": name})
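The docstring above describes the technique: query PortAudio in a separate process, with Manager-backed proxy lists, so a PortAudio crash (seen on Raspberry Pi) cannot take down the main program. The lines that start that process are not part of this hunk, so the following is only a minimal sketch of the pattern under that assumption, reusing the names visible in the diff:

import multiprocessing
import sounddevice as sd

def fetch_audio_devices(input_devices, output_devices):
    # collect every device that offers at least one input or output channel
    for index, device in enumerate(sd.query_devices()):
        if device["max_input_channels"] > 0:
            input_devices.append({"id": index, "name": device["name"]})
        if device["max_output_channels"] > 0:
            output_devices.append({"id": index, "name": device["name"]})

def get_audio_devices():
    # run the PortAudio query in a child process; proxy lists carry the result back
    with multiprocessing.Manager() as manager:
        proxy_input_devices = manager.list()
        proxy_output_devices = manager.list()
        proc = multiprocessing.Process(target=fetch_audio_devices, args=(proxy_input_devices, proxy_output_devices))
        proc.start()
        proc.join()
        return list(proxy_input_devices), list(proxy_output_devices)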

View file

@@ -42,6 +42,7 @@ def freedv_get_mode_value_by_name(mode: str) -> int:
"""
return FREEDV_MODE[mode].value
# Function for returning the mode name
def freedv_get_mode_name_by_value(mode: int) -> str:
"""
@@ -54,6 +55,7 @@ def freedv_get_mode_name_by_value(mode: int) -> str:
"""
return FREEDV_MODE(mode).name
# Check if we are running in a pyinstaller environment
if hasattr(sys, "_MEIPASS"):
sys.path.append(getattr(sys, "_MEIPASS"))
@@ -144,6 +146,7 @@ api.FREEDV_MODE_DATAC3 = 12
api.FREEDV_MODE_DATAC0 = 14
api.FREEDV_MODE_FSK_LDPC = 9
# -------------------------------- FSK LDPC MODE SETTINGS
# Advanced structure for fsk modes
@@ -159,6 +162,7 @@ class ADVANCED(ctypes.Structure):
("codename", ctypes.c_char_p),
]
'''
adv.interleave_frames = 0 # max amplitude
adv.M = 2 # number of fsk tones 2/4
@@ -209,6 +213,7 @@ MODEM_STATS_NSPEC = 512
MODEM_STATS_MAX_F_HZ = 4000
MODEM_STATS_MAX_F_EST = 4
# Modem stats structure
class MODEMSTATS(ctypes.Structure):
""" """
@@ -231,6 +236,7 @@ class MODEMSTATS(ctypes.Structure):
("fft_buf", (ctypes.c_float * MODEM_STATS_NSPEC * 2)),
]
# Return code flags for freedv_get_rx_status() function
api.FREEDV_RX_TRIAL_SYNC = 0x1 # demodulator has trial sync
api.FREEDV_RX_SYNC = 0x2 # demodulator has sync
@@ -255,6 +261,7 @@ api.rx_sync_flags_to_text = [
"EBS-",
"EBST"]
# Audio buffer ---------------------------------------------------------
class audio_buffer:
"""
@@ -262,6 +269,7 @@ class audio_buffer:
made by David Rowe, VK5DGR
"""
# A buffer of int16 samples, using a fixed length numpy array self.buffer for storage
# self.nbuffer is the current number of samples in the buffer
def __init__(self, size):
@@ -304,14 +312,16 @@ class audio_buffer:
assert self.nbuffer >= 0
self.mutex.release()
# Resampler ---------------------------------------------------------
-api.FDMDV_OS_48 = int(6) # oversampling rate
-api.FDMDV_OS_TAPS_48K = int(48) # number of OS filter taps at 48kHz
-api.FDMDV_OS_TAPS_48_8K = int(api.FDMDV_OS_TAPS_48K/api.FDMDV_OS_48) # number of OS filter taps at 8kHz
+api.FDMDV_OS_48 = 6 # oversampling rate
+api.FDMDV_OS_TAPS_48K = 48 # number of OS filter taps at 48kHz
+api.FDMDV_OS_TAPS_48_8K = api.FDMDV_OS_TAPS_48K // api.FDMDV_OS_48 # number of OS filter taps at 8kHz
api.fdmdv_8_to_48_short.argtype = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int]
api.fdmdv_48_to_8_short.argtype = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int]
class resampler:
"""
Re-sampler class
@@ -375,7 +385,7 @@ class resampler:
# In C: pin8=&in8_mem[MEM8]
pin8 = ctypes.byref(np.ctypeslib.as_ctypes(in8_mem), 2 * self.MEM8)
out48 = np.zeros(api.FDMDV_OS_48 * len(in8), dtype=np.int16)
-api.fdmdv_8_to_48_short(out48.ctypes, pin8, len(in8));
+api.fdmdv_8_to_48_short(out48.ctypes, pin8, len(in8))
# Store memory for next time
self.filter_mem8 = in8_mem[:self.MEM8]

View file

@@ -15,8 +15,6 @@ import argparse
import atexit
import multiprocessing
import os
-import queue
-import re
import signal
import socketserver
import subprocess
@@ -25,13 +23,11 @@ import threading
import time
import crcengine
-import psutil
import serial.tools.list_ports
import structlog
import ujson as json
import audio
-import helpers
import log_handler
import sock
import static
@@ -51,9 +47,11 @@ def signal_handler(sig, frame):
sock.CLOSE_SIGNAL = True
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
-class DAEMON():
+class DAEMON:
"""
Daemon class
@@ -215,6 +213,7 @@ class DAEMON():
options.append('--tuning_range_fmax')
options.append(data[20])
# overriding FSK mode
# if data[21] == 'True':
# options.append('--fsk')
@@ -304,7 +303,7 @@ class DAEMON():
serialspeed=serialspeed, pttport=pttport, data_bits=data_bits, stop_bits=stop_bits,
handshake=handshake, rigctld_ip=rigctld_ip, rigctld_port = rigctld_port)
-hamlib_version = rig.hamlib_version
+# hamlib_version = rig.hamlib_version
hamlib.set_ptt(True)
pttstate = hamlib.get_ptt()
@@ -327,10 +326,10 @@ class DAEMON():
except Exception as e:
structlog.get_logger("structlog").error("[DMN] worker: Exception: ", e=e)
-# print(e)
if __name__ == '__main__':
-# we need to run this on windows for multiprocessing support
+# we need to run this on Windows for multiprocessing support
multiprocessing.freeze_support()
# --------------------------------------------GET PARAMETER INPUTS

View file

@@ -8,9 +8,7 @@ Created on Sun Dec 27 20:43:40 2020
# pylint: disable=invalid-name, line-too-long, c-extension-no-member
# pylint: disable=import-outside-toplevel
-import asyncio
import base64
-import logging
import queue
import sys
import threading
@@ -25,7 +23,6 @@ import ujson as json
import codec2
import helpers
-import log_handler
import modem
import sock
import static
@@ -362,11 +359,11 @@ class DATA():
self.enqueue_frame_for_tx(ack_frame, copies=3, repeat_delay=100)
def send_retransmit_request_frame(self, freedv):
-# check where a None is in our burst buffer and do frame+1, beacuse lists start at 0
+# check where a None is in our burst buffer and do frame+1, because lists start at 0
missing_frames = [frame + 1 for frame, element in enumerate(static.RX_BURST_BUFFER) if element is None]
# set n frames per burst to modem
-# this is an idea so its not getting lost....
+# this is an idea, so it's not getting lost....
# we need to work on this
codec2.api.freedv_set_frames_per_burst(freedv,len(missing_frames))
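The list comprehension above turns the burst buffer into 1-based frame numbers for every empty slot; a quick stand-alone illustration (the buffer contents are made up):

RX_BURST_BUFFER = [b"frame1", None, b"frame3", None]
missing_frames = [frame + 1 for frame, element in enumerate(RX_BURST_BUFFER) if element is None]
print(missing_frames)  # [2, 4] -> request retransmission of frames 2 and 4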
@@ -490,7 +487,7 @@ class DATA():
# Here we are going to search for our data in the last received bytes.
# This reduces the chance we will lose the entire frame in the case of signalling frame loss
-# static.RX_FRAME_BUFFER --> exisitng data
+# static.RX_FRAME_BUFFER --> existing data
# temp_burst_buffer --> new data
# search_area --> area where we want to search
search_area = 510
@@ -504,7 +501,7 @@ class DATA():
static.RX_FRAME_BUFFER = static.RX_FRAME_BUFFER[:search_position + get_position]
static.RX_FRAME_BUFFER += temp_burst_buffer
structlog.get_logger("structlog").warning("[TNC] ARQ | RX | replacing existing buffer data", area=search_area, pos=get_position)
-# if we dont find data n this range, we really have new data and going to replace it
+# if we don't find data n this range, we really have new data and going to replace it
else:
static.RX_FRAME_BUFFER += temp_burst_buffer
structlog.get_logger("structlog").debug("[TNC] ARQ | RX | appending data to buffer")
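The replace-or-append logic in these two hunks can be reproduced with plain bytes operations. The following is only a simplified sketch under assumptions (the probe length and helper name are invented; the surrounding method and the exact search expression are not shown in this diff):

def merge_burst(rx_frame_buffer: bytes, temp_burst_buffer: bytes, search_area: int = 510) -> bytes:
    # look for the start of the new data inside the tail of what we already received
    search_position = max(0, len(rx_frame_buffer) - search_area)
    get_position = rx_frame_buffer[search_position:].find(temp_burst_buffer[:10])  # hypothetical probe length
    if get_position >= 0:
        # overlap found: cut the buffer at the overlap and splice in the new data
        return rx_frame_buffer[:search_position + get_position] + temp_burst_buffer
    # no overlap inside the search area: this really is new data, append it
    return rx_frame_buffer + temp_burst_buffer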
@@ -609,7 +606,6 @@ class DATA():
jsondata = {"arq":"received", "uuid" : uniqueid, "timestamp": timestamp, "mycallsign" : str(mycallsign, 'utf-8'), "dxcallsign": str(static.DXCALLSIGN, 'utf-8'), "dxgrid": str(static.DXGRID, 'utf-8'), "data": base64_data}
json_data_out = json.dumps(jsondata)
structlog.get_logger("structlog").debug("[TNC] arq_data_received:", jsondata=jsondata)
-# print(jsondata)
sock.SOCKET_QUEUE.put(json_data_out)
static.INFO.append("ARQ;RECEIVING;SUCCESS")
@@ -710,10 +706,10 @@ class DATA():
structlog.get_logger("structlog").debug("[TNC] FIXED MODE:", mode=data_mode)
else:
# we are doing a modulo check of transmission retries of the actual burst
-# every 2nd retry which failes, decreases speedlevel by 1.
+# every 2nd retry which fails, decreases speedlevel by 1.
# as soon as we received an ACK for the current burst, speed_level will increase
# by 1.
-# the can be optimised by checking the optimal speed level for the current conditions
+# They can be optimised by checking the optimal speed level for the current conditions
'''
if not self.tx_n_retry_of_burst % 2 and self.tx_n_retry_of_burst > 0:
self.speed_level -= 1
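The commented-out idea quoted above drops the speed level on every second failed retry. A tiny illustration of which retry counters would trigger it, assuming exactly the modulo check as quoted (the starting speed_level value is arbitrary):

speed_level = 2
for tx_n_retry_of_burst in range(1, 7):
    if not tx_n_retry_of_burst % 2 and tx_n_retry_of_burst > 0:
        speed_level = max(0, speed_level - 1)
    print(tx_n_retry_of_burst, speed_level)  # retries 2, 4 and 6 lower the level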
@@ -980,7 +976,7 @@ class DATA():
Returns:
"""
-# das hier müssen wir checken. Sollte vielleicht in INIT!!!
+# TODO: we need to check this, maybe placing it to class init
self.datachannel_timeout = False
structlog.get_logger("structlog").info("[TNC] SESSION [" + str(self.mycallsign, 'utf-8') + "]>> <<[" + str(static.DXCALLSIGN, 'utf-8') + "]", state=static.ARQ_SESSION_STATE)
@@ -1028,7 +1024,6 @@ class DATA():
time.sleep(0.01)
# break if data channel is opened
if static.ARQ_SESSION:
-# eventuell einfach nur return true um die nächste break ebene zu vermeiden?
return True
# if static.ARQ_SESSION:
# break
@@ -1233,7 +1228,6 @@ class DATA():
if attempt == self.data_channel_max_retries:
static.INFO.append("DATACHANNEL;FAILED")
structlog.get_logger("structlog").debug("[TNC] arq_open_data_channel:", transmission_uuid=self.transmission_uuid)
-# print(self.transmission_uuid)
jsondata = {"arq":"transmission", "status" :"failed", "uuid" : self.transmission_uuid, "percent" : static.ARQ_TRANSMISSION_PERCENT, "bytesperminute" : static.ARQ_BYTES_PER_MINUTE}
json_data_out = json.dumps(jsondata)
sock.SOCKET_QUEUE.put(json_data_out)
@@ -1486,7 +1480,7 @@ class DATA():
"""
Controlling funktion for running a beacon
Args:
-interval:int:
+self:
Returns:
@@ -1686,7 +1680,7 @@ class DATA():
def calculate_transfer_rate_tx(self, tx_start_of_transmission:float, sentbytes:int, tx_buffer_length:int) -> list:
"""
-Calcualte Transferrate for transmission
+Calculate transfer rate for transmission
Args:
tx_start_of_transmission:float:
sentbytes:int:
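Only the signature of calculate_transfer_rate_tx is visible in this hunk. A plausible sketch of what such a helper computes, assuming only the percent and bytes-per-minute fields that appear in the jsondata dictionaries above (the exact formulas in the commit may differ):

import time

def calculate_transfer_rate_tx(tx_start_of_transmission: float, sentbytes: int, tx_buffer_length: int) -> list:
    elapsed = max(time.time() - tx_start_of_transmission, 0.01)  # avoid division by zero
    arq_bits_per_second = int(sentbytes * 8 / elapsed)
    arq_bytes_per_minute = int(sentbytes / (elapsed / 60))
    arq_transmission_percent = min(int(sentbytes / tx_buffer_length * 100), 100) if tx_buffer_length else 0
    return [arq_bits_per_second, arq_bytes_per_minute, arq_transmission_percent]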

View file

@@ -27,6 +27,7 @@ def wait(seconds: float) -> bool:
time.sleep(0.01)
return True
def get_crc_8(data) -> bytes:
"""Author: DJ2LS
@@ -45,6 +46,7 @@ def get_crc_8(data) -> bytes:
crc_data = crc_data.to_bytes(1, byteorder='big')
return crc_data
def get_crc_16(data) -> bytes:
"""Author: DJ2LS
@@ -63,6 +65,7 @@ def get_crc_16(data) -> bytes:
crc_data = crc_data.to_bytes(2, byteorder='big')
return crc_data
def get_crc_24(data) -> bytes:
"""Author: DJ2LS
@@ -84,6 +87,7 @@ def get_crc_24(data) -> bytes:
crc_data = crc_data.to_bytes(3, byteorder='big')
return crc_data
def get_crc_32(data: bytes) -> bytes:
"""Author: DJ2LS
@@ -102,6 +106,7 @@ def get_crc_32(data: bytes) -> bytes:
crc_data = crc_data.to_bytes(4, byteorder='big')
return crc_data
def add_to_heard_stations(dxcallsign, dxgrid, datatype, snr, offset, frequency):
"""
@@ -136,6 +141,7 @@ def add_to_heard_stations(dxcallsign, dxgrid, datatype, snr, offset, frequency):
# item = [dxcallsign, int(time.time())]
# static.HEARD_STATIONS[idx] = item
def callsign_to_bytes(callsign) -> bytes:
"""
@@ -235,6 +241,7 @@ def bytes_to_callsign(bytestring: bytes) -> bytes:
ssid = ord(bytes(decoded[-1], "utf-8"))
return bytes(f"{callsign}-{ssid}", "utf-8")
def check_callsign(callsign:bytes, crc_to_check:bytes):
"""
Funktion to check a crc against a callsign to calculate the ssid by generating crc until we got it
@@ -270,9 +277,10 @@ def check_callsign(callsign:bytes, crc_to_check:bytes):
return [False, ""]
def encode_grid(grid):
"""
-@auther: DB1UJ
+@author: DB1UJ
Args:
grid:string: maidenhead QTH locater [a-r][a-r][0-9][0-9][a-x][a-x]
Returns:
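encode_grid packs a six-character Maidenhead locator ([a-r][a-r][0-9][0-9][a-x][a-x]) into the 26 valid bits of a 4-byte word. DB1UJ's actual bit layout is not shown in this hunk; the following is only one self-consistent way to fit a locator into that range, using a mixed-radix packing (18*18*10*10*24*24 combinations need 25-26 bits):

def encode_grid_sketch(grid: str) -> bytes:
    grid = grid.upper()
    digits = [
        ord(grid[0]) - ord('A'),   # field letter, 0..17
        ord(grid[1]) - ord('A'),   # field letter, 0..17
        int(grid[2]),              # square digit, 0..9
        int(grid[3]),              # square digit, 0..9
        ord(grid[4]) - ord('A'),   # subsquare letter, 0..23
        ord(grid[5]) - ord('A'),   # subsquare letter, 0..23
    ]
    value = 0
    for digit, radix in zip(digits, (18, 18, 10, 10, 24, 24)):
        value = value * radix + digit
    return value.to_bytes(4, byteorder='big')   # at most 26 bits used, upper bits stay zero

print(encode_grid_sketch("JO43ab").hex())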
@@ -305,7 +313,7 @@ def encode_grid(grid):
def decode_grid(b_code_word:bytes):
"""
-@auther: DB1UJ
+@author: DB1UJ
Args:
b_code_word:bytes: 4 bytes with 26 bit valid data LSB
Returns:
@@ -334,7 +342,7 @@ def decode_grid(b_code_word:bytes):
def encode_call(call):
"""
-@auther: DB1UJ
+@author: DB1UJ
Args:
call:string: ham radio call sign [A-Z,0-9], last char SSID 0-63
@@ -355,9 +363,10 @@ def encode_call(call):
return out_code_word.to_bytes(length=6, byteorder='big')
def decode_call(b_code_word:bytes):
"""
-@auther: DB1UJ
+@author: DB1UJ
Args:
b_code_word:bytes: 6 bytes with 6 bits/sign valid data char signs LSB
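As the docstrings above state, encode_call/decode_call work with 6 bytes at 6 bits per character, i.e. 8 six-bit symbols, with the last slot carrying an SSID of 0-63. DB1UJ's real character table and packing order are not shown in this diff; the sketch below is only an assumed, self-consistent variant of that scheme (the two-argument signature and CHAR_TABLE are inventions for illustration):

CHAR_TABLE = " ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"  # assumed 6-bit symbol table, index 0 = padding

def encode_call_sketch(call: str, ssid: int) -> bytes:
    out_code_word = 0
    for char in call.upper().ljust(7):                     # 7 call sign characters, space padded
        out_code_word = (out_code_word << 6) | CHAR_TABLE.index(char)
    out_code_word = (out_code_word << 6) | (ssid & 0b111111)  # last 6 bits carry the SSID 0-63
    return out_code_word.to_bytes(length=6, byteorder='big')  # 8 symbols * 6 bits = 48 bits = 6 bytes

print(encode_call_sketch("DJ2LS", 0).hex())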