Merge pull request #666 from DJ2LS/develop

0.14.0
This commit is contained in:
DJ2LS 2024-02-28 21:19:47 +01:00 committed by GitHub
commit 744ed425c3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
30 changed files with 280 additions and 142 deletions

View file

@ -48,6 +48,7 @@ jobs:
brew install portaudio
python -m pip install --upgrade pip
pip3 install pyaudio
export PYTHONPATH=/opt/homebrew/opt/portaudio/lib/:$PYTHONPATH
- name: Install Python dependencies
run: |

View file

@ -2,7 +2,7 @@
"name": "FreeDATA",
"description": "FreeDATA Client application for connecting to FreeDATA server",
"private": true,
"version": "0.13.7-alpha",
"version": "0.14.0-alpha",
"main": "dist-electron/main/index.js",
"scripts": {
"start": "vite",

View file

@ -485,8 +485,8 @@ function quickfill() {
<i class="bi bi-grip-vertical h5"></i>
</button>
<div class="grid-container" style="height: calc(100vh - 51px);">
<div class="grid-stack">
<div class="grid-container z-0" style="height: calc(100vh - 51px);">
<div class="grid-stack z-0">
<div
v-for="(w, indexs) in items"
class="grid-stack-item"

View file

@ -190,6 +190,12 @@ export function eventDispatcher(data) {
100;
stateStore.arq_total_bytes =
data["arq-transfer-outbound"].received_bytes;
stateStore.arq_speed_list_timestamp =
data["arq-transfer-outbound"].statistics.time_histogram;
stateStore.arq_speed_list_bpm =
data["arq-transfer-outbound"].statistics.bpm_histogram;
stateStore.arq_speed_list_snr =
data["arq-transfer-outbound"].statistics.snr_histogram;
return;
case "ABORTING":
@ -232,6 +238,13 @@ export function eventDispatcher(data) {
stateStore.dxcallsign = data["arq-transfer-inbound"].dxcall;
stateStore.arq_transmission_percent = 0;
stateStore.arq_total_bytes = 0;
stateStore.arq_speed_list_timestamp =
data["arq-transfer-inbound"].statistics.time_histogram;
stateStore.arq_speed_list_bpm =
data["arq-transfer-inbound"].statistics.bpm_histogram;
stateStore.arq_speed_list_snr =
data["arq-transfer-inbound"].statistics.snr_histogram;
return;
case "OPEN_ACK_SENT":

View file

@ -53,6 +53,7 @@ export const useStateStore = defineStore("stateStore", () => {
var arq_speed_list_bpm = ref([]);
var arq_speed_list_snr = ref([]);
/* TODO: These 3 can probably be removed, DJ2LS */
var arq_seconds_until_finish = ref();
var arq_seconds_until_timeout = ref();
var arq_seconds_until_timeout_percent = ref();

View file

@ -1,5 +1,16 @@
import re
def validate_remote_config(config):
    """Validate a remote configuration dict.

    Args:
        config: parsed configuration mapping; expected to contain
            config["STATION"]["mygrid"] (missing keys raise KeyError).

    Returns:
        True when the configuration is valid, None when config is empty/None
        (callers treat a falsy result as "nothing to validate").

    Raises:
        ValueError: if the Maidenhead gridsquare is not exactly 6 characters.
    """
    if not config:
        return
    mygrid = config["STATION"]["mygrid"]
    if len(mygrid) != 6:
        # Include the offending value so the user can see what was rejected.
        raise ValueError(f"Gridsquare must be 6 characters, got '{mygrid}'!")
    return True
def validate_freedata_callsign(callsign):
#regexp = "^[a-zA-Z]+\d+\w+-\d{1,2}$"
regexp = "^[A-Za-z0-9]{1,7}-[0-9]{1,3}$" # still broken - we need to allow all ssids from 0 - 255

View file

@ -52,23 +52,23 @@ class ARQDataTypeHandler:
return session_type
return None
def dispatch(self, type_byte: int, data: bytearray):
def dispatch(self, type_byte: int, data: bytearray, statistics: dict):
session_type = self.get_session_type_from_value(type_byte)
self.state_manager.setARQ(False)
if session_type and session_type in self.handlers and 'handle' in self.handlers[session_type]:
return self.handlers[session_type]['handle'](data)
return self.handlers[session_type]['handle'](data, statistics)
else:
self.log(f"Unknown handling endpoint for type: {type_byte}", isWarning=True)
def failed(self, type_byte: int, data: bytearray):
def failed(self, type_byte: int, data: bytearray, statistics: dict):
session_type = self.get_session_type_from_value(type_byte)
self.state_manager.setARQ(False)
if session_type in self.handlers and 'failed' in self.handlers[session_type]:
return self.handlers[session_type]['failed'](data)
return self.handlers[session_type]['failed'](data, statistics)
else:
self.log(f"Unknown handling endpoint: {session_type}", isWarning=True)
@ -78,13 +78,13 @@ class ARQDataTypeHandler:
else:
self.log(f"Unknown preparation endpoint: {session_type}", isWarning=True)
def transmitted(self, type_byte: int, data: bytearray):
def transmitted(self, type_byte: int, data: bytearray, statistics: dict):
session_type = self.get_session_type_from_value(type_byte)
self.state_manager.setARQ(False)
if session_type in self.handlers and 'transmitted' in self.handlers[session_type]:
return self.handlers[session_type]['transmitted'](data)
return self.handlers[session_type]['transmitted'](data, statistics)
else:
self.log(f"Unknown handling endpoint: {session_type}", isWarning=True)
@ -97,14 +97,14 @@ class ARQDataTypeHandler:
self.log(f"Preparing uncompressed data: {len(data)} Bytes")
return data
def handle_raw(self, data):
def handle_raw(self, data, statistics):
self.log(f"Handling uncompressed data: {len(data)} Bytes")
return data
def failed_raw(self, data):
def failed_raw(self, data, statistics):
return
def transmitted_raw(self, data):
def transmitted_raw(self, data, statistics):
return data
def prepare_raw_lzma(self, data):
@ -112,15 +112,15 @@ class ARQDataTypeHandler:
self.log(f"Preparing LZMA compressed data: {len(data)} Bytes >>> {len(compressed_data)} Bytes")
return compressed_data
def handle_raw_lzma(self, data):
def handle_raw_lzma(self, data, statistics):
decompressed_data = lzma.decompress(data)
self.log(f"Handling LZMA compressed data: {len(decompressed_data)} Bytes from {len(data)} Bytes")
return decompressed_data
def failed_raw_lzma(self, data):
def failed_raw_lzma(self, data, statistics):
return
def transmitted_raw_lzma(self, data):
def transmitted_raw_lzma(self, data, statistics):
decompressed_data = lzma.decompress(data)
return decompressed_data
@ -129,15 +129,15 @@ class ARQDataTypeHandler:
self.log(f"Preparing GZIP compressed data: {len(data)} Bytes >>> {len(compressed_data)} Bytes")
return compressed_data
def handle_raw_gzip(self, data):
def handle_raw_gzip(self, data, statistics):
decompressed_data = gzip.decompress(data)
self.log(f"Handling GZIP compressed data: {len(decompressed_data)} Bytes from {len(data)} Bytes")
return decompressed_data
def failed_raw_gzip(self, data):
def failed_raw_gzip(self, data, statistics):
return
def transmitted_raw_gzip(self, data):
def transmitted_raw_gzip(self, data, statistics):
decompressed_data = gzip.decompress(data)
return decompressed_data
@ -146,19 +146,19 @@ class ARQDataTypeHandler:
self.log(f"Preparing LZMA compressed P2PMSG data: {len(data)} Bytes >>> {len(compressed_data)} Bytes")
return compressed_data
def handle_p2pmsg_lzma(self, data):
def handle_p2pmsg_lzma(self, data, statistics):
decompressed_data = lzma.decompress(data)
self.log(f"Handling LZMA compressed P2PMSG data: {len(decompressed_data)} Bytes from {len(data)} Bytes")
message_received(self.event_manager, self.state_manager, decompressed_data)
message_received(self.event_manager, self.state_manager, decompressed_data, statistics)
return decompressed_data
def failed_p2pmsg_lzma(self, data):
def failed_p2pmsg_lzma(self, data, statistics):
decompressed_data = lzma.decompress(data)
self.log(f"Handling failed LZMA compressed P2PMSG data: {len(decompressed_data)} Bytes from {len(data)} Bytes", isWarning=True)
message_failed(self.event_manager, self.state_manager, decompressed_data)
message_failed(self.event_manager, self.state_manager, decompressed_data, statistics)
return decompressed_data
def transmitted_p2pmsg_lzma(self, data):
def transmitted_p2pmsg_lzma(self, data, statistics):
decompressed_data = lzma.decompress(data)
message_transmitted(self.event_manager, self.state_manager, decompressed_data)
message_transmitted(self.event_manager, self.state_manager, decompressed_data, statistics)
return decompressed_data

View file

@ -1,3 +1,4 @@
import datetime
import queue, threading
import codec2
import data_frame_factory
@ -44,6 +45,8 @@ class ARQSession():
self.modem = modem
self.speed_level = 0
self.previous_speed_level = 0
self.frames_per_burst = 1
self.frame_factory = data_frame_factory.DataFrameFactory(self.config)
@ -55,6 +58,11 @@ class ARQSession():
self.session_ended = 0
self.session_max_age = 500
# histogram lists for storing statistics
self.snr_histogram = []
self.bpm_histogram = []
self.time_histogram = []
def log(self, message, isWarning = False):
msg = f"[{type(self).__name__}][id={self.id}][state={self.state}]: {message}"
logger = self.logger.warn if isWarning else self.logger.info
@ -84,7 +92,7 @@ class ARQSession():
)
def set_details(self, snr, frequency_offset):
self.snr.append(snr)
self.snr = snr
self.frequency_offset = frequency_offset
def on_frame_received(self, frame):
@ -96,8 +104,7 @@ class ARQSession():
action_name = self.STATE_TRANSITION[self.state][frame_type]
received_data, type_byte = getattr(self, action_name)(frame)
if isinstance(received_data, bytearray) and isinstance(type_byte, int):
self.arq_data_type_handler.dispatch(type_byte, received_data)
self.arq_data_type_handler.dispatch(type_byte, received_data, self.update_histograms(len(received_data), len(received_data)))
return
self.log(f"Ignoring unknown transition from state {self.state.name} with frame {frame['frame_type']}")
@ -109,22 +116,49 @@ class ARQSession():
return False
def calculate_session_duration(self):
    """Return the session duration in seconds.

    While the session is still running (session_ended == 0) the duration
    is measured from session_started up to "now"; once ended, it is the
    recorded start-to-end span.
    """
    if self.session_ended == 0:
        return time.time() - self.session_started
    return self.session_ended - self.session_started
def calculate_session_statistics(self):
def calculate_session_statistics(self, confirmed_bytes, total_bytes):
duration = self.calculate_session_duration()
total_bytes = self.total_length
#total_bytes = self.total_length
# self.total_length
duration_in_minutes = duration / 60 # Convert duration from seconds to minutes
# Calculate bytes per minute
if duration_in_minutes > 0:
bytes_per_minute = int(total_bytes / duration_in_minutes)
bytes_per_minute = int(confirmed_bytes / duration_in_minutes)
else:
bytes_per_minute = 0
# Convert histograms lists to dictionaries
time_histogram_dict = {i: timestamp for i, timestamp in enumerate(self.time_histogram)}
snr_histogram_dict = {i: snr for i, snr in enumerate(self.snr_histogram)}
bpm_histogram_dict = {i: bpm for i, bpm in enumerate(self.bpm_histogram)}
return {
'total_bytes': total_bytes,
'duration': duration,
'bytes_per_minute': bytes_per_minute
}
'total_bytes': total_bytes,
'duration': duration,
'bytes_per_minute': bytes_per_minute,
'time_histogram': time_histogram_dict,
'snr_histogram': snr_histogram_dict,
'bpm_histogram': bpm_histogram_dict,
}
def update_histograms(self, confirmed_bytes, total_bytes):
    """Compute current session statistics and record one histogram sample.

    Appends the latest SNR, the computed bytes-per-minute, and an ISO-8601
    timestamp to the session's histogram lists, then returns the statistics
    dict so callers can forward it (e.g. to the data-type handler or events).
    """
    stats = self.calculate_session_statistics(confirmed_bytes, total_bytes)
    self.snr_histogram.append(self.snr)
    self.bpm_histogram.append(stats['bytes_per_minute'])
    self.time_histogram.append(datetime.datetime.now().isoformat())
    return stats
def get_appropriate_speed_level(self, snr):
    """Pick the best speed level supported by the given SNR.

    Every level whose configured 'min_snr' the SNR satisfies is a
    candidate; the highest candidate wins. When nothing fits, fall
    back to the lowest configured speed level.
    """
    fitting = [
        level
        for level, details in self.SPEED_LEVEL_DICT.items()
        if snr >= details['min_snr']
    ]
    return max(fitting) if fitting else min(self.SPEED_LEVEL_DICT.keys())

View file

@ -76,13 +76,8 @@ class ARQSessionIRS(arq_session.ARQSession):
self.received_bytes = 0
self.received_crc = None
self.transmitted_acks = 0
self.abort = False
def set_decode_mode(self):
self.modem.demodulator.set_decode_mode(self.get_mode_by_speed_level(self.speed_level))
def all_data_received(self):
return self.total_length == self.received_bytes
@ -110,7 +105,7 @@ class ARQSessionIRS(arq_session.ARQSession):
self.id,
self.dxcall,
self.version,
self.snr[0], flag_abort=self.abort)
self.snr, flag_abort=self.abort)
self.launch_transmit_and_wait(ack_frame, self.TIMEOUT_CONNECT, mode=FREEDV_MODE.signalling)
if not self.abort:
self.set_state(IRS_State.OPEN_ACK_SENT)
@ -124,14 +119,13 @@ class ARQSessionIRS(arq_session.ARQSession):
self.dx_snr.append(info_frame['snr'])
self.type_byte = info_frame['type']
self.calibrate_speed_settings()
self.log(f"New transfer of {self.total_length} bytes")
self.event_manager.send_arq_session_new(False, self.id, self.dxcall, self.total_length, self.state.name)
self.calibrate_speed_settings()
self.set_decode_mode()
info_ack = self.frame_factory.build_arq_session_info_ack(
self.id, self.total_crc, self.snr[0],
self.id, self.total_crc, self.snr,
self.speed_level, self.frames_per_burst, flag_abort=self.abort)
self.launch_transmit_and_wait(info_ack, self.TIMEOUT_CONNECT, mode=FREEDV_MODE.signalling)
if not self.abort:
@ -157,23 +151,26 @@ class ARQSessionIRS(arq_session.ARQSession):
self.received_bytes += len(data_part)
self.log(f"Received {self.received_bytes}/{self.total_length} bytes")
self.event_manager.send_arq_session_progress(
False, self.id, self.dxcall, self.received_bytes, self.total_length, self.state.name)
False, self.id, self.dxcall, self.received_bytes, self.total_length, self.state.name, self.calculate_session_statistics(self.received_bytes, self.total_length))
return True
def receive_data(self, burst_frame):
self.process_incoming_data(burst_frame)
self.calibrate_speed_settings()
# update statistics
self.update_histograms(self.received_bytes, self.total_length)
if not self.all_data_received():
self.calibrate_speed_settings(burst_frame=burst_frame)
ack = self.frame_factory.build_arq_burst_ack(
self.id, self.received_bytes,
self.speed_level, self.frames_per_burst, self.snr[0], flag_abort=self.abort)
self.id,
self.received_bytes,
self.speed_level,
self.frames_per_burst,
self.snr,
flag_abort=self.abort
)
self.set_decode_mode()
# increase ack counter
# self.transmitted_acks += 1
self.set_state(IRS_State.BURST_REPLY_SENT)
self.launch_transmit_and_wait(ack, self.TIMEOUT_DATA, mode=FREEDV_MODE.signalling)
return None, None
@ -184,7 +181,7 @@ class ARQSessionIRS(arq_session.ARQSession):
self.received_bytes,
self.speed_level,
self.frames_per_burst,
self.snr[0],
self.snr,
flag_final=True,
flag_checksum=True)
self.transmit_frame(ack, mode=FREEDV_MODE.signalling)
@ -192,7 +189,7 @@ class ARQSessionIRS(arq_session.ARQSession):
self.session_ended = time.time()
self.set_state(IRS_State.ENDED)
self.event_manager.send_arq_session_finished(
False, self.id, self.dxcall, True, self.state.name, data=self.received_data, statistics=self.calculate_session_statistics())
False, self.id, self.dxcall, True, self.state.name, data=self.received_data, statistics=self.calculate_session_statistics(self.received_bytes, self.total_length))
return self.received_data, self.type_byte
else:
@ -201,24 +198,53 @@ class ARQSessionIRS(arq_session.ARQSession):
self.received_bytes,
self.speed_level,
self.frames_per_burst,
self.snr[0],
self.snr,
flag_final=True,
flag_checksum=False)
self.transmit_frame(ack, mode=FREEDV_MODE.signalling)
self.log("CRC fail at the end of transmission!")
self.transmission_failed()
return self.transmission_failed()
def calibrate_speed_settings(self):
self.speed_level = 0 # for now stay at lowest speed level
return
# if we have two ACKS, then consider increasing speed level
if self.transmitted_acks >= 2:
self.transmitted_acks = 0
new_speed_level = min(self.speed_level + 1, len(self.SPEED_LEVEL_DICT) - 1)
def calibrate_speed_settings(self, burst_frame=None):
if burst_frame:
received_speed_level = burst_frame['speed_level']
else:
received_speed_level = 0
# check first if the next mode supports the actual snr
if self.snr[0] >= self.SPEED_LEVEL_DICT[new_speed_level]["min_snr"]:
self.speed_level = new_speed_level
latest_snr = self.snr if self.snr else -10
appropriate_speed_level = self.get_appropriate_speed_level(latest_snr)
modes_to_decode = {}
# Log the latest SNR, current, appropriate speed levels, and the previous speed level
self.log(
f"Latest SNR: {latest_snr}, Current Speed Level: {self.speed_level}, Appropriate Speed Level: {appropriate_speed_level}, Previous Speed Level: {self.previous_speed_level}",
isWarning=True)
# Adjust the speed level by one step towards the appropriate level, if needed
if appropriate_speed_level > self.speed_level and self.speed_level < len(self.SPEED_LEVEL_DICT) - 1:
# we need to ensure the received speed level equals our current one before changing it
if received_speed_level == self.speed_level:
self.speed_level += 1
elif appropriate_speed_level < self.speed_level and self.speed_level > 0:
# we need to ensure the received speed level equals our current one before changing it
if received_speed_level == self.speed_level:
self.speed_level -= 1
# Always decode the current mode
current_mode = self.get_mode_by_speed_level(self.speed_level).value
modes_to_decode[current_mode] = True
# Decode the previous speed level mode
if self.previous_speed_level != self.speed_level:
previous_mode = self.get_mode_by_speed_level(self.previous_speed_level).value
modes_to_decode[previous_mode] = True
self.previous_speed_level = self.speed_level # Update the previous speed level
self.log(f"Modes to Decode: {list(modes_to_decode.keys())}", isWarning=True)
# Apply the new decode mode based on the updated and previous speed levels
self.modem.demodulator.set_decode_mode(modes_to_decode)
return self.speed_level
def abort_transmission(self):
self.log(f"Aborting transmission... setting abort flag")
@ -228,8 +254,9 @@ class ARQSessionIRS(arq_session.ARQSession):
stop_ack = self.frame_factory.build_arq_stop_ack(self.id)
self.launch_transmit_and_wait(stop_ack, self.TIMEOUT_CONNECT, mode=FREEDV_MODE.signalling)
self.set_state(IRS_State.ABORTED)
self.states.setARQ(False)
self.event_manager.send_arq_session_finished(
False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics(self.received_bytes, self.total_length))
return None, None
def transmission_failed(self, irs_frame=None):
@ -237,6 +264,6 @@ class ARQSessionIRS(arq_session.ARQSession):
self.session_ended = time.time()
self.set_state(IRS_State.FAILED)
self.log(f"Transmission failed!")
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,False, self.state.name, statistics=self.calculate_session_statistics())
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,False, self.state.name, statistics=self.calculate_session_statistics(self.received_bytes, self.total_length))
self.states.setARQ(False)
return None, None

View file

@ -76,8 +76,7 @@ class ARQSessionISS(arq_session.ARQSession):
if len(self.state_manager.arq_iss_sessions) >= 255:
return False
def transmit_wait_and_retry(self, frame_or_burst, timeout, retries, mode):
def transmit_wait_and_retry(self, frame_or_burst, timeout, retries, mode, isARQBurst=False, ):
while retries > 0:
self.event_frame_received = threading.Event()
if isinstance(frame_or_burst, list): burst = frame_or_burst
@ -90,12 +89,19 @@ class ARQSessionISS(arq_session.ARQSession):
return
self.log("Timeout!")
retries = retries - 1
# TODO TEMPORARY TEST FOR SENDING IN LOWER SPEED LEVEL IF WE HAVE TWO FAILED TRANSMISSIONS!!!
if retries == 8 and isARQBurst and self.speed_level > 0:
self.log("SENDING IN FALLBACK SPEED LEVEL", isWarning=True)
self.speed_level = 0
self.send_data({'flag':{'ABORT': False, 'FINAL': False}, 'speed_level': self.speed_level})
return
self.set_state(ISS_State.FAILED)
self.transmission_failed()
def launch_twr(self, frame_or_burst, timeout, retries, mode):
twr = threading.Thread(target = self.transmit_wait_and_retry, args=[frame_or_burst, timeout, retries, mode], daemon=True)
def launch_twr(self, frame_or_burst, timeout, retries, mode, isARQBurst=False):
twr = threading.Thread(target = self.transmit_wait_and_retry, args=[frame_or_burst, timeout, retries, mode, isARQBurst], daemon=True)
twr.start()
def start(self):
@ -105,11 +111,27 @@ class ARQSessionISS(arq_session.ARQSession):
self.launch_twr(session_open_frame, self.TIMEOUT_CONNECT_ACK, self.RETRIES_CONNECT, mode=FREEDV_MODE.signalling)
self.set_state(ISS_State.OPEN_SENT)
def set_speed_and_frames_per_burst(self, frame):
    """Adopt the speed level and frames-per-burst values announced in an
    IRS frame, logging each value as it is applied."""
    for key, label in (("speed_level", "Speed level"),
                       ("frames_per_burst", "Frames per burst")):
        setattr(self, key, frame[key])
        self.log(f"{label} set to {getattr(self, key)}")
def update_speed_level(self, frame):
    """Adopt the speed level requested by the IRS via the received frame.

    The new level is applied only when the frame carries a 'speed_level'
    key and the value is inside the allowable range; every other case is
    logged as a warning and leaves self.speed_level untouched.

    NOTE(review): the range check assumes SPEED_LEVEL_DICT keys are the
    contiguous integers 0..len-1 — confirm against its definition.
    """
    self.log("---------------------------------------------------------", isWarning=True)
    # Log the received frame for debugging
    self.log(f"Received frame: {frame}", isWarning=True)
    # Extract the speed_level directly from the frame
    if 'speed_level' in frame:
        new_speed_level = frame['speed_level']
        # Ensure the new speed level is within the allowable range
        if 0 <= new_speed_level < len(self.SPEED_LEVEL_DICT):
            # Log the speed level change if it's different from the current speed level
            if new_speed_level != self.speed_level:
                self.log(f"Changing speed level from {self.speed_level} to {new_speed_level}", isWarning=True)
                self.speed_level = new_speed_level  # Update the current speed level
            else:
                self.log("Received speed level is the same as the current speed level.", isWarning=True)
        else:
            self.log(f"Received speed level {new_speed_level} is out of allowable range.", isWarning=True)
    else:
        self.log("No speed level specified in the received frame.", isWarning=True)
def send_info(self, irs_frame):
# check if we received an abort flag
@ -119,7 +141,7 @@ class ARQSessionISS(arq_session.ARQSession):
info_frame = self.frame_factory.build_arq_session_info(self.id, self.total_length,
helpers.get_crc_32(self.data),
self.snr[0], self.type_byte)
self.snr, self.type_byte)
self.launch_twr(info_frame, self.TIMEOUT_CONNECT_ACK, self.RETRIES_CONNECT, mode=FREEDV_MODE.signalling)
self.set_state(ISS_State.INFO_SENT)
@ -127,14 +149,15 @@ class ARQSessionISS(arq_session.ARQSession):
return None, None
def send_data(self, irs_frame):
# update statistics
self.update_histograms(self.confirmed_bytes, self.total_length)
self.set_speed_and_frames_per_burst(irs_frame)
self.update_speed_level(irs_frame)
if 'offset' in irs_frame:
self.confirmed_bytes = irs_frame['offset']
self.log(f"IRS confirmed {self.confirmed_bytes}/{self.total_length} bytes")
self.event_manager.send_arq_session_progress(
True, self.id, self.dxcall, self.confirmed_bytes, self.total_length, self.state.name)
True, self.id, self.dxcall, self.confirmed_bytes, self.total_length, self.state.name, statistics=self.calculate_session_statistics(self.confirmed_bytes, self.total_length))
# check if we received an abort flag
if irs_frame["flag"]["ABORT"]:
@ -156,9 +179,9 @@ class ARQSessionISS(arq_session.ARQSession):
payload = self.data[offset : offset + payload_size]
data_frame = self.frame_factory.build_arq_burst_frame(
self.SPEED_LEVEL_DICT[self.speed_level]["mode"],
self.id, self.confirmed_bytes, payload)
self.id, self.confirmed_bytes, payload, self.speed_level)
burst.append(data_frame)
self.launch_twr(burst, self.TIMEOUT_TRANSFER, self.RETRIES_CONNECT, mode='auto')
self.launch_twr(burst, self.TIMEOUT_TRANSFER, self.RETRIES_CONNECT, mode='auto', isARQBurst=True)
self.set_state(ISS_State.BURST_SENT)
return None, None
@ -167,10 +190,10 @@ class ARQSessionISS(arq_session.ARQSession):
self.session_ended = time.time()
self.set_state(ISS_State.ENDED)
self.log(f"All data transfered! flag_final={irs_frame['flag']['FINAL']}, flag_checksum={irs_frame['flag']['CHECKSUM']}")
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,True, self.state.name, statistics=self.calculate_session_statistics())
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,True, self.state.name, statistics=self.calculate_session_statistics(self.confirmed_bytes, self.total_length))
self.arq_data_type_handler.transmitted(self.type_byte, self.data, self.calculate_session_statistics(self.confirmed_bytes, self.total_length))
self.state_manager.remove_arq_iss_session(self.id)
self.states.setARQ(False)
self.arq_data_type_handler.transmitted(self.type_byte, self.data)
return None, None
def transmission_failed(self, irs_frame=None):
@ -178,10 +201,10 @@ class ARQSessionISS(arq_session.ARQSession):
self.session_ended = time.time()
self.set_state(ISS_State.FAILED)
self.log(f"Transmission failed!")
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,False, self.state.name, statistics=self.calculate_session_statistics())
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,False, self.state.name, statistics=self.calculate_session_statistics(self.confirmed_bytes, self.total_length))
self.states.setARQ(False)
self.arq_data_type_handler.failed(self.type_byte, self.data)
self.arq_data_type_handler.failed(self.type_byte, self.data, self.calculate_session_statistics(self.confirmed_bytes, self.total_length))
return None, None
def abort_transmission(self, irs_frame=None):
@ -190,7 +213,7 @@ class ARQSessionISS(arq_session.ARQSession):
self.set_state(ISS_State.ABORTING)
self.event_manager.send_arq_session_finished(
True, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
True, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics(self.confirmed_bytes, self.total_length))
# break actual retries
self.event_frame_received.set()
@ -210,7 +233,7 @@ class ARQSessionISS(arq_session.ARQSession):
self.event_frame_received.set()
self.event_manager.send_arq_session_finished(
True, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
True, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics(self.confirmed_bytes, self.total_length))
self.state_manager.remove_arq_iss_session(self.id)
self.states.setARQ(False)
return None, None

View file

@ -15,7 +15,7 @@ class SendMessageCommand(TxCommand):
def set_params_from_api(self, apiParams):
origin = f"{self.config['STATION']['mycall']}-{self.config['STATION']['myssid']}"
self.message = MessageP2P.from_api_params(origin, apiParams)
DatabaseManagerMessages(self.event_manager).add_message(self.message.to_dict(), direction='transmit', status='queued')
DatabaseManagerMessages(self.event_manager).add_message(self.message.to_dict(), statistics={}, direction='transmit', status='queued')
def transmit(self, modem):

View file

@ -3,7 +3,7 @@ modemport = 5000
[STATION]
mycall = AA1AAA
mygrid = AA12aa
mygrid = JN48ea
myssid = 1
ssid_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
enable_explorer = True

View file

@ -144,6 +144,7 @@ class DataFrameFactory:
self.template_list[FR_TYPE.ARQ_BURST_FRAME.value] = {
"frame_length": None,
"session_id": 1,
"speed_level": 1,
"offset": 4,
"data": "dynamic",
}
@ -394,9 +395,10 @@ class DataFrameFactory:
}
return self.construct(FR_TYPE.ARQ_SESSION_INFO_ACK, payload)
def build_arq_burst_frame(self, freedv_mode: codec2.FREEDV_MODE, session_id: int, offset: int, data: bytes):
def build_arq_burst_frame(self, freedv_mode: codec2.FREEDV_MODE, session_id: int, offset: int, data: bytes, speed_level: int):
payload = {
"session_id": session_id.to_bytes(1, 'big'),
"speed_level": speed_level.to_bytes(1, 'big'),
"offset": offset.to_bytes(4, 'big'),
"data": data,
}
@ -415,7 +417,6 @@ class DataFrameFactory:
if flag_abort:
flag = helpers.set_flag(flag, 'ABORT', True, self.ARQ_FLAGS)
payload = {
"session_id": session_id.to_bytes(1, 'big'),
"offset": offset.to_bytes(4, 'big'),

View file

@ -382,15 +382,18 @@ class Demodulator():
for mode in self.MODE_DICT:
codec2.api.freedv_set_sync(self.MODE_DICT[mode]["instance"], 0)
def set_decode_mode(self, mode):
for m in self.MODE_DICT: self.MODE_DICT[m]["decode"] = False
def set_decode_mode(self, modes_to_decode):
# Reset all modes to not decode
for m in self.MODE_DICT:
self.MODE_DICT[m]["decode"] = False
# signalling is always true
self.MODE_DICT[codec2.FREEDV_MODE.signalling.value]["decode"] = True
# Enable mode based on speed_level
self.MODE_DICT[mode.value]["decode"] = True
self.log.info(f"[MDM] [demod_audio] set data mode: {mode.name}")
# lowest speed level is always true
self.MODE_DICT[codec2.FREEDV_MODE.datac4.value]["decode"] = True
return
# Enable specified modes
for mode, decode in modes_to_decode.items():
if mode in self.MODE_DICT:
self.MODE_DICT[mode]["decode"] = decode

View file

@ -42,7 +42,10 @@ class EventManager:
}
self.broadcast(event)
def send_arq_session_progress(self, outbound: bool, session_id, dxcall, received_bytes, total_bytes, state):
def send_arq_session_progress(self, outbound: bool, session_id, dxcall, received_bytes, total_bytes, state, statistics=None):
if statistics is None:
statistics = {}
direction = 'outbound' if outbound else 'inbound'
event = {
"type": "arq",
@ -52,6 +55,7 @@ class EventManager:
'received_bytes': received_bytes,
'total_bytes': total_bytes,
'state': state,
'statistics': statistics,
}
}
self.broadcast(event)

View file

@ -9,7 +9,7 @@ Created on 05.11.23
import requests
import threading
import ujson as json
import json
import structlog
import sched
import time

View file

@ -91,7 +91,6 @@ class FrameHandler():
def add_to_heard_stations(self):
frame = self.details['frame']
print(frame)
if 'origin' not in frame:
return

View file

@ -7,23 +7,28 @@ from message_system_db_messages import DatabaseManagerMessages
#import command_message_send
def message_received(event_manager, state_manager, data):
def message_received(event_manager, state_manager, data, statistics):
decompressed_json_string = data.decode('utf-8')
received_message_obj = MessageP2P.from_payload(decompressed_json_string)
received_message_dict = MessageP2P.to_dict(received_message_obj)
DatabaseManagerMessages(event_manager).add_message(received_message_dict, direction='receive', status='received', is_read=False)
DatabaseManagerMessages(event_manager).add_message(received_message_dict, statistics, direction='receive', status='received', is_read=False)
def message_transmitted(event_manager, state_manager, data):
def message_transmitted(event_manager, state_manager, data, statistics):
decompressed_json_string = data.decode('utf-8')
payload_message_obj = MessageP2P.from_payload(decompressed_json_string)
payload_message = MessageP2P.to_dict(payload_message_obj)
# Todo we need to optimize this - WIP
DatabaseManagerMessages(event_manager).update_message(payload_message["id"], update_data={'status': 'transmitted'})
DatabaseManagerMessages(event_manager).update_message(payload_message["id"], update_data={'statistics': statistics})
def message_failed(event_manager, state_manager, data):
def message_failed(event_manager, state_manager, data, statistics):
decompressed_json_string = data.decode('utf-8')
payload_message_obj = MessageP2P.from_payload(decompressed_json_string)
payload_message = MessageP2P.to_dict(payload_message_obj)
# Todo we need to optimize this - WIP
DatabaseManagerMessages(event_manager).update_message(payload_message["id"], update_data={'status': 'failed'})
DatabaseManagerMessages(event_manager).update_message(payload_message["id"], update_data={'statistics': statistics})
class MessageP2P:
def __init__(self, id: str, origin: str, destination: str, body: str, attachments: list) -> None:

View file

@ -11,7 +11,7 @@ class DatabaseManagerMessages(DatabaseManager):
super().__init__(uri)
self.attachments_manager = DatabaseManagerAttachments(uri)
def add_message(self, message_data, direction='receive', status=None, is_read=True):
def add_message(self, message_data, statistics, direction='receive', status=None, is_read=True):
session = self.get_thread_scoped_session()
try:
# Create and add the origin and destination Stations
@ -34,7 +34,8 @@ class DatabaseManagerMessages(DatabaseManager):
direction=direction,
status_id=status.id if status else None,
is_read=is_read,
attempt=0
attempt=0,
statistics=statistics
)
session.add(new_message)
@ -130,6 +131,8 @@ class DatabaseManagerMessages(DatabaseManager):
message.body = update_data['body']
if 'status' in update_data:
message.status = self.get_or_create_status(session, update_data['status'])
if 'statistics' in update_data:
message.statistics = update_data['statistics']
session.commit()
self.log(f"Updated: {message_id}")

View file

@ -65,24 +65,36 @@ class ScheduleManager:
self.scheduler_thread.join()
def transmit_beacon(self):
if not self.state_manager.getARQ() and self.state_manager.is_beacon_running:
cmd = command_beacon.BeaconCommand(self.config, self.state_manager, self.event_manager)
cmd.run(self.event_manager, self.modem)
try:
if not self.state_manager.getARQ() and self.state_manager.is_beacon_running:
cmd = command_beacon.BeaconCommand(self.config, self.state_manager, self.event_manager)
cmd.run(self.event_manager, self.modem)
except Exception as e:
print(e)
def delete_beacons(self):
DatabaseManagerBeacon(self.event_manager).beacon_cleanup_older_than_days(2)
try:
DatabaseManagerBeacon(self.event_manager).beacon_cleanup_older_than_days(2)
except Exception as e:
print(e)
def push_to_explorer(self):
self.config = self.config_manager.read()
if self.config['STATION']['enable_explorer']:
explorer.explorer(self.modem_version, self.config_manager, self.state_manager).push()
try:
explorer.explorer(self.modem_version, self.config_manager, self.state_manager).push()
except Exception as e:
print(e)
def check_for_queued_messages(self):
if not self.state_manager.getARQ():
if DatabaseManagerMessages(self.event_manager).get_first_queued_message():
params = DatabaseManagerMessages(self.event_manager).get_first_queued_message()
command = command_message_send.SendMessageCommand(self.config_manager.read(), self.state_manager, self.event_manager, params)
command.transmit(self.modem)
try:
if first_queued_message := DatabaseManagerMessages(
self.event_manager
).get_first_queued_message():
command = command_message_send.SendMessageCommand(self.config_manager.read(), self.state_manager, self.event_manager, first_queued_message)
command.transmit(self.modem)
except Exception as e:
print(e)
return

View file

@ -9,7 +9,7 @@ import audio
import queue
import service_manager
import state_manager
import ujson as json
import json
import websocket_manager as wsm
import api_validations as validations
import command_cq
@ -29,7 +29,7 @@ app = Flask(__name__)
CORS(app)
CORS(app, resources={r"/*": {"origins": "*"}})
sock = Sock(app)
MODEM_VERSION = "0.13.7-alpha"
MODEM_VERSION = "0.14.0-alpha"
# set config file to use
def set_config():
@ -96,6 +96,9 @@ def index():
@app.route('/config', methods=['GET', 'POST'])
def config():
if request.method in ['POST']:
if not validations.validate_remote_config(request.json):
return api_abort("wrong config", 500)
# check if config already exists
if app.config_manager.read() == request.json:
return api_response(request.json)

View file

@ -1,5 +1,4 @@
import time
import ujson as json
import threading
import numpy as np
class StateManager:

View file

@ -8,7 +8,7 @@ Created on 05.11.23
# pylint: disable=import-outside-toplevel, attribute-defined-outside-init
import requests
import ujson as json
import json
import structlog
log = structlog.get_logger("stats")

View file

@ -5,7 +5,6 @@ PyAudio
pyserial
sounddevice
structlog
ujson
requests
chardet
colorama

View file

@ -144,7 +144,7 @@ class TestARQSession(unittest.TestCase):
self.waitAndCloseChannels()
del cmd
def DisabledtestARQSessionLargePayload(self):
def testARQSessionLargePayload(self):
# set Packet Error Rate (PER) / frame loss probability
self.loss_probability = 0

View file

@ -49,7 +49,7 @@ class TestDataFrameFactory(unittest.TestCase):
offset = 40
payload = b'Hello World!'
frame = self.factory.build_arq_burst_frame(FREEDV_MODE.datac3,
session_id, offset, payload)
session_id, offset, payload, 0)
frame_data = self.factory.deconstruct(frame)
self.assertEqual(frame_data['session_id'], session_id)
self.assertEqual(frame_data['offset'], offset)
@ -58,11 +58,11 @@ class TestDataFrameFactory(unittest.TestCase):
payload = payload * 1000
self.assertRaises(OverflowError, self.factory.build_arq_burst_frame,
FREEDV_MODE.datac3, session_id, offset, payload)
FREEDV_MODE.datac3, session_id, offset, payload, 0)
def testAvailablePayload(self):
avail = self.factory.get_available_data_payload_for_mode(FRAME_TYPE.ARQ_BURST_FRAME, FREEDV_MODE.datac3)
self.assertEqual(avail, 120) # 128 bytes datac3 frame payload - BURST frame overhead
self.assertEqual(avail, 119) # 128 bytes datac3 frame payload - BURST frame overhead
if __name__ == '__main__':
unittest.main()

View file

@ -22,21 +22,21 @@ class TestDispatcher(unittest.TestCase):
# Example usage
example_data = b"Hello FreeDATA!"
formatted_data, type_byte = self.arq_data_type_handler.prepare(example_data, ARQ_SESSION_TYPES.raw)
dispatched_data = self.arq_data_type_handler.dispatch(type_byte, formatted_data)
dispatched_data = self.arq_data_type_handler.dispatch(type_byte, formatted_data, statistics={})
self.assertEqual(example_data, dispatched_data)
def testDataTypeHandlerLZMA(self):
# Example usage
example_data = b"Hello FreeDATA!"
formatted_data, type_byte = self.arq_data_type_handler.prepare(example_data, ARQ_SESSION_TYPES.raw_lzma)
dispatched_data = self.arq_data_type_handler.dispatch(type_byte, formatted_data)
dispatched_data = self.arq_data_type_handler.dispatch(type_byte, formatted_data, statistics={})
self.assertEqual(example_data, dispatched_data)
def testDataTypeHandlerGZIP(self):
# Example usage
example_data = b"Hello FreeDATA!"
formatted_data, type_byte = self.arq_data_type_handler.prepare(example_data, ARQ_SESSION_TYPES.raw_gzip)
dispatched_data = self.arq_data_type_handler.dispatch(type_byte, formatted_data)
dispatched_data = self.arq_data_type_handler.dispatch(type_byte, formatted_data, statistics={})
self.assertEqual(example_data, dispatched_data)

View file

@ -37,7 +37,7 @@ class TestDataFrameFactory(unittest.TestCase):
payload = message.to_payload()
received_message = MessageP2P.from_payload(payload)
received_message_dict = MessageP2P.to_dict(received_message)
self.database_manager.add_message(received_message_dict)
self.database_manager.add_message(received_message_dict, statistics={})
result = self.database_manager.get_message_by_id(message.id)
self.assertEqual(result["destination"], message.destination)
@ -53,7 +53,7 @@ class TestDataFrameFactory(unittest.TestCase):
payload = message.to_payload()
received_message = MessageP2P.from_payload(payload)
received_message_dict = MessageP2P.to_dict(received_message)
self.database_manager.add_message(received_message_dict)
self.database_manager.add_message(received_message_dict, statistics={})
result = self.database_manager.get_all_messages()
message_id = result[0]["id"]
@ -75,7 +75,7 @@ class TestDataFrameFactory(unittest.TestCase):
received_message = MessageP2P.from_payload(payload)
received_message_dict = MessageP2P.to_dict(received_message)
print(received_message_dict)
message_id = self.database_manager.add_message(received_message_dict, direction='receive')
message_id = self.database_manager.add_message(received_message_dict, statistics={}, direction='receive')
print(message_id)
self.database_manager.update_message(message_id, {'body' : 'hello123'})
@ -103,7 +103,7 @@ class TestDataFrameFactory(unittest.TestCase):
payload = message.to_payload()
received_message = MessageP2P.from_payload(payload)
received_message_dict = MessageP2P.to_dict(received_message)
message_id = self.database_manager.add_message(received_message_dict)
message_id = self.database_manager.add_message(received_message_dict, statistics={})
result = self.database_manager_attachments.get_attachments_by_message_id(message_id)
attachment_names = [attachment['name'] for attachment in result]
self.assertIn('test1.gif', attachment_names)
@ -116,7 +116,7 @@ class TestDataFrameFactory(unittest.TestCase):
payload = message.to_payload()
received_message = MessageP2P.from_payload(payload)
received_message_dict = MessageP2P.to_dict(received_message)
message_id = self.database_manager.add_message(received_message_dict)
message_id = self.database_manager.add_message(received_message_dict,statistics={},)
self.database_manager.increment_message_attempts(message_id)
@ -129,7 +129,7 @@ class TestDataFrameFactory(unittest.TestCase):
payload = message.to_payload()
received_message = MessageP2P.from_payload(payload)
received_message_dict = MessageP2P.to_dict(received_message)
message_id = self.database_manager.add_message(received_message_dict, is_read=False)
message_id = self.database_manager.add_message(received_message_dict, statistics={},is_read=False)
self.database_manager.mark_message_as_read(message_id)
result = self.database_manager.get_message_by_id(message_id)

View file

@ -60,7 +60,7 @@ class TestDataFrameFactory(unittest.TestCase):
payload = message.to_payload()
received_message = MessageP2P.from_payload(payload)
received_message_dict = MessageP2P.to_dict(received_message)
self.database_manager.add_message(received_message_dict)
self.database_manager.add_message(received_message_dict, statistics={})
self.assertEqual(message.origin, received_message.origin)
self.assertEqual(message.destination, received_message.destination)

View file

@ -48,7 +48,7 @@ class TestIntegration(unittest.TestCase):
self.assertIn('RADIO', config)
def test_config_post(self):
config = {'NETWORK': {'modemport' : 3050}}
config = {'STATION': {'mygrid' : 'JN48ea'}}
r = requests.post(self.url + '/config',
headers={'Content-type': 'application/json'},
data = json.dumps(config))
@ -57,7 +57,7 @@ class TestIntegration(unittest.TestCase):
r = requests.get(self.url + '/config')
self.assertEqual(r.status_code, 200)
config = r.json()
self.assertEqual(config['NETWORK']['modemport'], 3050)
self.assertEqual(config['NETWORK']['modemport'], 5000)
if __name__ == '__main__':
unittest.main()