Mirror of https://github.com/DJ2LS/FreeDATA, synced 2024-05-14 08:04:33 +00:00
Merge branch 'develop' into dependabot/npm_and_yarn/gui/develop/vite-5.1.3
Commit 91941eec7b
10 changed files with 112 additions and 35 deletions

@@ -5,9 +5,24 @@ import { setActivePinia } from "pinia";
 import pinia from "../store/index";
 setActivePinia(pinia);

-import { settingsStore as settings } from "../store/settingsStore.js";
+import { settingsStore as settings, onChange } from "../store/settingsStore.js";
 </script>

 <template>
-  <h5>...soon...</h5>
+  <div class="input-group input-group-sm mb-1">
+    <label class="input-group-text w-50">Enable message auto repeat</label>
+    <label class="input-group-text w-50">
+      <div class="form-check form-switch form-check-inline ms-2">
+        <input
+          class="form-check-input"
+          type="checkbox"
+          @change="onChange"
+          v-model="settings.remote.MESSAGES.enable_auto_repeat"
+        />
+        <label class="form-check-label" for="enableMessagesAutoRepeatSwitch"
+          >Re-send message on beacon</label
+        >
+      </div>
+    </label>
+  </div>
 </template>

@@ -54,7 +54,6 @@ const defaultConfig = {
       enable_protocol: false,
     },
     MODEM: {
-      enable_fft: false,
       enable_fsk: false,
       enable_low_bandwidth_mode: false,
       respond_to_cq: false,

@@ -98,6 +97,9 @@ const defaultConfig = {
       tci_ip: "127.0.0.1",
       tci_port: 0,
     },
+    MESSAGES: {
+      enable_auto_repeat: false,
+    },
   },
 };

@@ -96,9 +96,7 @@ class ARQSessionIRS(arq_session.ARQSession):
         self.log(f"Waiting {timeout} seconds...")
         if not self.event_frame_received.wait(timeout):
             self.log("Timeout waiting for ISS. Session failed.")
-            self.session_ended = time.time()
-            self.set_state(IRS_State.FAILED)
-            self.event_manager.send_arq_session_finished(False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
+            self.transmission_failed()

     def launch_transmit_and_wait(self, frame, timeout, mode):
         thread_wait = threading.Thread(target = self.transmit_and_wait,

@@ -208,11 +206,7 @@ class ARQSessionIRS(arq_session.ARQSession):
                                                  flag_checksum=False)
             self.transmit_frame(ack, mode=FREEDV_MODE.signalling)
             self.log("CRC fail at the end of transmission!")
-            self.session_ended = time.time()
-            self.set_state(IRS_State.FAILED)
-            self.event_manager.send_arq_session_finished(
-                False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
-            return False, False
+            self.transmission_failed()

     def calibrate_speed_settings(self):
         self.speed_level = 0 # for now stay at lowest speed level

@@ -237,3 +231,12 @@ class ARQSessionIRS(arq_session.ARQSession):
             self.event_manager.send_arq_session_finished(
                 False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
             return None, None
+
+    def transmission_failed(self, irs_frame=None):
+        # final function for failed transmissions
+        self.session_ended = time.time()
+        self.set_state(IRS_State.FAILED)
+        self.log(f"Transmission failed!")
+        self.event_manager.send_arq_session_finished(True, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
+        self.states.setARQ(False)
+        return None, None

@@ -1,10 +1,10 @@
 [NETWORK]
-modemport = 3050
+modemport = 5000

 [STATION]
-mycall = XX1XXX
+mycall = AA1AAA
 mygrid = AA12aa
-myssid = 6
+myssid = 1
 ssid_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
 enable_explorer = True
 enable_stats = True

@@ -55,3 +55,6 @@ rx_buffer_size = 64
 tx_delay = 200
 beacon_interval = 300

+[MESSAGES]
+enable_auto_repeat = False

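The new [MESSAGES] section is a single boolean key, so it can be read with the standard library alone. A minimal sketch, assuming a config.ini laid out like the example above (this is not FreeDATA's own CONFIG class; file name and fallback are assumptions for illustration):

import configparser

parser = configparser.ConfigParser()
parser.read("config.ini")

# getboolean() understands True/False, yes/no, on/off, 1/0
auto_repeat = parser.getboolean("MESSAGES", "enable_auto_repeat", fallback=False)
print(f"message auto repeat enabled: {auto_repeat}")
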
@@ -31,7 +31,6 @@ class CONFIG:
            'control': str,
            'serial_port': str,
            'model_id': int,
-           'serial_port': str,
            'serial_speed': int,
            'data_bits': int,
            'stop_bits': int,

@@ -67,6 +66,9 @@ class CONFIG:
                'tx_delay': int,
                'beacon_interval': int,
            },
+           'MESSAGES': {
+               'enable_auto_repeat': bool,
+           }
        }

        default_values = {

@@ -147,9 +149,7 @@ class CONFIG:
                    self.parser.set(section, setting, str(default_value))
                    self.log.info(f"[CFG] Adding missing setting: {section}.{setting}")

-        self.write_to_file()
-
-
+        return self.write_to_file()

    # Handle special setting data type conversion
    # is_writing means data from a dict being writen to the config file

@@ -177,7 +177,6 @@ class CONFIG:
    def write(self, data):
        # Validate config data before writing
        self.validate_data(data)
-
        for section in data:
            # init section if it doesn't exist yet
            if not section.upper() in self.parser.keys():

@@ -186,9 +185,11 @@ class CONFIG:
            for setting in data[section]:
                new_value = self.handle_setting(
                    section, setting, data[section][setting], True)
-               self.parser[section][setting] = str(new_value)
-
-        self.write_to_file()
+               try:
+                   self.parser[section][setting] = str(new_value)
+               except Exception as e:
+                   self.log.error("[CFG] error setting config key", e=e)
+        return self.write_to_file()

    def write_to_file(self):
        # Write config data to file

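Two things change in CONFIG.write(): each key assignment is wrapped in try/except so a single bad key no longer aborts the whole write, and the method now returns the result of write_to_file() so callers can tell whether the config was actually persisted. A condensed sketch of that control flow, using a plain configparser instead of the project's CONFIG class (names and the return convention are simplified assumptions):

import configparser

def write_config(parser: configparser.ConfigParser, data: dict, path: str):
    for section, settings in data.items():
        if section not in parser:
            parser[section] = {}
        for key, value in settings.items():
            try:
                # configparser only stores strings
                parser[section][key] = str(value)
            except Exception as e:
                print(f"[CFG] error setting config key: {e}")
    try:
        with open(path, "w") as f:
            parser.write(f)
        # truthy result signals success to the caller (see the API handler below)
        return {section: dict(parser[section]) for section in parser.sections()}
    except Exception:
        return None
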
@@ -4,6 +4,7 @@ import data_frame_factory
 import frame_handler
 import datetime
 from message_system_db_beacon import DatabaseManagerBeacon
+from message_system_db_messages import DatabaseManagerMessages


 from message_system_db_manager import DatabaseManager

@@ -15,3 +16,7 @@ class BeaconFrameHandler(frame_handler.FrameHandler):
            self.details["snr"],
            self.details['frame']["gridsquare"]
        )
+
+        if self.config["MESSAGES"]["enable_auto_repeat"]:
+            # set message to queued if beacon received
+            DatabaseManagerMessages(self.event_manager).set_message_to_queued_for_callsign(self.details['frame']["origin"])

@@ -2,6 +2,9 @@ import frame_handler_ping
 import helpers
 import data_frame_factory
 import frame_handler
+from message_system_db_messages import DatabaseManagerMessages
+

 class CQFrameHandler(frame_handler_ping.PingFrameHandler):
+
     def should_respond(self):

@@ -14,3 +17,7 @@ class CQFrameHandler(frame_handler_ping.PingFrameHandler):
            self.details['snr']
        )
        self.transmit(qrv_frame)
+
+        if self.config["MESSAGES"]["enable_auto_repeat"]:
+            # set message to queued if CQ received
+            DatabaseManagerMessages(self.event_manager).set_message_to_queued_for_callsign(self.details['frame']["origin"])

@@ -202,3 +202,43 @@ class DatabaseManagerMessages(DatabaseManager):
            self.log(f"An error occurred while marking message {message_id} as read: {e}")
        finally:
            session.remove()
+
+    def set_message_to_queued_for_callsign(self, callsign):
+        session = self.get_thread_scoped_session()
+        try:
+            # Find the 'failed' status object
+            failed_status = session.query(Status).filter_by(name='failed').first()
+            # Find the 'queued' status object
+            queued_status = session.query(Status).filter_by(name='queued').first()
+
+            # Ensure both statuses are found
+            if not failed_status or not queued_status:
+                self.log("Failed or queued status not found", isWarning=True)
+                return
+
+            # Query for messages with the specified callsign, 'failed' status, and fewer than 10 attempts
+            messages = session.query(P2PMessage) \
+                .filter(P2PMessage.origin_callsign == callsign) \
+                .filter(P2PMessage.status_id == failed_status.id) \
+                .filter(P2PMessage.attempt < 10) \
+                .all()
+
+            if messages:
+                # Update each message's status to 'queued'
+                for message in messages:
+                    # Increment attempt count using the existing function
+                    self.increment_message_attempts(message.id)
+
+                    message.status_id = queued_status.id
+                    self.log(f"Set message {message.id} to queued and incremented attempt")
+
+                session.commit()
+                return {'status': 'success', 'message': f'{len(messages)} message(s) set to queued'}
+            else:
+                return {'status': 'failure', 'message': 'No eligible messages found'}
+        except Exception as e:
+            session.rollback()
+            self.log(f"An error occurred while setting messages to queued: {e}", isWarning=True)
+            return {'status': 'failure', 'message': str(e)}
+        finally:
+            session.remove()

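The new set_message_to_queued_for_callsign() is the hook the beacon and CQ handlers above rely on: when a station is heard, its failed messages with fewer than 10 attempts go back to the queued state and their attempt counter is bumped. A self-contained sketch of that same selection and update, using stand-in SQLAlchemy models and an in-memory database rather than the project's P2PMessage/Status classes:

from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Status(Base):
    __tablename__ = "status"
    id = Column(Integer, primary_key=True)
    name = Column(String)

class P2PMessage(Base):
    __tablename__ = "p2p_message"
    id = Column(Integer, primary_key=True)
    origin_callsign = Column(String)
    status_id = Column(Integer, ForeignKey("status.id"))
    attempt = Column(Integer, default=0)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

failed = Status(name="failed")
queued = Status(name="queued")
session.add_all([failed, queued])
session.commit()
session.add(P2PMessage(origin_callsign="AA1AAA-1", status_id=failed.id, attempt=2))
session.commit()

# Same filter as above: failed messages for this callsign with < 10 attempts
eligible = (session.query(P2PMessage)
            .filter(P2PMessage.origin_callsign == "AA1AAA-1")
            .filter(P2PMessage.status_id == failed.id)
            .filter(P2PMessage.attempt < 10)
            .all())
for message in eligible:
    message.attempt += 1           # mirrors increment_message_attempts()
    message.status_id = queued.id  # back to 'queued' so it can be resent
session.commit()
print(f"{len(eligible)} message(s) set to queued")
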
@@ -94,10 +94,10 @@ def index():
 def config():
     if request.method in ['POST']:
         set_config = app.config_manager.write(request.json)
-        app.modem_service.put("restart")
         if not set_config:
             response = api_response(None, 'error writing config')
         else:
+            app.modem_service.put("restart")
             response = api_response(set_config)
         return response
     elif request.method == 'GET':

@@ -222,10 +222,11 @@ def post_modem_send_raw_stop():
     if not app.state_manager.is_modem_running:
         api_abort('Modem not running', 503)

-    for id in app.state_manager.arq_irs_sessions:
-        app.state_manager.arq_irs_sessions[id].abort_transmission()
-    for id in app.state_manager.arq_iss_sessions:
-        app.state_manager.arq_iss_sessions[id].abort_transmission()
+    if app.state_manager.getARQ():
+        for id in app.state_manager.arq_irs_sessions:
+            app.state_manager.arq_irs_sessions[id].abort_transmission()
+        for id in app.state_manager.arq_iss_sessions:
+            app.state_manager.arq_iss_sessions[id].abort_transmission()

     return api_response(request.json)

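The practical effect of the config() change: when config_manager.write() returns a falsy value, the client receives the 'error writing config' response and the modem is not restarted; only a successful write triggers the restart. A hedged client-side check, assuming the handler is mounted at /config on the modemport from the example config above (both the path and the port are assumptions here):

import requests

new_config = {"MESSAGES": {"enable_auto_repeat": True}}
resp = requests.post("http://127.0.0.1:5000/config", json=new_config, timeout=5)

# On success the handler echoes the written config and schedules a modem
# restart; on failure it returns the error payload built by api_response().
print(resp.status_code, resp.json())
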
@@ -130,11 +130,11 @@ class TestARQSession(unittest.TestCase):

     def testARQSessionSmallPayload(self):
         # set Packet Error Rate (PER) / frame loss probability
-        self.loss_probability = 0
+        self.loss_probability = 30

         self.establishChannels()
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(bytes("Hello world!", encoding="utf-8")),
             'type': "raw_lzma"
         }

@@ -149,7 +149,7 @@ class TestARQSession(unittest.TestCase):

         self.establishChannels()
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(np.random.bytes(1000)),
             'type': "raw_lzma"
         }

@@ -165,7 +165,7 @@ class TestARQSession(unittest.TestCase):

         self.establishChannels()
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(np.random.bytes(100)),
         }
         cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)

@@ -184,7 +184,7 @@ class TestARQSession(unittest.TestCase):

         self.establishChannels()
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(np.random.bytes(100)),
         }
         cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)

@@ -200,7 +200,7 @@ class TestARQSession(unittest.TestCase):
     def testSessionCleanupISS(self):

         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(np.random.bytes(100)),
         }
         cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)

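Raising loss_probability from 0 to 30 makes the small-payload test run over a channel that drops roughly 30% of frames, so the ARQ retry path is actually exercised. A minimal illustration of that kind of lossy-channel model (this is a sketch of the idea, not the project's test harness):

import random

def lossy_channel(frames, loss_probability=30, seed=None):
    # Drop each frame with probability loss_probability percent.
    rng = random.Random(seed)
    delivered = []
    for frame in frames:
        if rng.randint(0, 99) < loss_probability:
            continue  # frame lost; the ARQ session has to retransmit it
        delivered.append(frame)
    return delivered

frames = [f"frame-{i}".encode() for i in range(20)]
print(f"delivered {len(lossy_channel(frames, seed=1))} of {len(frames)} frames")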