Mirror of https://github.com/DJ2LS/FreeDATA, synced 2024-05-14 08:04:33 +00:00

Commit d87579f9ac: Merge branch 'develop' of github.com:DJ2LS/FreeDATA into develop

10 changed files with 113 additions and 33 deletions
GUI settings component (Vue), adding the message auto-repeat switch:

@@ -5,9 +5,24 @@ import { setActivePinia } from "pinia";
 import pinia from "../store/index";
 setActivePinia(pinia);
 
-import { settingsStore as settings } from "../store/settingsStore.js";
+import { settingsStore as settings, onChange } from "../store/settingsStore.js";
 </script>
 
 <template>
-  <h5>...soon...</h5>
+  <div class="input-group input-group-sm mb-1">
+    <label class="input-group-text w-50">Enable message auto repeat</label>
+    <label class="input-group-text w-50">
+      <div class="form-check form-switch form-check-inline ms-2">
+        <input
+          class="form-check-input"
+          type="checkbox"
+          @change="onChange"
+          v-model="settings.remote.MESSAGES.enable_auto_repeat"
+        />
+        <label class="form-check-label" for="enableMessagesAutoRepeatSwitch"
+          >Re-send message on beacon</label
+        >
+      </div>
+    </label>
+  </div>
 </template>
GUI settings store defaults, adding the MESSAGES section:

@@ -97,6 +97,9 @@ const defaultConfig = {
     tci_ip: "127.0.0.1",
     tci_port: 0,
   },
+  MESSAGES: {
+    enable_auto_repeat: false,
+  },
 },
};
ARQ IRS session (class ARQSessionIRS), consolidating the failure handling into a single transmission_failed() helper:

@@ -96,9 +96,7 @@ class ARQSessionIRS(arq_session.ARQSession):
         self.log(f"Waiting {timeout} seconds...")
         if not self.event_frame_received.wait(timeout):
             self.log("Timeout waiting for ISS. Session failed.")
-            self.session_ended = time.time()
-            self.set_state(IRS_State.FAILED)
-            self.event_manager.send_arq_session_finished(False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
+            self.transmission_failed()
 
     def launch_transmit_and_wait(self, frame, timeout, mode):
         thread_wait = threading.Thread(target = self.transmit_and_wait,

@@ -208,11 +206,7 @@ class ARQSessionIRS(arq_session.ARQSession):
                                                   flag_checksum=False)
             self.transmit_frame(ack, mode=FREEDV_MODE.signalling)
             self.log("CRC fail at the end of transmission!")
-            self.session_ended = time.time()
-            self.set_state(IRS_State.FAILED)
-            self.event_manager.send_arq_session_finished(
-                False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
-            return False, False
+            self.transmission_failed()
 
     def calibrate_speed_settings(self):
         self.speed_level = 0  # for now stay at lowest speed level

@@ -236,4 +230,13 @@ class ARQSessionIRS(arq_session.ARQSession):
         self.set_state(IRS_State.ABORTED)
         self.event_manager.send_arq_session_finished(
             False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
         return None, None
+
+    def transmission_failed(self, irs_frame=None):
+        # final function for failed transmissions
+        self.session_ended = time.time()
+        self.set_state(IRS_State.FAILED)
+        self.log(f"Transmission failed!")
+        self.event_manager.send_arq_session_finished(True, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
+        self.states.setARQ(False)
+        return None, None
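For context on the timeout branch above: threading.Event.wait(timeout) returns False when the timeout expires without the event being set, which is what now routes the IRS session into transmission_failed(). A minimal, self-contained sketch of that pattern (the timeout value and the function name here are illustrative, not the project's actual code):

import threading

event_frame_received = threading.Event()

def wait_for_frame(timeout: float) -> bool:
    # True  -> another thread called event_frame_received.set() in time
    # False -> nothing arrived within `timeout` seconds (the failure branch)
    return event_frame_received.wait(timeout)

if not wait_for_frame(timeout=6.0):
    print("Timeout waiting for ISS. Session failed.")  # mirrors the log line above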
Example modem configuration (INI), new defaults and the MESSAGES section:

@@ -1,10 +1,10 @@
 [NETWORK]
-modemport = 3050
+modemport = 5000
 
 [STATION]
-mycall = XX1XXX
+mycall = AA1AAA
 mygrid = AA12aa
-myssid = 6
+myssid = 1
 ssid_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
 enable_explorer = True
 enable_stats = True

@@ -55,3 +55,6 @@ rx_buffer_size = 64
 tx_delay = 200
 beacon_interval = 300
 
+[MESSAGES]
+enable_auto_repeat = False
+
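A quick standard-library check of how the new [MESSAGES] section reads back, assuming a local copy of the example file; this uses configparser directly for illustration rather than the project's own CONFIG wrapper, and getboolean() converts the "False" string:

import configparser

parser = configparser.ConfigParser()
parser.read("config.ini.example")

auto_repeat = parser.getboolean("MESSAGES", "enable_auto_repeat", fallback=False)
print(auto_repeat)  # -> False with the defaults shown above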
Server-side config handling (class CONFIG), adding MESSAGES validation, error handling, and returning the write result:

@@ -66,6 +66,9 @@ class CONFIG:
             'tx_delay': int,
             'beacon_interval': int,
         },
+        'MESSAGES': {
+            'enable_auto_repeat': bool,
+        }
     }
 
     default_values = {

@@ -146,9 +149,7 @@ class CONFIG:
                     self.parser.set(section, setting, str(default_value))
                     self.log.info(f"[CFG] Adding missing setting: {section}.{setting}")
 
-        self.write_to_file()
-
-
+        return self.write_to_file()
 
     # Handle special setting data type conversion
     # is_writing means data from a dict being writen to the config file

@@ -176,7 +177,6 @@ class CONFIG:
     def write(self, data):
         # Validate config data before writing
        self.validate_data(data)
-
         for section in data:
             # init section if it doesn't exist yet
             if not section.upper() in self.parser.keys():

@@ -185,9 +185,11 @@ class CONFIG:
             for setting in data[section]:
                 new_value = self.handle_setting(
                     section, setting, data[section][setting], True)
-                self.parser[section][setting] = str(new_value)
-
-        self.write_to_file()
+                try:
+                    self.parser[section][setting] = str(new_value)
+                except Exception as e:
+                    self.log.error("[CFG] error setting config key", e=e)
+        return self.write_to_file()
 
     def write_to_file(self):
         # Write config data to file
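Because write() (and the missing-setting path) now returns the result of write_to_file() instead of discarding it, callers can treat a falsy return as a failed write; the /config route later in this commit does exactly that. A hedged sketch of the calling convention, assuming the CONFIG class lives in a config module and is constructed with the ini path:

from config import CONFIG  # assumption: module name and constructor signature

config_manager = CONFIG("config.ini")
payload = {"MESSAGES": {"enable_auto_repeat": True}}  # mirrors the new section

written = config_manager.write(payload)  # now returns write_to_file()'s result
if not written:
    print("error writing config")        # the failure branch the API reports
else:
    print("config written; only now is a modem restart triggered")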
Beacon frame handler (class BeaconFrameHandler), re-queueing failed messages when a beacon is received:

@@ -4,6 +4,7 @@ import data_frame_factory
 import frame_handler
 import datetime
 from message_system_db_beacon import DatabaseManagerBeacon
+from message_system_db_messages import DatabaseManagerMessages
 
 
 from message_system_db_manager import DatabaseManager

@@ -15,3 +16,7 @@ class BeaconFrameHandler(frame_handler.FrameHandler):
             self.details["snr"],
             self.details['frame']["gridsquare"]
         )
+
+        if self.config["MESSAGES"]["enable_auto_repeat"]:
+            # set message to queued if beacon received
+            DatabaseManagerMessages(self.event_manager).set_message_to_queued_for_callsign(self.details['frame']["origin"])
CQ frame handler (class CQFrameHandler), with the same auto-repeat hook on received CQ frames:

@@ -2,6 +2,9 @@ import frame_handler_ping
 import helpers
 import data_frame_factory
 import frame_handler
+from message_system_db_messages import DatabaseManagerMessages
+
+
 class CQFrameHandler(frame_handler_ping.PingFrameHandler):
 
     def should_respond(self):

@@ -14,3 +17,7 @@ class CQFrameHandler(frame_handler_ping.PingFrameHandler):
             self.details['snr']
         )
         self.transmit(qrv_frame)
+
+        if self.config["MESSAGES"]["enable_auto_repeat"]:
+            # set message to queued if CQ received
+            DatabaseManagerMessages(self.event_manager).set_message_to_queued_for_callsign(self.details['frame']["origin"])
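Both handlers gate the same action on the new setting: when a beacon or CQ is heard and auto repeat is enabled, failed messages addressed to that station are pushed back into the queue. A condensed, illustrative view of the shared hook; config, event_manager, and frame stand in for the handler attributes used above:

from message_system_db_messages import DatabaseManagerMessages

def maybe_requeue_for(frame: dict, config: dict, event_manager) -> None:
    # Only act when the new MESSAGES setting is enabled.
    if not config["MESSAGES"]["enable_auto_repeat"]:
        return
    # Same call the beacon and CQ handlers make: re-queue failed messages
    # addressed to the station we just heard from.
    DatabaseManagerMessages(event_manager).set_message_to_queued_for_callsign(frame["origin"])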
Message database manager (class DatabaseManagerMessages), adding the re-queue helper:

@@ -201,4 +201,44 @@ class DatabaseManagerMessages(DatabaseManager):
             session.rollback()
             self.log(f"An error occurred while marking message {message_id} as read: {e}")
         finally:
             session.remove()
+
+    def set_message_to_queued_for_callsign(self, callsign):
+        session = self.get_thread_scoped_session()
+        try:
+            # Find the 'failed' status object
+            failed_status = session.query(Status).filter_by(name='failed').first()
+            # Find the 'queued' status object
+            queued_status = session.query(Status).filter_by(name='queued').first()
+
+            # Ensure both statuses are found
+            if not failed_status or not queued_status:
+                self.log("Failed or queued status not found", isWarning=True)
+                return
+
+            # Query for messages with the specified callsign, 'failed' status, and fewer than 10 attempts
+            messages = session.query(P2PMessage) \
+                .filter(P2PMessage.origin_callsign == callsign) \
+                .filter(P2PMessage.status_id == failed_status.id) \
+                .filter(P2PMessage.attempt < 10) \
+                .all()
+
+            if messages:
+                # Update each message's status to 'queued'
+                for message in messages:
+                    # Increment attempt count using the existing function
+                    self.increment_message_attempts(message.id)
+
+                    message.status_id = queued_status.id
+                    self.log(f"Set message {message.id} to queued and incremented attempt")
+
+                session.commit()
+                return {'status': 'success', 'message': f'{len(messages)} message(s) set to queued'}
+            else:
+                return {'status': 'failure', 'message': 'No eligible messages found'}
+        except Exception as e:
+            session.rollback()
+            self.log(f"An error occurred while setting messages to queued: {e}", isWarning=True)
+            return {'status': 'failure', 'message': str(e)}
+        finally:
+            session.remove()
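An illustrative call showing the return shapes of the new helper: None when the 'failed'/'queued' status rows are missing, otherwise a status dict. The DatabaseManagerMessages constructor takes the event manager, as in the handler hunks above; event_manager here stands in for the application's instance:

from message_system_db_messages import DatabaseManagerMessages

def requeue_and_report(event_manager, callsign: str) -> None:
    result = DatabaseManagerMessages(event_manager).set_message_to_queued_for_callsign(callsign)
    if result is None:
        # the 'failed'/'queued' status rows were missing; the helper only logged a warning
        return
    # 'success' -> "<n> message(s) set to queued"; 'failure' -> reason or exception text
    print(result["status"], "-", result["message"])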
REST API server (Flask routes), restarting the modem only after a successful config write and guarding the ARQ session abort:

@@ -93,11 +93,12 @@ def index():
 @app.route('/config', methods=['GET', 'POST'])
 def config():
     if request.method in ['POST']:
+        print(request.json)
         set_config = app.config_manager.write(request.json)
-        app.modem_service.put("restart")
         if not set_config:
             response = api_response(None, 'error writing config')
         else:
+            app.modem_service.put("restart")
             response = api_response(set_config)
         return response
     elif request.method == 'GET':

@@ -222,10 +223,11 @@ def post_modem_send_raw_stop():
     if not app.state_manager.is_modem_running:
         api_abort('Modem not running', 503)
 
-    for id in app.state_manager.arq_irs_sessions:
-        app.state_manager.arq_irs_sessions[id].abort_transmission()
-    for id in app.state_manager.arq_iss_sessions:
-        app.state_manager.arq_iss_sessions[id].abort_transmission()
+    if app.state_manager.getARQ():
+        for id in app.state_manager.arq_irs_sessions:
+            app.state_manager.arq_irs_sessions[id].abort_transmission()
+        for id in app.state_manager.arq_iss_sessions:
+            app.state_manager.arq_iss_sessions[id].abort_transmission()
 
     return api_response(request.json)
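A hedged way to exercise the reworked /config route from a client: the host and port are taken from the example configuration above (modemport = 5000) and may differ locally, and the requests library is used purely for illustration:

import requests

response = requests.post(
    "http://127.0.0.1:5000/config",
    json={"MESSAGES": {"enable_auto_repeat": True}},
)
# On a successful write the modem service is restarted and the stored config is
# echoed back; on a failed write the response carries 'error writing config'.
print(response.status_code, response.json())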
ARQ session tests (class TestARQSession), raising the simulated frame loss and switching the test callsigns:

@@ -130,11 +130,11 @@ class TestARQSession(unittest.TestCase):
 
     def testARQSessionSmallPayload(self):
         # set Packet Error Rate (PER) / frame loss probability
-        self.loss_probability = 0
+        self.loss_probability = 30
 
         self.establishChannels()
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(bytes("Hello world!", encoding="utf-8")),
             'type': "raw_lzma"
         }

@@ -149,7 +149,7 @@ class TestARQSession(unittest.TestCase):
 
         self.establishChannels()
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(np.random.bytes(1000)),
             'type': "raw_lzma"
         }

@@ -165,7 +165,7 @@ class TestARQSession(unittest.TestCase):
 
         self.establishChannels()
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(np.random.bytes(100)),
         }
         cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)

@@ -184,7 +184,7 @@ class TestARQSession(unittest.TestCase):
 
         self.establishChannels()
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(np.random.bytes(100)),
         }
         cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)

@@ -200,7 +200,7 @@ class TestARQSession(unittest.TestCase):
     def testSessionCleanupISS(self):
 
         params = {
-            'dxcall': "XX1XXX-1",
+            'dxcall': "AA1AAA-1",
             'data': base64.b64encode(np.random.bytes(100)),
         }
         cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)
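testARQSessionSmallPayload now runs with loss_probability = 30, i.e. roughly 30% of frames are dropped between the simulated stations to force ARQ retransmissions. A minimal sketch of what such a drop probability typically looks like in a channel simulation (illustrative only; the real test wires its loss probability into its own frame-forwarding threads):

import random

LOSS_PROBABILITY = 30  # percent, as set in the small-payload test

def channel_delivers(frame: bytes) -> bool:
    # Drop roughly LOSS_PROBABILITY percent of frames so ARQ has to retransmit.
    return random.randint(0, 99) >= LOSS_PROBABILITY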