Merge branch 'develop' into dependabot/npm_and_yarn/gui/develop/eslint-plugin-prettier-5.1.3

DJ2LS 2024-02-19 07:23:22 +01:00 committed by GitHub
commit 084c1143ee
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
17 changed files with 201 additions and 62 deletions

View file

@@ -19,14 +19,6 @@ jobs:
python -m pip install --upgrade pip
pip install -r requirements.txt
- uses: robinraju/release-downloader@v1.9
with:
repository: "Hamlib/Hamlib"
fileName: " hamlib-w32-*.zip"
latest: true
extract: true
out-file-path: "modem/lib/hamlib/"
- uses: robinraju/release-downloader@v1.9
with:
repository: "Hamlib/Hamlib"

View file

@@ -2,7 +2,7 @@
"name": "FreeDATA",
"description": "FreeDATA Client application for connecting to FreeDATA server",
"private": true,
"version": "0.13.4-alpha",
"version": "0.13.6-alpha",
"main": "dist-electron/main/index.js",
"scripts": {
"start": "vite",
@@ -75,7 +75,7 @@
"eslint-plugin-promise": "6.1.1",
"eslint-plugin-vue": "9.20.1",
"typescript": "5.3.3",
"vite": "5.0.12",
"vite": "5.1.3",
"vite-plugin-electron": "0.28.0",
"vite-plugin-electron-renderer": "0.14.5",
"vitest": "1.2.2",

View file

@@ -5,9 +5,24 @@ import { setActivePinia } from "pinia";
import pinia from "../store/index";
setActivePinia(pinia);
import { settingsStore as settings } from "../store/settingsStore.js";
import { settingsStore as settings, onChange } from "../store/settingsStore.js";
</script>
<template>
<h5>...soon...</h5>
<div class="input-group input-group-sm mb-1">
<label class="input-group-text w-50">Enable message auto repeat</label>
<label class="input-group-text w-50">
<div class="form-check form-switch form-check-inline ms-2">
<input
class="form-check-input"
type="checkbox"
@change="onChange"
v-model="settings.remote.MESSAGES.enable_auto_repeat"
/>
<label class="form-check-label" for="enableMessagesAutoRepeatSwitch"
>Re-send message on beacon</label
>
</div>
</label>
</div>
</template>

View file

@@ -54,7 +54,6 @@ const defaultConfig = {
enable_protocol: false,
},
MODEM: {
enable_fft: false,
enable_fsk: false,
enable_low_bandwidth_mode: false,
respond_to_cq: false,
@@ -98,6 +97,9 @@ const defaultConfig = {
tci_ip: "127.0.0.1",
tci_port: 0,
},
MESSAGES: {
enable_auto_repeat: false,
},
},
};

View file

@@ -54,6 +54,9 @@ class ARQDataTypeHandler:
def dispatch(self, type_byte: int, data: bytearray):
session_type = self.get_session_type_from_value(type_byte)
self.state_manager.setARQ(False)
if session_type and session_type in self.handlers and 'handle' in self.handlers[session_type]:
return self.handlers[session_type]['handle'](data)
else:
@@ -61,6 +64,9 @@ class ARQDataTypeHandler:
def failed(self, type_byte: int, data: bytearray):
session_type = self.get_session_type_from_value(type_byte)
self.state_manager.setARQ(False)
if session_type in self.handlers and 'failed' in self.handlers[session_type]:
return self.handlers[session_type]['failed'](data)
else:
@@ -74,6 +80,9 @@ class ARQDataTypeHandler:
def transmitted(self, type_byte: int, data: bytearray):
session_type = self.get_session_type_from_value(type_byte)
self.state_manager.setARQ(False)
if session_type in self.handlers and 'transmitted' in self.handlers[session_type]:
return self.handlers[session_type]['transmitted'](data)
else:
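
The guards added to dispatch(), failed() and transmitted() assume that self.handlers maps each session type to a dict of named callbacks. A minimal standalone sketch of that assumed registry shape; the callback names 'handle'/'failed'/'transmitted' come from the diff, everything else here is illustrative:

# Hypothetical stand-alone sketch; the real registry is built inside
# ARQDataTypeHandler and is keyed by session-type values, not strings.
handlers = {
    "raw_lzma": {
        "handle": lambda data: print(f"received {len(data)} bytes"),
        "failed": lambda data: print("transfer failed"),
        "transmitted": lambda data: print("transfer confirmed"),
    },
}

def dispatch(session_type, data: bytearray):
    # Same guard as above: only call back if the session type is known
    # and a 'handle' callback was registered for it.
    if session_type and session_type in handlers and "handle" in handlers[session_type]:
        return handlers[session_type]["handle"](data)
    print(f"unknown session type {session_type!r}, dropping {len(data)} bytes")

dispatch("raw_lzma", bytearray(b"Hello world!"))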

View file

@@ -98,7 +98,6 @@ class ARQSession():
if isinstance(received_data, bytearray) and isinstance(type_byte, int):
self.arq_data_type_handler.dispatch(type_byte, received_data)
self.states.setARQ(False)
return
self.log(f"Ignoring unknown transition from state {self.state.name} with frame {frame['frame_type']}")

View file

@@ -96,9 +96,7 @@ class ARQSessionIRS(arq_session.ARQSession):
self.log(f"Waiting {timeout} seconds...")
if not self.event_frame_received.wait(timeout):
self.log("Timeout waiting for ISS. Session failed.")
self.session_ended = time.time()
self.set_state(IRS_State.FAILED)
self.event_manager.send_arq_session_finished(False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
self.transmission_failed()
def launch_transmit_and_wait(self, frame, timeout, mode):
thread_wait = threading.Thread(target = self.transmit_and_wait,
@@ -208,11 +206,7 @@ class ARQSessionIRS(arq_session.ARQSession):
flag_checksum=False)
self.transmit_frame(ack, mode=FREEDV_MODE.signalling)
self.log("CRC fail at the end of transmission!")
self.session_ended = time.time()
self.set_state(IRS_State.FAILED)
self.event_manager.send_arq_session_finished(
False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
return False, False
self.transmission_failed()
def calibrate_speed_settings(self):
self.speed_level = 0 # for now stay at lowest speed level
@@ -237,3 +231,12 @@ class ARQSessionIRS(arq_session.ARQSession):
self.event_manager.send_arq_session_finished(
False, self.id, self.dxcall, False, self.state.name, statistics=self.calculate_session_statistics())
return None, None
def transmission_failed(self, irs_frame=None):
# final function for failed transmissions
self.session_ended = time.time()
self.set_state(IRS_State.FAILED)
self.log(f"Transmission failed!")
self.event_manager.send_arq_session_finished(True, self.id, self.dxcall,False, self.state.name, statistics=self.calculate_session_statistics())
self.states.setARQ(False)
return None, None

View file

@@ -1,10 +1,10 @@
[NETWORK]
modemport = 3050
modemport = 5000
[STATION]
mycall = XX1XXX
mycall = AA1AAA
mygrid = AA12aa
myssid = 6
myssid = 1
ssid_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
enable_explorer = True
enable_stats = True
@@ -21,7 +21,7 @@ ip = 127.0.0.1
port = 4532
path =
command =
arguments = --cenas
arguments =
[RADIO]
control = disabled
@@ -55,3 +55,6 @@ rx_buffer_size = 64
tx_delay = 200
beacon_interval = 300
[MESSAGES]
enable_auto_repeat = False
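
For reference, a minimal sketch of how the new flag reads back through the standard library, independent of the project's CONFIG wrapper; the INI snippet mirrors the section added above:

import configparser

parser = configparser.ConfigParser(inline_comment_prefixes="#", allow_no_value=True)
parser.read_string("""
[MESSAGES]
enable_auto_repeat = False
""")
# getboolean() converts the stored string "False" into a Python bool
print(parser.getboolean("MESSAGES", "enable_auto_repeat"))  # False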

View file

@@ -31,7 +31,6 @@ class CONFIG:
'control': str,
'serial_port': str,
'model_id': int,
'serial_port': str,
'serial_speed': int,
'data_bits': int,
'stop_bits': int,
@@ -56,7 +55,6 @@ class CONFIG:
'enable_protocol': bool,
},
'MODEM': {
'enable_fft': bool,
'tuning_range_fmax': int,
'tuning_range_fmin': int,
'enable_fsk': bool,
@@ -68,12 +66,22 @@ class CONFIG:
'tx_delay': int,
'beacon_interval': int,
},
'MESSAGES': {
'enable_auto_repeat': bool,
}
}
default_values = {
list: '[]',
bool: 'False',
int: '0',
str: '',
}
def __init__(self, configfile: str):
# set up logger
self.log = structlog.get_logger("CONFIG")
self.log = structlog.get_logger(type(self).__name__)
# init configparser
self.parser = configparser.ConfigParser(inline_comment_prefixes="#", allow_no_value=True)
@@ -88,6 +96,9 @@ class CONFIG:
# check if config file exists
self.config_exists()
# validate config structure
self.validate_config()
def config_exists(self):
"""
check if config file exists
@@ -99,7 +110,7 @@ class CONFIG:
return False
# Validates config data
def validate(self, data):
def validate_data(self, data):
for section in data:
for setting in data[section]:
if not isinstance(data[section][setting], self.config_types[section][setting]):
@@ -107,6 +118,39 @@ class CONFIG:
f" '{data[section][setting]}' {type(data[section][setting])} given.")
raise ValueError(message)
def validate_config(self):
"""
Updates the configuration file to match exactly what is defined in self.config_types.
It removes sections and settings not defined there and adds missing sections and settings.
"""
existing_sections = self.parser.sections()
# Remove sections and settings not defined in self.config_types
for section in existing_sections:
if section not in self.config_types:
self.parser.remove_section(section)
self.log.info(f"[CFG] Removing undefined section: {section}")
continue
existing_settings = self.parser.options(section)
for setting in existing_settings:
if setting not in self.config_types[section]:
self.parser.remove_option(section, setting)
self.log.info(f"[CFG] Removing undefined setting: {section}.{setting}")
# Add missing sections and settings from self.config_types
for section, settings in self.config_types.items():
if section not in existing_sections:
self.parser.add_section(section)
self.log.info(f"[CFG] Adding missing section: {section}")
for setting, value_type in settings.items():
if not self.parser.has_option(section, setting):
default_value = self.default_values.get(value_type, None)
self.parser.set(section, setting, str(default_value))
self.log.info(f"[CFG] Adding missing setting: {section}.{setting}")
return self.write_to_file()
# Handle special setting data type conversion
# is_writing means data from a dict being written to the config file
# if False, it means the opposite direction
@@ -132,8 +176,7 @@ class CONFIG:
# Sets and writes config data from a dict containing data settings
def write(self, data):
# Validate config data before writing
self.validate(data)
self.validate_data(data)
for section in data:
# init section if it doesn't exist yet
if not section.upper() in self.parser.keys():
@@ -142,8 +185,13 @@ class CONFIG:
for setting in data[section]:
new_value = self.handle_setting(
section, setting, data[section][setting], True)
try:
self.parser[section][setting] = str(new_value)
except Exception as e:
self.log.error("[CFG] error setting config key", e=e)
return self.write_to_file()
def write_to_file(self):
# Write config data to file
try:
with open(self.config_name, 'w') as configfile:
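
A self-contained sketch of the validate_config() idea above, assuming a tiny schema: undefined sections and settings are pruned, and missing ones are added with defaults derived from the declared type. The names mirror the diff, but this snippet is illustrative rather than the project code:

import configparser

config_types = {"MESSAGES": {"enable_auto_repeat": bool}}
default_values = {list: "[]", bool: "False", int: "0", str: ""}

parser = configparser.ConfigParser()
parser.read_string("[OBSOLETE]\nleftover = 1\n")

# Remove sections and settings that are not part of the schema
for section in parser.sections():
    if section not in config_types:
        parser.remove_section(section)
        continue
    for setting in parser.options(section):
        if setting not in config_types[section]:
            parser.remove_option(section, setting)

# Add missing sections and settings with type-based defaults
for section, settings in config_types.items():
    if not parser.has_section(section):
        parser.add_section(section)
    for setting, value_type in settings.items():
        if not parser.has_option(section, setting):
            parser.set(section, setting, default_values.get(value_type, ""))

print({s: dict(parser[s]) for s in parser.sections()})
# -> {'MESSAGES': {'enable_auto_repeat': 'False'}}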

View file

@@ -4,6 +4,7 @@ import data_frame_factory
import frame_handler
import datetime
from message_system_db_beacon import DatabaseManagerBeacon
from message_system_db_messages import DatabaseManagerMessages
from message_system_db_manager import DatabaseManager
@@ -15,3 +16,7 @@ class BeaconFrameHandler(frame_handler.FrameHandler):
self.details["snr"],
self.details['frame']["gridsquare"]
)
if self.config["MESSAGES"]["enable_auto_repeat"]:
# set message to queued if beacon received
DatabaseManagerMessages(self.event_manager).set_message_to_queued_for_callsign(self.details['frame']["origin"])

View file

@@ -2,6 +2,9 @@ import frame_handler_ping
import helpers
import data_frame_factory
import frame_handler
from message_system_db_messages import DatabaseManagerMessages
class CQFrameHandler(frame_handler_ping.PingFrameHandler):
def should_respond(self):
@@ -14,3 +17,7 @@ class CQFrameHandler(frame_handler_ping.PingFrameHandler):
self.details['snr']
)
self.transmit(qrv_frame)
if self.config["MESSAGES"]["enable_auto_repeat"]:
# set message to queued if CQ received
DatabaseManagerMessages(self.event_manager).set_message_to_queued_for_callsign(self.details['frame']["origin"])

View file

@@ -18,6 +18,7 @@ from pathlib import Path
import platform
import subprocess
import psutil
import glob
log = structlog.get_logger("helpers")
@@ -738,7 +739,8 @@ def find_binary_path(binary_name="rigctld", search_system_wide=False):
if platform.system() != 'Windows':
system_paths.extend(['/usr/bin', '/usr/local/bin', '/bin'])
else:
system_paths.extend(['C:\\Windows\\System32', 'C:\\Windows'])
system_paths.extend(glob.glob("C:\\Program Files\\Hamlib*\\bin"))
system_paths.extend(glob.glob("C:\\Program Files (x86)\\Hamlib*\\bin"))
for path in system_paths:
potential_path = os.path.join(path, binary_name)
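
A minimal sketch of the Windows lookup change above: glob picks up any versioned Hamlib install directory under Program Files instead of relying on System32. It runs anywhere; on non-Windows systems the globs simply match nothing:

import glob
import os

def candidate_rigctld_paths(binary_name="rigctld"):
    # Matches e.g. "C:\Program Files\Hamlib-4.5.5\bin" when such a folder exists
    system_paths = []
    system_paths.extend(glob.glob("C:\\Program Files\\Hamlib*\\bin"))
    system_paths.extend(glob.glob("C:\\Program Files (x86)\\Hamlib*\\bin"))
    return [os.path.join(path, binary_name) for path in system_paths]

print(candidate_rigctld_paths())  # [] on a machine without a Hamlib install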

View file

@@ -202,3 +202,43 @@ class DatabaseManagerMessages(DatabaseManager):
self.log(f"An error occurred while marking message {message_id} as read: {e}")
finally:
session.remove()
def set_message_to_queued_for_callsign(self, callsign):
session = self.get_thread_scoped_session()
try:
# Find the 'failed' status object
failed_status = session.query(Status).filter_by(name='failed').first()
# Find the 'queued' status object
queued_status = session.query(Status).filter_by(name='queued').first()
# Ensure both statuses are found
if not failed_status or not queued_status:
self.log("Failed or queued status not found", isWarning=True)
return
# Query for messages with the specified callsign, 'failed' status, and fewer than 10 attempts
messages = session.query(P2PMessage) \
.filter(P2PMessage.origin_callsign == callsign) \
.filter(P2PMessage.status_id == failed_status.id) \
.filter(P2PMessage.attempt < 10) \
.all()
if messages:
# Update each message's status to 'queued'
for message in messages:
# Increment attempt count using the existing function
self.increment_message_attempts(message.id)
message.status_id = queued_status.id
self.log(f"Set message {message.id} to queued and incremented attempt")
session.commit()
return {'status': 'success', 'message': f'{len(messages)} message(s) set to queued'}
else:
return {'status': 'failure', 'message': 'No eligible messages found'}
except Exception as e:
session.rollback()
self.log(f"An error occurred while setting messages to queued: {e}", isWarning=True)
return {'status': 'failure', 'message': str(e)}
finally:
session.remove()
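
A hedged stand-in showing the return contract of set_message_to_queued_for_callsign(): failed messages for the calling station with fewer than 10 attempts get their attempt count bumped and go back to 'queued'. The real method queries the message database; this sketch only models the outcome on plain dicts:

def requeue_failed_messages(callsign, messages):
    # messages: list of dicts with 'origin', 'status' and 'attempt' keys
    eligible = [m for m in messages
                if m["origin"] == callsign and m["status"] == "failed" and m["attempt"] < 10]
    for message in eligible:
        message["attempt"] += 1   # mirrors increment_message_attempts()
        message["status"] = "queued"
    if eligible:
        return {"status": "success", "message": f"{len(eligible)} message(s) set to queued"}
    return {"status": "failure", "message": "No eligible messages found"}

print(requeue_failed_messages("AA1AAA-1",
                              [{"origin": "AA1AAA-1", "status": "failed", "attempt": 3}]))
# -> {'status': 'success', 'message': '1 message(s) set to queued'}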

View file

@@ -221,6 +221,7 @@ class radio:
def format_rigctld_args(self):
config = self.config['RADIO'] # Accessing the 'RADIO' section of the INI file
config_rigctld = self.config['RIGCTLD'] # Accessing the 'RIGCTLD' section of the INI file for custom args
args = []
# Helper function to check if the value should be ignored
@@ -228,28 +229,40 @@
return value in ['ignore', 0]
# Model ID, Serial Port, and Speed
if not should_ignore(config.get('model_id', "0")):
if not should_ignore(config.get('model_id')):
args += ['-m', str(config['model_id'])]
if not should_ignore(config.get('serial_port', "0")):
if not should_ignore(config.get('serial_port')):
args += ['-r', config['serial_port']]
if not should_ignore(config.get('serial_speed', "0")):
if not should_ignore(config.get('serial_speed')):
args += ['-s', str(config['serial_speed'])]
# PTT Port and Type
if not should_ignore(config.get('ptt_port', "0")):
if not should_ignore(config.get('ptt_port')):
args += ['--ptt-port', config['ptt_port']]
if not should_ignore(config.get('ptt_type', "0")):
if not should_ignore(config.get('ptt_type')):
args += ['--ptt-type', config['ptt_type']]
# Serial DCD and DTR
if not should_ignore(config.get('serial_dcd', "0")):
args += ['--set-dcd', config['serial_dcd']]
if not should_ignore(config.get('serial_dtr', "0")):
args += ['--set-dtr', config['serial_dtr']]
if not should_ignore(config.get('serial_dcd')):
args += ['--dcd-type', config['serial_dcd']]
# Handling Stop Bits with the corrected --set-conf syntax
if not should_ignore(config.get('stop_bits', "0")):
if not should_ignore(config.get('serial_dtr')):
args += ['--set-conf', f'dtr_state={config["serial_dtr"]}']
# Handling Data Bits and Stop Bits
if not should_ignore(config.get('data_bits')):
args += ['--set-conf', f'data_bits={config["data_bits"]}']
if not should_ignore(config.get('stop_bits')):
args += ['--set-conf', f'stop_bits={config["stop_bits"]}']
# Fixme #rts_state
# if not should_ignore(config.get('rts_state')):
# args += ['--set-conf', f'stop_bits={config["rts_state"]}']
# Handle custom arguments for rigctld
# Custom args are split via ' ' so Python doesn't add extraneous quotes on Windows
args += config_rigctld["arguments"].split(" ")
#print("Hamlib args ==>" + str(args))
return args
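
A minimal sketch of the argument mapping above, using plain dicts in place of the parsed [RADIO] and [RIGCTLD] sections; the sample values are illustrative, not project defaults:

def build_rigctld_args(radio_cfg, rigctld_cfg):
    def should_ignore(value):
        return value in ["ignore", 0]

    args = []
    if not should_ignore(radio_cfg.get("model_id")):
        args += ["-m", str(radio_cfg["model_id"])]
    if not should_ignore(radio_cfg.get("serial_port")):
        args += ["-r", radio_cfg["serial_port"]]
    if not should_ignore(radio_cfg.get("stop_bits")):
        # extended settings go through rigctld's --set-conf key=value mechanism
        args += ["--set-conf", f"stop_bits={radio_cfg['stop_bits']}"]
    # custom args are split on spaces; empty entries are dropped here for clarity
    args += [a for a in rigctld_cfg.get("arguments", "").split(" ") if a]
    return args

print(build_rigctld_args({"model_id": 3085, "serial_port": "/dev/ttyUSB0", "stop_bits": 1},
                         {"arguments": ""}))
# -> ['-m', '3085', '-r', '/dev/ttyUSB0', '--set-conf', 'stop_bits=1']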

View file

@@ -70,7 +70,7 @@ class ScheduleManager:
cmd.run(self.event_manager, self.modem)
def delete_beacons(self):
DatabaseManagerBeacon(self.event_manager).beacon_cleanup_older_than_days(14)
DatabaseManagerBeacon(self.event_manager).beacon_cleanup_older_than_days(2)
def push_to_explorer(self):
self.config = self.config_manager.read()

View file

@@ -29,7 +29,7 @@ app = Flask(__name__)
CORS(app)
CORS(app, resources={r"/*": {"origins": "*"}})
sock = Sock(app)
MODEM_VERSION = "0.13.4-alpha"
MODEM_VERSION = "0.13.6-alpha"
# set config file to use
def set_config():
@@ -94,10 +94,10 @@ def index():
def config():
if request.method in ['POST']:
set_config = app.config_manager.write(request.json)
app.modem_service.put("restart")
if not set_config:
response = api_response(None, 'error writing config')
else:
app.modem_service.put("restart")
response = api_response(set_config)
return response
elif request.method == 'GET':
@@ -222,6 +222,7 @@ def post_modem_send_raw_stop():
if not app.state_manager.is_modem_running:
api_abort('Modem not running', 503)
if app.state_manager.getARQ():
for id in app.state_manager.arq_irs_sessions:
app.state_manager.arq_irs_sessions[id].abort_transmission()
for id in app.state_manager.arq_iss_sessions:

View file

@@ -130,11 +130,11 @@ class TestARQSession(unittest.TestCase):
def testARQSessionSmallPayload(self):
# set Packet Error Rate (PER) / frame loss probability
self.loss_probability = 0
self.loss_probability = 30
self.establishChannels()
params = {
'dxcall': "XX1XXX-1",
'dxcall': "AA1AAA-1",
'data': base64.b64encode(bytes("Hello world!", encoding="utf-8")),
'type': "raw_lzma"
}
@@ -149,7 +149,7 @@ class TestARQSession(unittest.TestCase):
self.establishChannels()
params = {
'dxcall': "XX1XXX-1",
'dxcall': "AA1AAA-1",
'data': base64.b64encode(np.random.bytes(1000)),
'type': "raw_lzma"
}
@@ -165,7 +165,7 @@ class TestARQSession(unittest.TestCase):
self.establishChannels()
params = {
'dxcall': "XX1XXX-1",
'dxcall': "AA1AAA-1",
'data': base64.b64encode(np.random.bytes(100)),
}
cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)
@@ -184,7 +184,7 @@ class TestARQSession(unittest.TestCase):
self.establishChannels()
params = {
'dxcall': "XX1XXX-1",
'dxcall': "AA1AAA-1",
'data': base64.b64encode(np.random.bytes(100)),
}
cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)
@@ -200,7 +200,7 @@ class TestARQSession(unittest.TestCase):
def testSessionCleanupISS(self):
params = {
'dxcall': "XX1XXX-1",
'dxcall': "AA1AAA-1",
'data': base64.b64encode(np.random.bytes(100)),
}
cmd = ARQRawCommand(self.config, self.iss_state_manager, self.iss_event_queue, params)