// Source: code.octet-stream.net Git — m17rt — m17codec2/src/lib.rs
// Commit: Implement a fair bit of SoftModulator
1 use codec2::{Codec2, Codec2Mode};
2 use cpal::traits::DeviceTrait;
3 use cpal::traits::HostTrait;
4 use cpal::traits::StreamTrait;
5 use cpal::{Sample, SampleFormat, SampleRate};
6 use log::debug;
7 use m17app::adapter::StreamAdapter;
8 use m17app::app::TxHandle;
9 use m17core::protocol::LsfFrame;
10 use std::collections::VecDeque;
11 use std::fs::File;
12 use std::io::Write;
13 use std::path::Path;
14 use std::sync::{
15 mpsc::{channel, Receiver, Sender},
16 Arc, Mutex,
17 };
18
19 pub fn decode_codec2<P: AsRef<Path>>(data: &[u8], out_path: P) -> Vec<i16> {
20 let codec2 = Codec2::new(Codec2Mode::MODE_3200);
21 let var_name = codec2;
22 let mut codec = var_name;
23 let mut all_samples: Vec<i16> = vec![];
24 for i in 0..(data.len() / 8) {
25 let mut samples = vec![0; codec.samples_per_frame()];
26 codec.decode(&mut samples, &data[i * 8..((i + 1) * 8)]);
27 all_samples.append(&mut samples);
28 }
29
30 // dude this works
31 let mut speech_out = File::create(out_path).unwrap();
32 for b in &all_samples {
33 speech_out.write_all(&b.to_le_bytes()).unwrap();
34 }
35 all_samples
36 }
37
/// A stream adapter that decodes incoming M17 voice frames with Codec2
/// and plays the resulting audio on a local sound card via cpal.
pub struct Codec2Adapter {
    // Shared with the playback thread spawned in `adapter_registered`.
    state: Arc<Mutex<AdapterState>>,
    // TODO: make this configurable
    output_card: String,
}
43
44 impl Codec2Adapter {
45 pub fn new() -> Self {
46 Self {
47 state: Arc::new(Mutex::new(AdapterState {
48 tx: None,
49 out_buf: VecDeque::new(),
50 codec2: Codec2::new(Codec2Mode::MODE_3200),
51 end_tx: None,
52 })),
53 // TODO: this doesn't work on rpi. Use default_output_device() by default
54 output_card: "default".to_owned(),
55 }
56 }
57 }
58
/// State shared between the adapter and its audio playback thread.
struct AdapterState {
    tx: Option<TxHandle>,
    /// Circular buffer of output samples for playback
    out_buf: VecDeque<i16>,
    /// Decoder state; replaced with a fresh decoder at the start of each stream.
    codec2: Codec2,
    /// Handle used to stop the playback thread: dropping this sender makes the
    /// thread's blocking `recv()` return `Err`, ending the thread.
    end_tx: Option<Sender<()>>,
}
66
67 impl StreamAdapter for Codec2Adapter {
68 fn adapter_registered(&self, _id: usize, handle: TxHandle) {
69 self.state.lock().unwrap().tx = Some(handle);
70
71 let (end_tx, end_rx) = channel();
72 let state = self.state.clone();
73 let output_card = self.output_card.clone();
74 std::thread::spawn(move || stream_thread(end_rx, state, output_card));
75 self.state.lock().unwrap().end_tx = Some(end_tx);
76 }
77
78 fn adapter_removed(&self) {
79 let mut state = self.state.lock().unwrap();
80 state.tx = None;
81 state.end_tx = None;
82 }
83
84 fn tnc_started(&self) {}
85
86 fn tnc_closed(&self) {}
87
88 fn stream_began(&self, lsf: LsfFrame) {
89 // for now we will assume:
90 // - unencrypted
91 // - data type is Voice (Codec2 3200), not Voice+Data
92 // TODO: is encryption handled here or in M17App, such that we get a decrypted stream?
93 // TODO: handle the Voice+Data combination with Codec2 1600
94 self.state.lock().unwrap().codec2 = Codec2::new(Codec2Mode::MODE_3200);
95 }
96
97 fn stream_data(&self, frame_number: u16, is_final: bool, data: Arc<[u8; 16]>) {
98 let mut state = self.state.lock().unwrap();
99 for encoded in data.chunks(8) {
100 if state.out_buf.len() < 1024 {
101 let mut samples = [i16::EQUILIBRIUM; 160]; // while assuming 3200
102 state.codec2.decode(&mut samples, encoded);
103 // TODO: maybe get rid of VecDeque so we can decode directly into ring buffer?
104 for s in samples {
105 state.out_buf.push_back(s);
106 }
107 } else {
108 debug!("out_buf overflow");
109 }
110 }
111 }
112 }
113
114 fn output_cb(data: &mut [i16], state: &Mutex<AdapterState>) {
115 let mut state = state.lock().unwrap();
116 debug!(
117 "sound card wants {} samples, we have {} in the buffer",
118 data.len(),
119 state.out_buf.len()
120 );
121 for d in data {
122 *d = state.out_buf.pop_front().unwrap_or(i16::EQUILIBRIUM);
123 }
124 }
125
/// Create and manage the stream from a dedicated thread since it's `!Send`
///
/// Blocks until `end` is signalled or its sender is dropped, then returns,
/// dropping the stream.
///
/// # Panics
///
/// Panics if the named output card does not exist, or if no mono i16 output
/// configuration is available on it.
fn stream_thread(end: Receiver<()>, state: Arc<Mutex<AdapterState>>, output_card: String) {
    let host = cpal::default_host();
    // Select the output device by name; unwrap panics if it is absent.
    let device = host
        .output_devices()
        .unwrap()
        .find(|d| d.name().unwrap() == output_card)
        .unwrap();
    let mut configs = device.supported_output_configs().unwrap();
    // TODO: channels == 1 doesn't work on a Raspberry Pi
    // make this configurable and support interleaving LRLR stereo samples if using 2 channels
    // 8 kHz matches the Codec2 output rate used elsewhere in this file.
    // NOTE(review): with_sample_rate panics if 8000 Hz falls outside the
    // matched config's supported range — confirm on target hardware.
    let config = configs
        .find(|c| c.channels() == 1 && c.sample_format() == SampleFormat::I16)
        .unwrap()
        .with_sample_rate(SampleRate(8000));
    let stream = device
        .build_output_stream(
            &config.into(),
            move |data: &mut [i16], info: &cpal::OutputCallbackInfo| {
                debug!(
                    "callback {:?} playback {:?}",
                    info.timestamp().callback,
                    info.timestamp().playback
                );
                output_cb(data, &state);
            },
            |e| {
                // trigger end_tx here? always more edge cases
                debug!("error occurred in codec2 playback: {e:?}");
            },
            None,
        )
        .unwrap();
    stream.play().unwrap();
    // Park here until the adapter is removed (sender dropped) or signalled.
    let _ = end.recv();
    // it seems concrete impls of Stream have a Drop implementation that will handle termination
}
162 }