#![doc = include_str!("../README.md")]

use codec2::{Codec2, Codec2Mode};
use cpal::traits::DeviceTrait;
use cpal::traits::HostTrait;
use cpal::traits::StreamTrait;
use cpal::{Sample, SampleFormat, SampleRate};
use log::debug;
use m17app::adapter::StreamAdapter;
use m17app::app::TxHandle;
use m17app::error::AdapterError;
use m17app::link_setup::LinkSetup;
use m17app::link_setup::M17Address;
use m17app::StreamFrame;
use std::collections::VecDeque;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::sync::{
    mpsc::{channel, Receiver, Sender},
    Arc, Mutex,
};
use std::time::Duration;
use std::time::Instant;
use thiserror::Error;

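/// Decodes a buffer of 8-byte Codec2 3200 frames into 8 kHz PCM samples, writing them
/// to `out_path` as raw 16-bit little-endian audio and also returning them.
///
/// A minimal usage sketch; the file names are hypothetical and the crate is assumed to
/// be used as `m17codec2`:
///
/// ```no_run
/// // Read previously captured Codec2 data and decode it to raw PCM on disk.
/// let encoded = std::fs::read("voice.c2").unwrap();
/// let samples = m17codec2::decode_codec2(&encoded, "voice.raw");
/// // MODE_3200 produces 160 samples for every 8-byte frame.
/// assert_eq!(samples.len(), (encoded.len() / 8) * 160);
/// ```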
pub fn decode_codec2<P: AsRef<Path>>(data: &[u8], out_path: P) -> Vec<i16> {
    let mut codec = Codec2::new(Codec2Mode::MODE_3200);
    let mut all_samples: Vec<i16> = vec![];
    for i in 0..(data.len() / 8) {
        let mut samples = vec![0; codec.samples_per_frame()];
        codec.decode(&mut samples, &data[i * 8..((i + 1) * 8)]);
        all_samples.append(&mut samples);
    }

    // Write the decoded samples to disk as raw 16-bit little-endian PCM
    let mut speech_out = File::create(out_path).unwrap();
    for b in &all_samples {
        speech_out.write_all(&b.to_le_bytes()).unwrap();
    }
    all_samples
}

/// Subscribes to M17 streams and attempts to play the decoded Codec2 audio
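///
/// The adapter is constructed directly and then registered with an `M17App` through the
/// stream subscription mechanism provided by the `m17app` crate; the registration call
/// itself lives outside this crate and is not shown here.
///
/// ```no_run
/// // Construct an adapter that will play received voice on the default output card.
/// let adapter = m17codec2::Codec2Adapter::new();
/// // ...then hand `adapter` to your M17App's stream subscription API.
/// ```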
pub struct Codec2Adapter {
    state: Arc<Mutex<AdapterState>>,
    // TODO: make this configurable
    output_card: String,
}

impl Codec2Adapter {
    pub fn new() -> Self {
        Self {
            state: Arc::new(Mutex::new(AdapterState {
                tx: None,
                out_buf: VecDeque::new(),
                codec2: Codec2::new(Codec2Mode::MODE_3200),
                end_tx: None,
            })),
            // TODO: this doesn't work on rpi. Use default_output_device() by default
            output_card: "default".to_owned(),
        }
    }
}

impl Default for Codec2Adapter {
    fn default() -> Self {
        Self::new()
    }
}

struct AdapterState {
    tx: Option<TxHandle>,
    /// Circular buffer of output samples for playback
    out_buf: VecDeque<i16>,
    codec2: Codec2,
    end_tx: Option<Sender<()>>,
}

impl StreamAdapter for Codec2Adapter {
    fn start(&self, handle: TxHandle) -> Result<(), AdapterError> {
        self.state.lock().unwrap().tx = Some(handle);

        let (end_tx, end_rx) = channel();
        let (setup_tx, setup_rx) = channel();
        let state = self.state.clone();
        let output_card = self.output_card.clone();
        std::thread::spawn(move || stream_thread(end_rx, setup_tx, state, output_card));
        self.state.lock().unwrap().end_tx = Some(end_tx);
        // Propagate any errors arising in the thread
        Ok(setup_rx.recv()??)
    }

    fn close(&self) -> Result<(), AdapterError> {
        let mut state = self.state.lock().unwrap();
        state.tx = None;
        state.end_tx = None;
        Ok(())
    }

    fn stream_began(&self, _link_setup: LinkSetup) {
        // for now we will assume:
        // - unencrypted
        // - data type is Voice (Codec2 3200), not Voice+Data
        // TODO: is encryption handled here or in M17App, such that we get a decrypted stream?
        // TODO: handle the Voice+Data combination with Codec2 1600
        self.state.lock().unwrap().codec2 = Codec2::new(Codec2Mode::MODE_3200);
    }

    fn stream_data(&self, _frame_number: u16, _is_final: bool, data: Arc<[u8; 16]>) {
        let mut state = self.state.lock().unwrap();
        for encoded in data.chunks(8) {
            if state.out_buf.len() < 1024 {
                let mut samples = [i16::EQUILIBRIUM; 160]; // 160 samples per 8-byte frame in 3200 mode
                state.codec2.decode(&mut samples, encoded);
                // TODO: maybe get rid of VecDeque so we can decode directly into ring buffer?
                for s in samples {
                    state.out_buf.push_back(s);
                }
            } else {
                debug!("out_buf overflow");
            }
        }
    }
}

/// cpal output callback: fill the hardware buffer from the decoded-sample queue,
/// padding with silence (the equilibrium sample value) on underrun.
fn output_cb(data: &mut [i16], state: &Mutex<AdapterState>) {
    let mut state = state.lock().unwrap();
    for d in data {
        *d = state.out_buf.pop_front().unwrap_or(i16::EQUILIBRIUM);
    }
}

/// Create and manage the cpal stream from a dedicated thread since it's `!Send`
fn stream_thread(
    end: Receiver<()>,
    setup_tx: Sender<Result<(), AdapterError>>,
    state: Arc<Mutex<AdapterState>>,
    output_card: String,
) {
    let host = cpal::default_host();
    let device = match host
        .output_devices()
        .unwrap()
        .find(|d| d.name().unwrap() == output_card)
    {
        Some(d) => d,
        None => {
            let _ = setup_tx.send(Err(M17Codec2Error::CardUnavailable(output_card).into()));
            return;
        }
    };
    let mut configs = match device.supported_output_configs() {
        Ok(c) => c,
        Err(e) => {
            let _ = setup_tx.send(Err(M17Codec2Error::OutputConfigsUnavailable(
                output_card,
                e,
            )
            .into()));
            return;
        }
    };
    // TODO: channels == 1 doesn't work on a Raspberry Pi
    // make this configurable and support interleaving LRLR stereo samples if using 2 channels
    let config = match configs.find(|c| c.channels() == 1 && c.sample_format() == SampleFormat::I16)
    {
        Some(c) => c,
        None => {
            let _ = setup_tx.send(Err(
                M17Codec2Error::SupportedOutputUnavailable(output_card).into()
            ));
            return;
        }
    };

    // Codec2 3200 produces 8 kHz mono audio, so request that sample rate directly
    let config = config.with_sample_rate(SampleRate(8000));
    let stream = match device.build_output_stream(
        &config.into(),
        move |data: &mut [i16], _info: &cpal::OutputCallbackInfo| {
            output_cb(data, &state);
        },
        |e| {
            // TODO: should this trigger end_tx? There are always more edge cases
            debug!("error occurred in codec2 playback: {e:?}");
        },
        None,
    ) {
        Ok(s) => s,
        Err(e) => {
            let _ = setup_tx.send(Err(
                M17Codec2Error::OutputStreamBuildError(output_card, e).into()
            ));
            return;
        }
    };
    match stream.play() {
        Ok(()) => (),
        Err(e) => {
            let _ = setup_tx.send(Err(
                M17Codec2Error::OutputStreamPlayError(output_card, e).into()
            ));
            return;
        }
    }
    let _ = setup_tx.send(Ok(()));
    let _ = end.recv();
    // it seems concrete impls of Stream have a Drop implementation that will handle termination
}
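
// Picking a working `output_card` name for `Codec2Adapter` can be awkward, so the sketch
// below enumerates cpal's output devices and the configurations they advertise. It is a
// debugging aid only and is not called anywhere in this crate; remove or adapt as needed.
#[allow(dead_code)]
fn list_output_cards() {
    let host = cpal::default_host();
    let devices = match host.output_devices() {
        Ok(d) => d,
        Err(e) => {
            debug!("could not enumerate output devices: {e:?}");
            return;
        }
    };
    for device in devices {
        let name = device.name().unwrap_or_else(|_| "<unknown>".to_owned());
        match device.supported_output_configs() {
            Ok(configs) => {
                for c in configs {
                    debug!(
                        "{name}: {} channel(s), {:?}, {}-{} Hz",
                        c.channels(),
                        c.sample_format(),
                        c.min_sample_rate().0,
                        c.max_sample_rate().0
                    );
                }
            }
            Err(e) => debug!("{name}: could not query output configs: {e:?}"),
        }
    }
}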

/// Transmits a wave file as an M17 stream
pub struct WavePlayer;

impl WavePlayer {
    /// Plays a wave file (blocking).
    ///
    /// * `path`: wave file to transmit, must be 8 kHz mono and 16-bit LE
    /// * `tx`: a `TxHandle` obtained from an `M17App`
    /// * `source`: address of transmission source
    /// * `destination`: address of transmission destination
    /// * `channel_access_number`: from 0 to 15, usually 0
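    ///
    /// A usage sketch only; the callsign, file name, and `M17Address` constructor names
    /// here are illustrative rather than taken from this crate, and `tx` is assumed to
    /// come from an already-configured `M17App`:
    ///
    /// ```ignore
    /// // `from_callsign` and `new_broadcast` are hypothetical constructor names
    /// let source = M17Address::from_callsign("N0CALL").unwrap();
    /// let destination = M17Address::new_broadcast();
    /// WavePlayer::play("announcement.wav".into(), tx, &source, &destination, 0);
    /// ```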
    pub fn play(
        path: PathBuf,
        tx: TxHandle,
        source: &M17Address,
        destination: &M17Address,
        channel_access_number: u8,
    ) {
        let mut reader = hound::WavReader::open(path).unwrap();
        let mut samples = reader.samples::<i16>();

        let mut codec = Codec2::new(Codec2Mode::MODE_3200);
        let mut in_buf = [0i16; 160];
        let mut out_buf = [0u8; 16];
        let mut lsf_chunk: usize = 0;
        // One 16-byte stream frame carries two Codec2 3200 frames, i.e. 40 ms of audio
        const TICK: Duration = Duration::from_millis(40);
        let mut next_tick = Instant::now() + TICK;
        let mut frame_number = 0;

        let mut setup = LinkSetup::new_voice(source, destination);
        setup.set_channel_access_number(channel_access_number);
        tx.transmit_stream_start(&setup);

        loop {
            let mut last_one = false;
            for out in out_buf.chunks_mut(8) {
                for i in in_buf.iter_mut() {
                    let sample = match samples.next() {
                        Some(Ok(sample)) => sample,
                        _ => {
                            // Pad the final frame with silence once the file runs out
                            last_one = true;
                            0
                        }
                    };
                    *i = sample;
                }
                codec.encode(out, &in_buf);
            }
            tx.transmit_stream_next(&StreamFrame {
                lich_idx: lsf_chunk as u8,
                lich_part: setup.lich_part(lsf_chunk as u8),
                frame_number,
                end_of_stream: last_one,
                stream_data: out_buf,
            });
            frame_number += 1;
            lsf_chunk = (lsf_chunk + 1) % 6;

            if last_one {
                break;
            }

            std::thread::sleep(next_tick.duration_since(Instant::now()));
            next_tick += TICK;
        }
    }
}

#[derive(Debug, Error)]
pub enum M17Codec2Error {
    #[error("selected card '{0}' does not exist or is in use")]
    CardUnavailable(String),

    #[error("selected card '{0}' failed to list available output configs: '{1}'")]
    OutputConfigsUnavailable(String, #[source] cpal::SupportedStreamConfigsError),

    #[error("selected card '{0}' did not offer a compatible output config type, either due to hardware limitations or because it is currently in use")]
    SupportedOutputUnavailable(String),

    #[error("selected card '{0}' was unable to build an output stream: '{1}'")]
    OutputStreamBuildError(String, #[source] cpal::BuildStreamError),

    #[error("selected card '{0}' was unable to play an output stream: '{1}'")]
    OutputStreamPlayError(String, #[source] cpal::PlayStreamError),
}