examples: add simple visual rendering of the samples in melody.v
parent 86816b1aad
commit 77c29c6b6f
@@ -1,12 +1,16 @@
-import os
+import gg
+import gx
 import sokol.audio
 
-struct AContext {
+struct AppState {
 mut:
-	frame_0 int
+	gframe  int // the current graphical frame
+	frame_0 int // offset of the current audio frames, relative to the start of the music
+	frames  [2048]f32 // a copy of the last rendered audio frames
+	gg      &gg.Context // used for drawing
 }
 
-fn my_audio_stream_callback(buffer &f32, num_frames, num_channels int, mut acontext AContext) {
+fn my_audio_stream_callback(buffer &f32, num_frames, num_channels int, mut acontext AppState) {
 	mut soundbuffer := buffer
 	for frame := 0; frame < num_frames; frame++ {
 		t := int(f32(acontext.frame_0 + frame) * 0.245)
@@ -18,7 +22,9 @@ fn my_audio_stream_callback(buffer &f32, num_frames, num_channels int, mut acont
 		for ch := 0; ch < num_channels; ch++ {
 			idx := frame * num_channels + ch
 			unsafe {
-				soundbuffer[idx] = f32(byte(y) - 127) / 255.0
+				a := f32(byte(y) - 127) / 255.0
+				soundbuffer[idx] = a
+				acontext.frames[idx & 2047] = a
 			}
 		}
 	}
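The hunk above keeps a copy of every generated sample in acontext.frames[idx & 2047], a 2048-entry ring buffer that the new drawing code reads back each graphical frame. The snippet below is only a minimal, self-contained sketch of that power-of-two masking idea; the name "ring" and the loop bound are illustrative and not part of melody.v.

// Sketch: idx & 2047 wraps any index into a 2048-element buffer,
// the same trick the audio callback uses for acontext.frames.
fn main() {
	mut ring := [2048]f32{}
	for i in 0 .. 5000 {
		ring[i & 2047] = f32(i) // writes wrap around: index 2048 lands on slot 0 again
	}
	println(ring[0]) // the last value written to slot 0 (i == 4096)
}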
@@ -26,10 +32,44 @@ fn my_audio_stream_callback(buffer &f32, num_frames, num_channels int, mut acont
 }
 
 fn main() {
+	mut state := &AppState{
+		gg: 0
+	}
 	audio.setup({
 		stream_userdata_cb: my_audio_stream_callback
-		user_data: &AContext{}
+		user_data: state
 	})
-	os.input('Press Enter to exit')
+	state.gg = gg.new_context({
+		bg_color: gx.rgb(50, 50, 50)
+		width: 1024
+		height: 400
+		use_ortho: true
+		create_window: true
+		window_title: 'ByteBeat Music'
+		frame_fn: graphics_frame
+		user_data: state
+	})
+	state.gg.run()
 	audio.shutdown()
 }
+
+fn graphics_frame(mut state AppState) {
+	state.gframe++
+	state.gg.begin()
+	state.draw()
+	state.gg.end()
+}
+
+[inline]
+fn (mut state AppState) bsample(idx int) byte {
+	return byte(127 + state.frames[(state.gframe + idx) & 2047] * 128)
+}
+
+fn (mut state AppState) draw() {
+	// first, reset and setup ortho projection
+	for x in 0 .. 1024 {
+		mut y := 100 * (state.frames[2*x] + state.frames[2*x+1])
+		state.gg.draw_line(x, 200, x, 200 + y, gx.rgba(state.bsample(x), state.bsample(x + 300),
+			state.bsample(x + 700), 255))
+	}
+}
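For context: the value y that the callback quantizes with byte(y) comes from a bytebeat expression over t in the unchanged middle of melody.v, which this diff does not show. The sketch below uses a classic stand-in formula, t * (42 & (t >> 10)), purely as an assumed example of what such a function looks like; it is not the formula melody.v actually uses, and it is written with u8, which newer V uses where this diff says byte.

// Hypothetical stand-in for the bytebeat expression in melody.v (not shown
// in this diff). Maps a time counter t to an 8-bit sample in 0..255.
fn bytebeat(t int) u8 {
	return u8(t * (42 & (t >> 10)))
}

fn main() {
	// print a few consecutive samples; real playback feeds a continuously
	// increasing t, and the callback recenters each byte with (byte(y) - 127) / 255.0
	for t in 3000 .. 3010 {
		println(bytebeat(t))
	}
}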