I'm trying to write to pixel buffer objects (PBOs) in OpenGL and upload them into textures for rendering. I first create the pixel buffers and the textures, then size both:
gl::BufferData(
gl::PIXEL_UNPACK_BUFFER as GLenum,
(texture_size[i as usize] as usize * std::mem::size_of::<u8>())
as gl::types::GLsizeiptr,
std::ptr::null(),
gl::STREAM_DRAW as GLenum,
);
gl::TexImage2D(
gl::TEXTURE_2D,
0,
gl::RED as u32,
width[i as usize],
height[i as usize],
0,
gl::RED as GLenum,
gl::UNSIGNED_BYTE as GLenum,
std::ptr::null(),
);
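For comparison, here is how I understand the allocation step is supposed to look. This is just a minimal sketch (pbo, tex, width and height are placeholders, and I'm assuming a single 8-bit GL_RED plane), not my actual code:

use gl::types::{GLint, GLsizei, GLsizeiptr, GLuint};

// Sketch: allocate one PBO plus one GL_RED texture for a width x height plane.
// Assumes a current GL context with loaded function pointers.
unsafe fn alloc_plane(pbo: GLuint, tex: GLuint, width: GLsizei, height: GLsizei) {
    gl::BindBuffer(gl::PIXEL_UNPACK_BUFFER, pbo);
    // The PBO needs room for the whole plane: width * height bytes
    // (std::mem::size_of::<u8>() is 1, so it drops out here).
    gl::BufferData(
        gl::PIXEL_UNPACK_BUFFER,
        (width as usize * height as usize) as GLsizeiptr,
        std::ptr::null(),
        gl::STREAM_DRAW,
    );

    gl::BindTexture(gl::TEXTURE_2D, tex);
    gl::TexImage2D(
        gl::TEXTURE_2D,
        0,
        gl::RED as GLint,  // internalformat is a GLint
        width,
        height,
        0,
        gl::RED,           // format
        gl::UNSIGNED_BYTE, // type
        std::ptr::null(),  // no initial data; it comes from the PBO later
    );
}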
Then, the draw function tries to upload data to them:
let plane_pointer = ffmpeg_decoded_packet.data(j.into()).as_ptr();
gl::BufferSubData(
gl::PIXEL_UNPACK_BUFFER as GLenum,
0,
width[j as usize] as gl::types::GLsizeiptr,
plane_pointer as *const libc::c_void,
);
gl::TexSubImage2D(
gl::TEXTURE_2D,
0,
0,
0,
width[j as usize],
height[j as usize],
gl::RED as GLenum,
gl::UNSIGNED_BYTE,
std::ptr::null(),
);
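My understanding of the upload step, again only as a sketch (plane stands for one decoded ffmpeg plane and is assumed to hold exactly width * height tightly packed bytes, with the PBO and texture from above still bound):

use gl::types::{GLsizei, GLsizeiptr};

// Sketch: copy one decoded plane into the bound PIXEL_UNPACK_BUFFER,
// then have TexSubImage2D read it from there.
unsafe fn upload_plane(plane: &[u8], width: GLsizei, height: GLsizei) {
    gl::BufferSubData(
        gl::PIXEL_UNPACK_BUFFER,
        0,
        plane.len() as GLsizeiptr, // width * height bytes, i.e. the whole plane
        plane.as_ptr() as *const std::os::raw::c_void,
    );
    // While a PBO is bound, the `pixels` argument is treated as a byte offset
    // into that buffer, so a null pointer means "start of the PBO".
    gl::TexSubImage2D(
        gl::TEXTURE_2D,
        0,
        0,
        0,
        width,
        height,
        gl::RED,
        gl::UNSIGNED_BYTE,
        std::ptr::null(),
    );
}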
I think something is wrong with the way I pass the pointers. Here are the signatures (a small example of the casts I use follows them):
pub unsafe fn BufferData(
target: types::GLenum,
size: types::GLsizeiptr,
data: *const __gl_imports::raw::c_void,
usage: types::GLenum,
) -> ()
pub unsafe fn TexSubImage2D(
target: types::GLenum,
level: types::GLint,
xoffset: types::GLint,
yoffset: types::GLint,
width: types::GLsizei,
height: types::GLsizei,
format: types::GLenum,
type_: types::GLenum,
pixels: *const __gl_imports::raw::c_void,
) -> ()
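As far as I can tell the casts themselves line up with those signatures; this is the idiom I use, shown on a dummy buffer (plane is only a placeholder for the decoded data):

use gl::types::GLsizeiptr;

let plane = vec![0u8; 1920 * 1080];                        // placeholder for one decoded plane
let data = plane.as_ptr() as *const std::os::raw::c_void;  // matches `data: *const c_void`
let size = plane.len() as GLsizeiptr;                      // size in bytes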
Some examples on GitHub multiply the size by std::mem::size_of::<T>() (https://github.com/Gopiandcode/rust-breakout/blob/f9755787a8f7f256df786fb5129abf4c91b0b2fd/src/test_game.rs#L54), while others just pass the raw length of the data.
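If I understand it correctly, the size argument is always a byte count, which would explain the difference between those examples:

// For f32 vertex data the element count has to be scaled to bytes,
// while for u8 pixel data size_of::<u8>() == 1 and the multiplication is a no-op.
let vertices: [f32; 6] = [0.0, 0.5, -0.5, -0.5, 0.5, -0.5];
let vertex_bytes = (vertices.len() * std::mem::size_of::<f32>()) as gl::types::GLsizeiptr; // 24 bytes
let pixels = vec![0u8; 16 * 16];
let pixel_bytes = (pixels.len() * std::mem::size_of::<u8>()) as gl::types::GLsizeiptr;     // 256, same as pixels.len()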
Here's my entire example: https://pastebin.com/EQVkStmt
Does anybody know what I'm doing wrong?