author    Jelle Raaijmakers <jelle@gmta.nl>  2022-09-04 21:38:39 +0200
committer Linus Groh <mail@linusgroh.de>     2022-09-11 22:37:07 +0100
commit    dda5987684227e31e1d7b2fca749d43f734bfc47 (patch)
tree      0815e577932a70dff260f46adcfc8586314d1850 /Userland/Libraries/LibGPU/Device.h
parent    44953a430159117fcfbbee6f3e47bfac89f5e215 (diff)
LibGL+LibGPU+LibSoftGPU: Remove concept of `layer` in favor of `depth`
Looking at how Khronos defines layers:

  https://www.khronos.org/opengl/wiki/Array_Texture

We have both 3D textures and layers of 2D textures, and both can be encoded in our existing `Typed3DBuffer` as depth. Since the GPU API already supports depth, remove the concept of a layer everywhere.

Also pass in `Texture2D::LOG2_MAX_TEXTURE_SIZE` as the maximum number of mipmap levels, so we do not allocate 999 levels on each Image instantiation.
Diffstat (limited to 'Userland/Libraries/LibGPU/Device.h')
-rw-r--r--  Userland/Libraries/LibGPU/Device.h | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Userland/Libraries/LibGPU/Device.h b/Userland/Libraries/LibGPU/Device.h
index 3c019b68e5..279a1355ce 100644
--- a/Userland/Libraries/LibGPU/Device.h
+++ b/Userland/Libraries/LibGPU/Device.h
@@ -55,7 +55,7 @@ public:
     virtual RasterizerOptions options() const = 0;
     virtual LightModelParameters light_model() const = 0;
 
-    virtual NonnullRefPtr<Image> create_image(PixelFormat const&, u32 width, u32 height, u32 depth, u32 levels, u32 layers) = 0;
+    virtual NonnullRefPtr<Image> create_image(PixelFormat const&, u32 width, u32 height, u32 depth, u32 max_levels) = 0;
 
     virtual void set_sampler_config(unsigned, SamplerConfig const&) = 0;
     virtual void set_light_state(unsigned, Light const&) = 0;
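
For context, below is a minimal sketch (not part of this commit) of how a caller might allocate the backing image for an N-layer 2D array texture against the updated interface: the layer count is now passed as the image's depth, and the mipmap level cap replaces the old hard-coded 999. The GPU namespace and header names follow LibGPU's layout, but the helper function, its name, and its parameters are hypothetical.

#include <AK/NonnullRefPtr.h>
#include <AK/Types.h>
#include <LibGPU/Device.h>
#include <LibGPU/Image.h>

// Hypothetical helper: layers of a 2D array texture are expressed as depth
// slices of a 3D image, so the former `layers` argument disappears and the
// layer count is passed where `depth` used to be fixed at 1.
static NonnullRefPtr<GPU::Image> allocate_array_texture(GPU::Device& device,
    GPU::PixelFormat const& format, u32 width, u32 height, u32 layer_count, u32 max_levels)
{
    // `max_levels` would typically be something like
    // Texture2D::LOG2_MAX_TEXTURE_SIZE, per the commit message, rather than
    // allocating 999 mipmap levels per Image instantiation.
    return device.create_image(format, width, height, layer_count, max_levels);
}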