The 120MHz value hardcoded in the max_t() call that computes the HSM
rate is already defined in the driver as HSM_MIN_CLOCK_FREQ. Switch to
the named constant so the intent is more readable.

Signed-off-by: Maxime Ripard <max...@cerno.tech>
---
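For reference (an annotation, not part of the change itself):
HSM_MIN_CLOCK_FREQ names the same 120 MHz floor, and the max_t() picks
the larger of that floor and 101% of the TMDS character rate. A minimal
sketch, assuming the define lives in vc4_hdmi.c as the commit message
states:

    /* Driver-defined minimum HSM clock rate (120 MHz). */
    #define HSM_MIN_CLOCK_FREQ 120000000

    /*
     * Example: with a 148.5 MHz TMDS character rate,
     * (148500000 / 100) * 101 = 149985000 Hz > 120000000 Hz,
     * so hsm_rate ends up at 149985000.
     */
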
 drivers/gpu/drm/vc4/vc4_hdmi.c | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/drivers/gpu/drm/vc4/vc4_hdmi.c b/drivers/gpu/drm/vc4/vc4_hdmi.c
index 14628864487a..98fa306dbd24 100644
--- a/drivers/gpu/drm/vc4/vc4_hdmi.c
+++ b/drivers/gpu/drm/vc4/vc4_hdmi.c
@@ -1482,7 +1482,9 @@ static void vc4_hdmi_encoder_pre_crtc_configure(struct drm_encoder *encoder,
         * Additionally, the AXI clock needs to be at least 25% of
         * pixel clock, but HSM ends up being the limiting factor.
         */
-       hsm_rate = max_t(unsigned long, 120000000, (tmds_char_rate / 100) * 101);
+       hsm_rate = max_t(unsigned long,
+                        HSM_MIN_CLOCK_FREQ,
+                        (tmds_char_rate / 100) * 101);
        ret = clk_set_min_rate(vc4_hdmi->hsm_clock, hsm_rate);
        if (ret) {
                DRM_ERROR("Failed to set HSM clock rate: %d\n", ret);

-- 
2.39.1
