From: Anna-Maria Behnsen <anna-ma...@linutronix.de>

To support multiple PTP clocks, the VDSO data structure needs to be
reworked. All clock specific data will end up in struct vdso_clock, and
struct vdso_time_data will hold an array of it. For now, vdso_clock is
simply a define which maps it to vdso_time_data.
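
A rough sketch of the transitional define and the intended final layout
(the member names and the CS_BASES array size below are illustrative and
not part of this patch):

        /* Transitional: vdso_clock is only an alias for vdso_time_data */
        #define vdso_clock vdso_time_data

        /*
         * Sketch of the layout after the rework: the clock specific
         * fields used in the code below move into their own structure,
         * and vdso_time_data carries one instance per clock base.
         */
        struct vdso_clock {
                u64     cycle_last;
                u64     max_cycles;
                u32     mult;
                u32     shift;
                /* ... */
        };

        struct vdso_time_data {
                struct vdso_clock       clock_data[CS_BASES];
                /* ... */
        };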

To prepare for the rework of the data structures, replace the struct
vdso_time_data pointer with a struct vdso_clock pointer wherever
applicable.
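
With the array in place, a caller that today passes the time data pointer
around would derive the per-clock pointer from it instead, roughly along
these lines (clock_data and CS_HRES_COARSE are used for illustration
only), given a struct vdso_time_data pointer vd:

        const struct vdso_clock *vc = &vd->clock_data[CS_HRES_COARSE];

        if (!arch_vdso_clocksource_ok(vc))
                return false;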

No functional change.

Signed-off-by: Anna-Maria Behnsen <anna-ma...@linutronix.de>
Signed-off-by: Nam Cao <nam...@linutronix.de>
Signed-off-by: Thomas Weißschuh <thomas.weisssc...@linutronix.de>
---
 arch/x86/include/asm/vdso/gettimeofday.h | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/arch/x86/include/asm/vdso/gettimeofday.h b/arch/x86/include/asm/vdso/gettimeofday.h
index edec796832e08b73d6d58bda6408957048f4e80e..9e52cc46e1da99114312d85b34ae52e539dac9b6 100644
--- a/arch/x86/include/asm/vdso/gettimeofday.h
+++ b/arch/x86/include/asm/vdso/gettimeofday.h
@@ -261,7 +261,7 @@ static inline u64 __arch_get_hw_counter(s32 clock_mode,
        return U64_MAX;
 }
 
-static inline bool arch_vdso_clocksource_ok(const struct vdso_time_data *vd)
+static inline bool arch_vdso_clocksource_ok(const struct vdso_clock *vc)
 {
        return true;
 }
@@ -300,34 +300,34 @@ static inline bool arch_vdso_cycles_ok(u64 cycles)
  * declares everything with the MSB/Sign-bit set as invalid. Therefore the
  * effective mask is S64_MAX.
  */
-static __always_inline u64 vdso_calc_ns(const struct vdso_time_data *vd, u64 cycles, u64 base)
+static __always_inline u64 vdso_calc_ns(const struct vdso_clock *vc, u64 cycles, u64 base)
 {
-       u64 delta = cycles - vd->cycle_last;
+       u64 delta = cycles - vc->cycle_last;
 
        /*
         * Negative motion and deltas which can cause multiplication
         * overflow require special treatment. This check covers both as
-        * negative motion is guaranteed to be greater than @vd::max_cycles
+        * negative motion is guaranteed to be greater than @vc::max_cycles
         * due to unsigned comparison.
         *
         * Due to the MSB/Sign-bit being used as invalid marker (see
         * arch_vdso_cycles_ok() above), the effective mask is S64_MAX, but that
         * case is also unlikely and will also take the unlikely path here.
         */
-       if (unlikely(delta > vd->max_cycles)) {
+       if (unlikely(delta > vc->max_cycles)) {
                /*
                 * Due to the above mentioned TSC wobbles, filter out
                 * negative motion.  Per the above masking, the effective
                 * sign bit is now bit 62.
                 */
                if (delta & (1ULL << 62))
-                       return base >> vd->shift;
+                       return base >> vc->shift;
 
                /* Handle multiplication overflow gracefully */
-               return mul_u64_u32_add_u64_shr(delta & S64_MAX, vd->mult, base, vd->shift);
+               return mul_u64_u32_add_u64_shr(delta & S64_MAX, vc->mult, base, vc->shift);
        }
 
-       return ((delta * vd->mult) + base) >> vd->shift;
+       return ((delta * vc->mult) + base) >> vc->shift;
 }
 #define vdso_calc_ns vdso_calc_ns
 

-- 
2.48.1

