
xen/lockprof: Move .lockprofile.data into .rodata

Message ID 1456340856-3065-2-git-send-email-andrew.cooper3@citrix.com (mailing list archive)
State New, archived

Commit Message

Andrew Cooper Feb. 24, 2016, 7:07 p.m. UTC
The entire contents of .lockprofile.data are unchanging pointers to
lock_profile structures in .data.  Annotate the type as such, and link the
section into .rodata.  As these are just pointers, 32-byte alignment is
unnecessary.

Signed-off-by: Andrew Cooper <andrew.cooper3@citrix.com>
---
CC: Jan Beulich <JBeulich@suse.com>
CC: Ian Campbell <ian.campbell@citrix.com>
CC: Stefano Stabellini <stefano.stabellini@citrix.com>

v2:
 * New
v3:
 * Explicitly introduce POINTER_ALIGN to avoid forcing 8-byte alignment on
   arm32
---
 xen/arch/arm/xen.lds.S       | 15 ++++++++-------
 xen/arch/x86/xen.lds.S       | 14 +++++++-------
 xen/include/asm-arm/config.h |  1 +
 xen/include/asm-x86/config.h |  1 +
 xen/include/xen/spinlock.h   |  2 +-
 5 files changed, 18 insertions(+), 15 deletions(-)
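
For context, the pointers collected in .lockprofile.data are consumed at run
time by walking the region between __lock_profile_start and
__lock_profile_end as an array.  The following is a minimal, illustrative
sketch of such a walk, not part of the patch; the visit_all_lock_profiles()
helper and the array-style extern declarations are invented for illustration:

    struct lock_profile;    /* real definition lives in xen/include/xen/spinlock.h */

    /* Linker-provided bounds of the (now read-only) pointer array. */
    extern struct lock_profile *const __lock_profile_start[];
    extern struct lock_profile *const __lock_profile_end[];

    /* Apply fn() to every registered lock_profile structure. */
    static void visit_all_lock_profiles(void (*fn)(struct lock_profile *))
    {
        struct lock_profile *const *ppf;

        /* Entries are plain pointers, so POINTER_ALIGN alignment suffices. */
        for ( ppf = __lock_profile_start; ppf < __lock_profile_end; ppf++ )
            fn(*ppf);
    }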

Comments

Stefano Stabellini Feb. 25, 2016, 11:02 a.m. UTC | #1
On Wed, 24 Feb 2016, Andrew Cooper wrote:
> The entire contents of .lockprofile.data are unchanging pointers to
> lock_profile structures in .data.  Annotate the type as such, and link the
> section into .rodata.  As these are just pointers, 32-byte alignment is
> unnecessary.
> 
> Signed-off-by: Andrew Cooper <andrew.cooper3@citrix.com>
> ---
> CC: Jan Beulich <JBeulich@suse.com>
> CC: Ian Campbell <ian.campbell@citrix.com>
> CC: Stefano Stabellini <stefano.stabellini@citrix.com>
> 
> v2:
>  * New
> v3:
>  * Explicitly introduce POINTER_ALIGN to avoid forcing 8-byte alignment on
>    arm32

Actually the way it is done in this patch, POINTER_ALIGN is 8 on arm64
and 4 on arm32 (see the definition of LONG_BYTEORDER in config.h). 

But I think it should still be OK.
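
For reference, the arithmetic works out as below.  This is only an
illustrative sketch; the LONG_BYTEORDER values are mirrored from Xen's arm
config headers rather than taken from this patch, and the *_ARM32/*_ARM64
macro names are invented:

    /* LONG_BYTEORDER is 2 on arm32 (4-byte long) and 3 on arm64 (8-byte long). */
    #define LONG_BYTEORDER_ARM32 2
    #define LONG_BYTEORDER_ARM64 3

    #define BYTES_PER_LONG(order) (1 << (order))   /* as in config.h */

    /* POINTER_ALIGN is defined as BYTES_PER_LONG, hence: */
    _Static_assert(BYTES_PER_LONG(LONG_BYTEORDER_ARM32) == 4, "arm32: 4-byte alignment");
    _Static_assert(BYTES_PER_LONG(LONG_BYTEORDER_ARM64) == 8, "arm64: 8-byte alignment");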

Acked-by: Stefano Stabellini <stefano.stabellini@eu.citrix.com>


>  xen/arch/arm/xen.lds.S       | 15 ++++++++-------
>  xen/arch/x86/xen.lds.S       | 14 +++++++-------
>  xen/include/asm-arm/config.h |  1 +
>  xen/include/asm-x86/config.h |  1 +
>  xen/include/xen/spinlock.h   |  2 +-
>  5 files changed, 18 insertions(+), 15 deletions(-)
> 
> diff --git a/xen/arch/arm/xen.lds.S b/xen/arch/arm/xen.lds.S
> index f501a2f..1b5e516 100644
> --- a/xen/arch/arm/xen.lds.S
> +++ b/xen/arch/arm/xen.lds.S
> @@ -50,6 +50,14 @@ SECTIONS
>         __stop_bug_frames_2 = .;
>         *(.rodata)
>         *(.rodata.*)
> +
> +#ifdef LOCK_PROFILE
> +       . = ALIGN(POINTER_ALIGN);
> +       __lock_profile_start = .;
> +       *(.lockprofile.data)
> +       __lock_profile_end = .;
> +#endif
> +
>          _erodata = .;          /* End of read-only data */
>    } :text
>  
> @@ -83,13 +91,6 @@ SECTIONS
>         *(.data.rel.ro.*)
>    } :text
>  
> -#ifdef LOCK_PROFILE
> -  . = ALIGN(32);
> -  __lock_profile_start = .;
> -  .lockprofile.data : { *(.lockprofile.data) } :text
> -  __lock_profile_end = .;
> -#endif
> -
>    . = ALIGN(8);
>    .arch.info : {
>        _splatform = .;
> diff --git a/xen/arch/x86/xen.lds.S b/xen/arch/x86/xen.lds.S
> index 9fde1db..8390ec2 100644
> --- a/xen/arch/x86/xen.lds.S
> +++ b/xen/arch/x86/xen.lds.S
> @@ -65,6 +65,13 @@ SECTIONS
>  
>         *(.rodata)
>         *(.rodata.*)
> +
> +#ifdef LOCK_PROFILE
> +       . = ALIGN(POINTER_ALIGN);
> +       __lock_profile_start = .;
> +       *(.lockprofile.data)
> +       __lock_profile_end = .;
> +#endif
>    } :text
>  
>    . = ALIGN(SMP_CACHE_BYTES);
> @@ -97,13 +104,6 @@ SECTIONS
>         CONSTRUCTORS
>    } :text
>  
> -#ifdef LOCK_PROFILE
> -  . = ALIGN(32);
> -  __lock_profile_start = .;
> -  .lockprofile.data : { *(.lockprofile.data) } :text
> -  __lock_profile_end = .;
> -#endif
> -
>    . = ALIGN(PAGE_SIZE);             /* Init code and data */
>    __init_begin = .;
>    .init.text : {
> diff --git a/xen/include/asm-arm/config.h b/xen/include/asm-arm/config.h
> index c3a2c30..c0ad469 100644
> --- a/xen/include/asm-arm/config.h
> +++ b/xen/include/asm-arm/config.h
> @@ -15,6 +15,7 @@
>  
>  #define BYTES_PER_LONG (1 << LONG_BYTEORDER)
>  #define BITS_PER_LONG (BYTES_PER_LONG << 3)
> +#define POINTER_ALIGN BYTES_PER_LONG
>  
>  /* xen_ulong_t is always 64 bits */
>  #define BITS_PER_XEN_ULONG 64
> diff --git a/xen/include/asm-x86/config.h b/xen/include/asm-x86/config.h
> index 07f3c1f..7291b59 100644
> --- a/xen/include/asm-x86/config.h
> +++ b/xen/include/asm-x86/config.h
> @@ -13,6 +13,7 @@
>  #define BYTES_PER_LONG (1 << LONG_BYTEORDER)
>  #define BITS_PER_LONG (BYTES_PER_LONG << 3)
>  #define BITS_PER_BYTE 8
> +#define POINTER_ALIGN BYTES_PER_LONG
>  
>  #define BITS_PER_XEN_ULONG BITS_PER_LONG
>  
> diff --git a/xen/include/xen/spinlock.h b/xen/include/xen/spinlock.h
> index 77ab7d5..88b53f9 100644
> --- a/xen/include/xen/spinlock.h
> +++ b/xen/include/xen/spinlock.h
> @@ -79,7 +79,7 @@ struct lock_profile_qhead {
>  
>  #define _LOCK_PROFILE(name) { 0, #name, &name, 0, 0, 0, 0, 0 }
>  #define _LOCK_PROFILE_PTR(name)                                               \
> -    static struct lock_profile *__lock_profile_##name                         \
> +    static struct lock_profile * const __lock_profile_##name                  \
>      __used_section(".lockprofile.data") =                                     \
>      &__lock_profile_data_##name
>  #define _SPIN_LOCK_UNLOCKED(x) { { 0 }, SPINLOCK_NO_CPU, 0, _LOCK_DEBUG, x }
> -- 
> 2.1.4
>
Stefano Stabellini Feb. 25, 2016, 11:12 a.m. UTC | #2
On Thu, 25 Feb 2016, Stefano Stabellini wrote:
> On Wed, 24 Feb 2016, Andrew Cooper wrote:
> > The entire contents of .lockprofile.data are unchanging pointers to
> > lock_profile structures in .data.  Annotate the type as such, and link the
> > section into .rodata.  As these are just pointers, 32-byte alignment is
> > unnecessary.
> > 
> > Signed-off-by: Andrew Cooper <andrew.cooper3@citrix.com>
> > ---
> > CC: Jan Beulich <JBeulich@suse.com>
> > CC: Ian Campbell <ian.campbell@citrix.com>
> > CC: Stefano Stabellini <stefano.stabellini@citrix.com>
> > 
> > v2:
> >  * New
> > v3:
> >  * Explicitly introduce POINTER_ALIGN to avoid forcing 8-byte alignment on
> >    arm32
> 
> Actually the way it is done in this patch, POINTER_ALIGN is 8 on arm64
> and 4 on arm32 (see the definition of LONG_BYTEORDER in config.h). 

I misread the sentence in the commit message; everything is good then :-)


> But I think it should still be OK.
> 
> Acked-by: Stefano Stabellini <stefano.stabellini@eu.citrix.com>
> 
> 
> >  xen/arch/arm/xen.lds.S       | 15 ++++++++-------
> >  xen/arch/x86/xen.lds.S       | 14 +++++++-------
> >  xen/include/asm-arm/config.h |  1 +
> >  xen/include/asm-x86/config.h |  1 +
> >  xen/include/xen/spinlock.h   |  2 +-
> >  5 files changed, 18 insertions(+), 15 deletions(-)
> > 
> > diff --git a/xen/arch/arm/xen.lds.S b/xen/arch/arm/xen.lds.S
> > index f501a2f..1b5e516 100644
> > --- a/xen/arch/arm/xen.lds.S
> > +++ b/xen/arch/arm/xen.lds.S
> > @@ -50,6 +50,14 @@ SECTIONS
> >         __stop_bug_frames_2 = .;
> >         *(.rodata)
> >         *(.rodata.*)
> > +
> > +#ifdef LOCK_PROFILE
> > +       . = ALIGN(POINTER_ALIGN);
> > +       __lock_profile_start = .;
> > +       *(.lockprofile.data)
> > +       __lock_profile_end = .;
> > +#endif
> > +
> >          _erodata = .;          /* End of read-only data */
> >    } :text
> >  
> > @@ -83,13 +91,6 @@ SECTIONS
> >         *(.data.rel.ro.*)
> >    } :text
> >  
> > -#ifdef LOCK_PROFILE
> > -  . = ALIGN(32);
> > -  __lock_profile_start = .;
> > -  .lockprofile.data : { *(.lockprofile.data) } :text
> > -  __lock_profile_end = .;
> > -#endif
> > -
> >    . = ALIGN(8);
> >    .arch.info : {
> >        _splatform = .;
> > diff --git a/xen/arch/x86/xen.lds.S b/xen/arch/x86/xen.lds.S
> > index 9fde1db..8390ec2 100644
> > --- a/xen/arch/x86/xen.lds.S
> > +++ b/xen/arch/x86/xen.lds.S
> > @@ -65,6 +65,13 @@ SECTIONS
> >  
> >         *(.rodata)
> >         *(.rodata.*)
> > +
> > +#ifdef LOCK_PROFILE
> > +       . = ALIGN(POINTER_ALIGN);
> > +       __lock_profile_start = .;
> > +       *(.lockprofile.data)
> > +       __lock_profile_end = .;
> > +#endif
> >    } :text
> >  
> >    . = ALIGN(SMP_CACHE_BYTES);
> > @@ -97,13 +104,6 @@ SECTIONS
> >         CONSTRUCTORS
> >    } :text
> >  
> > -#ifdef LOCK_PROFILE
> > -  . = ALIGN(32);
> > -  __lock_profile_start = .;
> > -  .lockprofile.data : { *(.lockprofile.data) } :text
> > -  __lock_profile_end = .;
> > -#endif
> > -
> >    . = ALIGN(PAGE_SIZE);             /* Init code and data */
> >    __init_begin = .;
> >    .init.text : {
> > diff --git a/xen/include/asm-arm/config.h b/xen/include/asm-arm/config.h
> > index c3a2c30..c0ad469 100644
> > --- a/xen/include/asm-arm/config.h
> > +++ b/xen/include/asm-arm/config.h
> > @@ -15,6 +15,7 @@
> >  
> >  #define BYTES_PER_LONG (1 << LONG_BYTEORDER)
> >  #define BITS_PER_LONG (BYTES_PER_LONG << 3)
> > +#define POINTER_ALIGN BYTES_PER_LONG
> >  
> >  /* xen_ulong_t is always 64 bits */
> >  #define BITS_PER_XEN_ULONG 64
> > diff --git a/xen/include/asm-x86/config.h b/xen/include/asm-x86/config.h
> > index 07f3c1f..7291b59 100644
> > --- a/xen/include/asm-x86/config.h
> > +++ b/xen/include/asm-x86/config.h
> > @@ -13,6 +13,7 @@
> >  #define BYTES_PER_LONG (1 << LONG_BYTEORDER)
> >  #define BITS_PER_LONG (BYTES_PER_LONG << 3)
> >  #define BITS_PER_BYTE 8
> > +#define POINTER_ALIGN BYTES_PER_LONG
> >  
> >  #define BITS_PER_XEN_ULONG BITS_PER_LONG
> >  
> > diff --git a/xen/include/xen/spinlock.h b/xen/include/xen/spinlock.h
> > index 77ab7d5..88b53f9 100644
> > --- a/xen/include/xen/spinlock.h
> > +++ b/xen/include/xen/spinlock.h
> > @@ -79,7 +79,7 @@ struct lock_profile_qhead {
> >  
> >  #define _LOCK_PROFILE(name) { 0, #name, &name, 0, 0, 0, 0, 0 }
> >  #define _LOCK_PROFILE_PTR(name)                                               \
> > -    static struct lock_profile *__lock_profile_##name                         \
> > +    static struct lock_profile * const __lock_profile_##name                  \
> >      __used_section(".lockprofile.data") =                                     \
> >      &__lock_profile_data_##name
> >  #define _SPIN_LOCK_UNLOCKED(x) { { 0 }, SPINLOCK_NO_CPU, 0, _LOCK_DEBUG, x }
> > -- 
> > 2.1.4
> > 
>

Patch

diff --git a/xen/arch/arm/xen.lds.S b/xen/arch/arm/xen.lds.S
index f501a2f..1b5e516 100644
--- a/xen/arch/arm/xen.lds.S
+++ b/xen/arch/arm/xen.lds.S
@@ -50,6 +50,14 @@  SECTIONS
        __stop_bug_frames_2 = .;
        *(.rodata)
        *(.rodata.*)
+
+#ifdef LOCK_PROFILE
+       . = ALIGN(POINTER_ALIGN);
+       __lock_profile_start = .;
+       *(.lockprofile.data)
+       __lock_profile_end = .;
+#endif
+
         _erodata = .;          /* End of read-only data */
   } :text
 
@@ -83,13 +91,6 @@  SECTIONS
        *(.data.rel.ro.*)
   } :text
 
-#ifdef LOCK_PROFILE
-  . = ALIGN(32);
-  __lock_profile_start = .;
-  .lockprofile.data : { *(.lockprofile.data) } :text
-  __lock_profile_end = .;
-#endif
-
   . = ALIGN(8);
   .arch.info : {
       _splatform = .;
diff --git a/xen/arch/x86/xen.lds.S b/xen/arch/x86/xen.lds.S
index 9fde1db..8390ec2 100644
--- a/xen/arch/x86/xen.lds.S
+++ b/xen/arch/x86/xen.lds.S
@@ -65,6 +65,13 @@  SECTIONS
 
        *(.rodata)
        *(.rodata.*)
+
+#ifdef LOCK_PROFILE
+       . = ALIGN(POINTER_ALIGN);
+       __lock_profile_start = .;
+       *(.lockprofile.data)
+       __lock_profile_end = .;
+#endif
   } :text
 
   . = ALIGN(SMP_CACHE_BYTES);
@@ -97,13 +104,6 @@  SECTIONS
        CONSTRUCTORS
   } :text
 
-#ifdef LOCK_PROFILE
-  . = ALIGN(32);
-  __lock_profile_start = .;
-  .lockprofile.data : { *(.lockprofile.data) } :text
-  __lock_profile_end = .;
-#endif
-
   . = ALIGN(PAGE_SIZE);             /* Init code and data */
   __init_begin = .;
   .init.text : {
diff --git a/xen/include/asm-arm/config.h b/xen/include/asm-arm/config.h
index c3a2c30..c0ad469 100644
--- a/xen/include/asm-arm/config.h
+++ b/xen/include/asm-arm/config.h
@@ -15,6 +15,7 @@ 
 
 #define BYTES_PER_LONG (1 << LONG_BYTEORDER)
 #define BITS_PER_LONG (BYTES_PER_LONG << 3)
+#define POINTER_ALIGN BYTES_PER_LONG
 
 /* xen_ulong_t is always 64 bits */
 #define BITS_PER_XEN_ULONG 64
diff --git a/xen/include/asm-x86/config.h b/xen/include/asm-x86/config.h
index 07f3c1f..7291b59 100644
--- a/xen/include/asm-x86/config.h
+++ b/xen/include/asm-x86/config.h
@@ -13,6 +13,7 @@ 
 #define BYTES_PER_LONG (1 << LONG_BYTEORDER)
 #define BITS_PER_LONG (BYTES_PER_LONG << 3)
 #define BITS_PER_BYTE 8
+#define POINTER_ALIGN BYTES_PER_LONG
 
 #define BITS_PER_XEN_ULONG BITS_PER_LONG
 
diff --git a/xen/include/xen/spinlock.h b/xen/include/xen/spinlock.h
index 77ab7d5..88b53f9 100644
--- a/xen/include/xen/spinlock.h
+++ b/xen/include/xen/spinlock.h
@@ -79,7 +79,7 @@  struct lock_profile_qhead {
 
 #define _LOCK_PROFILE(name) { 0, #name, &name, 0, 0, 0, 0, 0 }
 #define _LOCK_PROFILE_PTR(name)                                               \
-    static struct lock_profile *__lock_profile_##name                         \
+    static struct lock_profile * const __lock_profile_##name                  \
     __used_section(".lockprofile.data") =                                     \
     &__lock_profile_data_##name
 #define _SPIN_LOCK_UNLOCKED(x) { { 0 }, SPINLOCK_NO_CPU, 0, _LOCK_DEBUG, x }
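
To make the spinlock.h hunk concrete, here is a sketch of what the modified
macro would expand to for a hypothetical lock named "foo" (the name is
invented for illustration; __used_section() is Xen's annotation for placing
a kept symbol into a named section):

    /* Hypothetical expansion of _LOCK_PROFILE_PTR(foo) after this patch: */
    static struct lock_profile * const __lock_profile_foo
        __used_section(".lockprofile.data") = &__lock_profile_data_foo;

    /*
     * Before the patch the pointer object itself was writable:
     *
     *   static struct lock_profile *__lock_profile_foo
     *       __used_section(".lockprofile.data") = &__lock_profile_data_foo;
     *
     * Qualifying it const is what lets .lockprofile.data be emitted as
     * read-only data and linked into .rodata.
     */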