Use .global consistently rather than a mix of .global and .globl.
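
In the GNU assembler, .globl and .global are synonymous directives: both mark
a symbol as global so it is visible to the linker, so this change is purely a
spelling cleanup with no behavioural effect. A minimal sketch of the directive
in use (the symbol name here is illustrative, not taken from this tree):

	.global example_entry	/* same effect as: .globl example_entry */
	example_entry:
		ret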

Change-Id: I60395b78d11e79097ba2f778e41bdbcef8facb8d
diff --git a/src/arch/aarch64/hftest/cpu_entry.S b/src/arch/aarch64/hftest/cpu_entry.S
index 6461377..702e84b 100644
--- a/src/arch/aarch64/hftest/cpu_entry.S
+++ b/src/arch/aarch64/hftest/cpu_entry.S
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-.globl vm_cpu_entry_raw
+.global vm_cpu_entry_raw
 vm_cpu_entry_raw:
 	/* Initialise stack from the cpu_start_state struct. */
 	ldr x1, [x0]
diff --git a/src/arch/aarch64/hypervisor/cpu_entry.S b/src/arch/aarch64/hypervisor/cpu_entry.S
index 1b90bce..301a1a1 100644
--- a/src/arch/aarch64/hypervisor/cpu_entry.S
+++ b/src/arch/aarch64/hypervisor/cpu_entry.S
@@ -17,7 +17,7 @@
 #include "offsets.h"
 
 .section .text.cpu_entry, "ax"
-.globl cpu_entry
+.global cpu_entry
 cpu_entry:
 	/* Disable interrupts. */
 	msr DAIFSet, #0xf
diff --git a/src/arch/aarch64/hypervisor/exceptions.S b/src/arch/aarch64/hypervisor/exceptions.S
index 6524540..900396a 100644
--- a/src/arch/aarch64/hypervisor/exceptions.S
+++ b/src/arch/aarch64/hypervisor/exceptions.S
@@ -360,7 +360,7 @@
 	mov x0, x19
 
 	/* Intentional fallthrough. */
-.globl vcpu_restore_all_and_run
+.global vcpu_restore_all_and_run
 vcpu_restore_all_and_run:
 	/* Update pointer to current vcpu. */
 	msr tpidr_el2, x0
diff --git a/src/arch/aarch64/smc.S b/src/arch/aarch64/smc.S
index 6710ea6..f49955d 100644
--- a/src/arch/aarch64/smc.S
+++ b/src/arch/aarch64/smc.S
@@ -15,7 +15,7 @@
  */
 
 .section .text.smc, "ax"
-.globl smc
+.global smc
 smc:
 	smc #0
 	ret