Diffstat (limited to 'arch/kvx/cpu/barebox.lds.S')
-rw-r--r-- | arch/kvx/cpu/barebox.lds.S | 95
1 file changed, 95 insertions, 0 deletions
diff --git a/arch/kvx/cpu/barebox.lds.S b/arch/kvx/cpu/barebox.lds.S
new file mode 100644
index 0000000000..8d1944afbf
--- /dev/null
+++ b/arch/kvx/cpu/barebox.lds.S
@@ -0,0 +1,95 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * Copyright (C) 2019 Kalray Inc.
+ */
+
+#include <config.h>
+#include <asm/common.h>
+#include <asm/sys_arch.h>
+#include <asm-generic/barebox.lds.h>
+
+OUTPUT_FORMAT("elf64-kvx")
+OUTPUT_ARCH("kvx:kv3-1:64")
+
+#define DTB_DEFAULT_SIZE (24 * 1024)
+
+SECTIONS
+{
+ . = CONFIG_ARCH_TEXT_BASE;
+ .text ALIGN(4) : {
+ *(.startup);
+ _stext = .;
+ *(.text)
+ }
+
+	/* The exception vector must be aligned to a large boundary */
+ .exception ALIGN(EXCEPTION_ALIGNMENT) :
+ {
+ _exception_start = ABSOLUTE(.);
+		/*
+		 * The first handler sits at _exception_start + EXCEPTION_STRIDE.
+		 * Advance the location counter by at least one byte so that the
+		 * following ALIGN moves on to the next stride instead of staying
+		 * on the current one.
+		 */
+ . += 1;
+ . = ALIGN(EXCEPTION_STRIDE);
+
+ /* Entry for traps */
+ KEEP(*(.exception.trap));
+ . += 1;
+
+ /* Entry for interrupts */
+ . = ALIGN(EXCEPTION_STRIDE);
+ KEEP(*(.exception.interrupt));
+ . += 1;
+
+ /* Entry for syscall */
+ . = ALIGN(EXCEPTION_STRIDE);
+ KEEP(*(.exception.syscall));
+ }
+
+ .rodata ALIGN(8) : {
+ *(.rodata*)
+ . = ALIGN(8);
+ RO_DATA_SECTION
+ }
+
+ .dtb ALIGN(16):
+ {
+ __dtb_start = .;
+ . += DTB_DEFAULT_SIZE;
+ __dtb_end = .;
+ }
+
+ _etext = .; /* End of text and rodata section */
+
+ .data ALIGN(4): {
+ sdata = .;
+ _sdata = .;
+ *(.data)
+ . = ALIGN(8);
+ __stack_end = .;
+ . += CONFIG_STACK_SIZE;
+ __stack_start = .;
+ }
+
+ .gdb_page ALIGN(4 * 1024) :
+ {
+ _debug_start = ABSOLUTE(.);
+ _debug_phy_start = ABSOLUTE(.);
+ . += 4 * 1024;
+ }
+ __debug_phy_end = ABSOLUTE(.);
+ _edata = .;
+
+	/* bss is cleared with quad (16-byte) stores, so align it to 16 bytes */
+ .bss ALIGN(16):
+ {
+ __bss_start = .;
+ *(.bss)
+ . = ALIGN(16);
+ __bss_stop = .;
+ }
+ __end = .;
+}
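
The .exception output section above groups the input sections .exception.trap, .exception.interrupt and .exception.syscall, bumping the location counter by one byte before each ALIGN(EXCEPTION_STRIDE) so that every handler lands on its own stride offset from _exception_start. As a rough illustration only (the real kvx handlers are most likely defined in assembly; the function names below are hypothetical and not barebox code), C code could be placed into those input sections with section attributes:

/*
 * Hypothetical sketch: force each handler into the input section that the
 * linker script above lays out at a fixed EXCEPTION_STRIDE offset.
 */
void __attribute__((section(".exception.trap"), used)) trap_entry(void)
{
	/* handle hardware traps */
}

void __attribute__((section(".exception.interrupt"), used)) interrupt_entry(void)
{
	/* handle interrupts */
}

void __attribute__((section(".exception.syscall"), used)) syscall_entry(void)
{
	/* handle syscall entries */
}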
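
The comment above the .bss section notes that it is cleared with quad (16-byte) stores, which is why both __bss_start and __bss_stop sit on 16-byte boundaries. A minimal C sketch of such a clear loop, assuming only the two linker-provided symbols (the real barebox startup code may do this differently, e.g. in assembly):

#include <stdint.h>

/* Addresses provided by the linker script above. */
extern char __bss_start[];
extern char __bss_stop[];

static void clear_bss(void)
{
	uint64_t *p = (uint64_t *)__bss_start;
	uint64_t *end = (uint64_t *)__bss_stop;

	/*
	 * Both boundaries are 16-byte aligned, so zero the region in
	 * 16-byte chunks; the two 64-bit stores stand in for the single
	 * quad store an assembly version would use.
	 */
	while (p < end) {
		p[0] = 0;
		p[1] = 0;
		p += 2;
	}
}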