arch/x86: always use _start as entry symbol for all stages
Instead of keeping track of all the combinations of entry points depending on the stage and other options, just use _start. That way, there's no need to update the arch/header.ld for complicated cases, as _start is always the entry point for a stage. Change-Id: I7795a5ee1caba92ab533bdb8c3ad80294901a48b Signed-off-by: Aaron Durbin <adurbin@chromium.org> Reviewed-on: https://review.coreboot.org/13882 Tested-by: build bot (Jenkins) Reviewed-by: Andrey Petrov <andrey.petrov@intel.com>
This commit is contained in:
parent
4330a9c8e5
commit
8198c678f7
|
@ -20,27 +20,4 @@ PHDRS
|
||||||
to_load PT_LOAD;
|
to_load PT_LOAD;
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
* For CONFIG_SEPARATE_VERSTAGE romstage doesn't have the cache-as-ram setup.
|
|
||||||
* It only contains the teardown code. The verstage has the cache-as-ram setup
|
|
||||||
* code. Therefore, it needs the protected_start symbol as its entry point.
|
|
||||||
* The romstage entry will be named _start for consistency, but it's likely
|
|
||||||
* to be implemented in the chipset code in order to control the logic flow.
|
|
||||||
*/
|
|
||||||
#if IS_ENABLED(CONFIG_SEPARATE_VERSTAGE)
|
|
||||||
#if ENV_RAMSTAGE || ENV_RMODULE || ENV_ROMSTAGE
|
|
||||||
ENTRY(_start)
|
|
||||||
#elif ENV_VERSTAGE
|
|
||||||
ENTRY(protected_start)
|
|
||||||
#endif
|
|
||||||
#else
|
|
||||||
#if ENV_RAMSTAGE || ENV_RMODULE
|
|
||||||
ENTRY(_start)
|
|
||||||
#elif ENV_ROMSTAGE
|
|
||||||
ENTRY(protected_start)
|
|
||||||
#endif
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#if IS_ENABLED(CONFIG_C_ENVIRONMENT_BOOTBLOCK) && ENV_BOOTBLOCK
|
|
||||||
ENTRY(_start)
|
ENTRY(_start)
|
||||||
#endif
|
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
|
|
||||||
#include <arch/rom_segs.h>
|
#include <arch/rom_segs.h>
|
||||||
#include <cpu/x86/post_code.h>
|
#include <cpu/x86/post_code.h>
|
||||||
|
#include <rules.h>
|
||||||
|
|
||||||
.code32
|
.code32
|
||||||
|
|
||||||
|
@ -44,10 +45,17 @@ gdt_end:
|
||||||
*
|
*
|
||||||
* NOTE aligned to 4 so that we are sure that the prefetch
|
* NOTE aligned to 4 so that we are sure that the prefetch
|
||||||
* cache will be reloaded.
|
* cache will be reloaded.
|
||||||
|
*
|
||||||
|
* In the bootblock there is already a ljmp to __protected_start and
|
||||||
|
* the reset vector jumps to symbol _start16bit in entry16.inc from
|
||||||
|
* the reset vector's symbol which is _start. Therefore, don't
|
||||||
|
* expose the _start symbol for bootblock.
|
||||||
*/
|
*/
|
||||||
.align 4
|
.align 4
|
||||||
.globl protected_start
|
#if !ENV_BOOTBLOCK
|
||||||
protected_start:
|
.globl _start
|
||||||
|
_start:
|
||||||
|
#endif
|
||||||
|
|
||||||
lgdt %cs:gdtptr
|
lgdt %cs:gdtptr
|
||||||
ljmp $ROM_CODE_SEG, $__protected_start
|
ljmp $ROM_CODE_SEG, $__protected_start
|
||||||
|
|
Loading…
Reference in New Issue