process. The PPC support is still stubbed, with commented-out x86 code as a guideline for an implementor.

Signed-off-by: Patrick Georgi <patrick.georgi@coresystems.de>
Acked-by: Stefan Reinauer <stepan@coresystems.de>

git-svn-id: svn://svn.coreboot.org/coreboot/trunk@4293 2b7e53f0-3cfb-0310-b3e9-8179ed1497e1
/*
 * This file is part of the libpayload project.
 *
 * Copyright (C) 2008 Advanced Micro Devices, Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

	.globl _entry, _leave
	.text
	.align 4

/*
 * Our entry point - assume that the CPU is in 32 bit protected mode and
 * all segments are in a flat model. That's our operating mode, so we won't
 * change anything.
 */
_entry:
#if 0
	call _init

	/* We're back - go back to the bootloader. */
	ret
	.align 4

/*
 * This function saves off the previous stack and switches us to our
 * own execution environment.
 */
_init:
	/* No interrupts, please. */
	cli

	/* Store current stack pointer. */
	movl %esp, %esi

	/* Store EAX and EBX */
	movl %eax, loader_eax
	movl %ebx, loader_ebx

	/* Setup new stack. */
	movl $_stack, %ebx
	movl %ebx, %esp

	/* Save old stack pointer. */
	pushl %esi

	/* Let's rock. */
	call start_main

#endif
	/* %eax has the return value - pass it on unmolested */
_leave:
#if 0
	/* Get old stack pointer. */
	popl %ebx

	/* Restore old stack. */
	movl %ebx, %esp

	/* Return to the original context. */
	ret
#endif
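
For a would-be implementor, here is a minimal, hypothetical sketch (not part of this commit) of the same dance in PowerPC assembly: stash the loader's state, switch to our own stack, and call start_main. On PPC the stack pointer is r1 and the return address lives in the link register; loader_lr is an assumed save slot analogous to loader_eax/loader_ebx above, and _stack and start_main are assumed to exist as in the x86 code. It is kept under #if 0 to match the stubbed style of the file.

/*
 * Hypothetical PPC sketch - illustrative only, following the x86 flow
 * above. r1 is the stack pointer by ABI convention; the return address
 * lives in the link register (LR). loader_lr is an assumed save slot.
 */
#if 0
_entry:
	mflr	0			/* fetch the loader's return address */
	lis	9, loader_lr@ha		/* ... and stash it in loader_lr */
	stw	0, loader_lr@l(9)

	mr	10, 1			/* remember the loader's stack pointer */
	lis	1, _stack@ha		/* switch to our own stack */
	addi	1, 1, _stack@l
	addi	1, 1, -16		/* leave room to park the old pointer */
	stw	10, 0(1)		/* ... and park it there */

	/* Let's rock. */
	bl	start_main

_leave:
	lwz	1, 0(1)			/* fetch the loader's stack pointer back */
	lis	9, loader_lr@ha		/* fetch the saved return address */
	lwz	0, loader_lr@l(9)
	mtlr	0
	blr				/* back to the bootloader */
#endif

As with the x86 version, _leave assumes r1 still points at the slot where the loader's stack pointer was parked, and the payload's return value passes through unmolested in r3 per the PPC ABI.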