Increase optimization, fix clobbers in vesad

This commit is contained in:
Jeremy Soller 2016-09-21 16:46:16 -06:00
parent 0540726890
commit afe7a99700
2 changed files with 8 additions and 12 deletions

View file

@@ -12,9 +12,9 @@ KCARGOFLAGS=--target $(KTARGET).json -- -C soft-float
 TARGET=$(ARCH)-unknown-redox
 BUILD=build/userspace
 RUSTC=./rustc.sh
-RUSTCFLAGS=--target $(TARGET).json -C soft-float --cfg redox
+RUSTCFLAGS=--target $(TARGET).json -C opt-level=2 -C soft-float --cfg redox
 CARGO=RUSTC="$(RUSTC)" cargo
-CARGOFLAGS=--target $(TARGET).json -- -C soft-float --cfg redox
+CARGOFLAGS=--target $(TARGET).json -- -C opt-level=2 -C soft-float --cfg redox
 
 # Default targets
 .PHONY: all clean qemu bochs FORCE
@@ -43,7 +43,7 @@ ifeq ($(ARCH),arm)
 QEMUFLAGS+=-cpu arm1176 -machine integratorcp
 QEMUFLAGS+=-nographic
 
-build/%.list: build/%
+%.list: %
 	$(ARCH)-none-eabi-objdump -C -D $< > $@
 
 $(KBUILD)/harddrive.bin: $(KBUILD)/kernel
@@ -68,7 +68,7 @@ else
 LD=$(ARCH)-elf-ld
 endif
 
-build/%.list: build/%
+%.list: %
 	objdump -C -M intel -D $< > $@
 
 $(KBUILD)/harddrive.bin: $(KBUILD)/kernel bootloader/$(ARCH)/**

View file

@@ -1,5 +1,4 @@
 #[cfg(target_arch = "x86_64")]
-#[allow(unused_assignments)]
 #[inline(always)]
 #[cold]
 pub unsafe fn fast_copy(dst: *mut u8, src: *const u8, len: usize) {
@@ -7,12 +6,11 @@ pub unsafe fn fast_copy(dst: *mut u8, src: *const u8, len: usize) {
     rep movsb"
     :
     : "{rdi}"(dst as usize), "{rsi}"(src as usize), "{rcx}"(len)
-    : "cc", "memory"
+    : "cc", "memory", "rdi", "rsi", "rcx"
     : "intel", "volatile");
 }
 
 #[cfg(target_arch = "x86_64")]
-#[allow(unused_assignments)]
 #[inline(always)]
 #[cold]
 pub unsafe fn fast_copy64(dst: *mut u64, src: *const u64, len: usize) {
@@ -20,12 +18,11 @@ pub unsafe fn fast_copy64(dst: *mut u64, src: *const u64, len: usize) {
     rep movsq"
     :
     : "{rdi}"(dst as usize), "{rsi}"(src as usize), "{rcx}"(len)
-    : "cc", "memory"
+    : "cc", "memory", "rdi", "rsi", "rcx"
     : "intel", "volatile");
 }
 
 #[cfg(target_arch = "x86_64")]
-#[allow(unused_assignments)]
 #[inline(always)]
 #[cold]
 pub unsafe fn fast_set32(dst: *mut u32, src: u32, len: usize) {
@@ -33,12 +30,11 @@ pub unsafe fn fast_set32(dst: *mut u32, src: u32, len: usize) {
     rep stosd"
     :
     : "{rdi}"(dst as usize), "{eax}"(src), "{rcx}"(len)
-    : "cc", "memory"
+    : "cc", "memory", "rdi", "rcx"
     : "intel", "volatile");
 }
 
 #[cfg(target_arch = "x86_64")]
-#[allow(unused_assignments)]
 #[inline(always)]
 #[cold]
 pub unsafe fn fast_set64(dst: *mut u64, src: u64, len: usize) {
@@ -46,6 +42,6 @@ pub unsafe fn fast_set64(dst: *mut u64, src: u64, len: usize) {
     rep stosq"
     :
     : "{rdi}"(dst as usize), "{rax}"(src), "{rcx}"(len)
-    : "cc", "memory"
+    : "cc", "memory", "rdi", "rcx"
     : "intel", "volatile");
 }