core/gpu: Ignore top bit when reading colors from the palette
According to CowBite, the top bit is supposed to be ignored. Fixes #99

Former-commit-id: 28a43311726f9ce5ef26c77687fedecc33b286e0
Former-commit-id: df0b025ed6d51e36bcffd4c91250858167b42f40
parent f84e425d24
commit 91515b6b10
@@ -340,14 +340,14 @@ impl Gpu {
     }

     #[inline(always)]
-    pub fn get_palette_color(&self, index: u32, palette_index: u32, offset: u32) -> Rgb15 {
-        if index == 0 || (palette_index != 0 && index % 16 == 0) {
+    pub fn get_palette_color(&self, index: u32, palette_bank: u32, offset: u32) -> Rgb15 {
+        if index == 0 || (palette_bank != 0 && index % 16 == 0) {
             return Rgb15::TRANSPARENT;
         }
-        Rgb15(
-            self.palette_ram
-                .read_16(offset + 2 * index + 0x20 * palette_index),
-        )
+        let value = self.palette_ram.read_16(offset + 2 * index + 0x20 * palette_bank);
+
+        // top bit is ignored
+        Rgb15(value & 0x7FFF)
     }

     #[inline]
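For context, a minimal sketch of why the `& 0x7FFF` mask matters: GBA palette entries are 16-bit BGR555 values, where bits 0-4 are red, 5-9 green, 10-14 blue, and bit 15 is unused, so it must be ignored on read. The standalone `decode_bgr555` helper below is hypothetical and not part of this codebase; it just illustrates the same masking the commit applies inside `get_palette_color`.

    /// Hypothetical helper, not from this repo: decode a raw 16-bit
    /// palette entry into 5-bit (r, g, b) components, ignoring bit 15.
    fn decode_bgr555(raw: u16) -> (u8, u8, u8) {
        let value = raw & 0x7FFF; // ignore the top bit, as in the commit above
        let r = (value & 0x1F) as u8;
        let g = ((value >> 5) & 0x1F) as u8;
        let b = ((value >> 10) & 0x1F) as u8;
        (r, g, b)
    }

    fn main() {
        // An entry with the unused top bit set decodes the same as without it.
        assert_eq!(decode_bgr555(0xFFFF), decode_bgr555(0x7FFF));
        println!("{:?}", decode_bgr555(0x7FFF)); // (31, 31, 31) = white
    }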