@@ -13,7 +13,7 @@ pub const registers = @cImport({
1313// have 48 active virtual address bits. If/when this changes, we need to come
1414// back here and rework the code just a bit to handle the multiple possible
1515// sizes instead of assuming 48 bits at compile time.
16- pub const X86_VADDR_BITS : u8 = 48 ;
16+ pub const x86_vaddr_bits : u8 = 48 ;
1717
1818// The definition of an x86/64 "canonical" address depends on the number of
1919// total meaningful virtual address bits. The canonical address range is
@@ -25,19 +25,19 @@ pub const X86_VADDR_BITS: u8 = 48;
2525// Precompute the mask for the [N - 1, 63] range so we can easily test to see if
2626// an address might be a user-mode address ((addr & Mask) == 0), or might be a
2727// kernel address ((addr & Mask) == Mask).
28- pub const X86_CANONICAL_ADDRESS_MASK : u64 = ~ ((@as (u64 , 1 ) << (X86_VADDR_BITS - 1 )) - 1 );
28+ pub const x86_canonical_address_mask : u64 = ~ ((@as (u64 , 1 ) << (x86_vaddr_bits - 1 )) - 1 );
2929
30- pub const X86_8BYTE_MASK : u32 = 0xFFFFFFFF ;
30+ pub const x86_8byte_mask : u32 = 0xFFFFFFFF ;
3131
32- pub const arch_exception_context = struct {
32+ pub const ArchExceptionContext = struct {
3333 frame : * regs.iframe_t ,
3434 cr2 : u64 ,
3535 //user_synth_code: u32,
3636 //user_synth_data: u32,
3737 is_page_fault : bool ,
3838};
3939
40- pub const x86_64_context_switch_frame = struct {
40+ pub const X86_64ContextSwitchFrame = struct {
4141 r15 : u64 ,
4242 r14 : u64 ,
4343 r13 : u64 ,
@@ -47,15 +47,15 @@ pub const x86_64_context_switch_frame = struct {
4747 rip : u64 ,
4848};
4949
50- pub const IO_BITMAP_BITS : usize = 65536 ;
51- pub const IO_BITMAP_BYTES : usize = IO_BITMAP_BITS / 8 ;
52- pub const IO_BITMAP_LONGS : usize = IO_BITMAP_BITS / @sizeOf (usize );
50+ pub const io_bitmap_bits : usize = 65536 ;
51+ pub const io_bitmap_bytes : usize = io_bitmap_bits / 8 ;
52+ pub const io_bitmap_longs : usize = io_bitmap_bits / @sizeOf (usize );
5353
5454// Assignment of Interrupt Stack Table entries
55- pub const NUM_ASSIGNED_IST_ENTRIES : usize = 3 ;
56- pub const NMI_IST_INDEX : usize = 1 ;
57- pub const MCE_IST_INDEX : usize = 2 ;
58- pub const DBF_IST_INDEX : usize = 3 ;
55+ pub const num_assigned_ist_entries : usize = 3 ;
56+ pub const nmi_ist_index : usize = 1 ;
57+ pub const mce_ist_index : usize = 2 ;
58+ pub const dbf_ist_index : usize = 3 ;
5959
6060// x86-64 TSS structure
6161pub const Tss = extern struct {
@@ -77,7 +77,7 @@ pub const Tss = extern struct {
7777 rsvd5 : u16 align (1 ),
7878 iomap_base : u16 align (1 ),
7979
80- tss_bitmap : [IO_BITMAP_BYTES + 1 ]u8 align (1 ),
80+ tss_bitmap : [io_bitmap_bytes + 1 ]u8 align (1 ),
8181};
8282
8383pub inline fn clts () void {
@@ -273,105 +273,105 @@ inline fn getSegmentRegister(comptime reg: SegmentRegister) u16 {
273273}
274274
275275// Convenience wrapper functions for each register
276- pub inline fn set_ds (value : u16 ) void {
276+ pub inline fn setDs (value : u16 ) void {
277277 setSegmentRegister (.ds , value );
278278}
279279
280- pub inline fn get_ds () u16 {
280+ pub inline fn getDs () u16 {
281281 return getSegmentRegister (.ds );
282282}
283283
284- pub inline fn set_es (value : u16 ) void {
284+ pub inline fn setEs (value : u16 ) void {
285285 setSegmentRegister (.es , value );
286286}
287287
288- pub inline fn get_es () u16 {
288+ pub inline fn getEs () u16 {
289289 return getSegmentRegister (.es );
290290}
291291
292- pub inline fn set_fs (value : u16 ) void {
292+ pub inline fn setFs (value : u16 ) void {
293293 setSegmentRegister (.fs , value );
294294}
295295
296- pub inline fn get_fs () u16 {
296+ pub inline fn getFs () u16 {
297297 return getSegmentRegister (.fs );
298298}
299299
300- pub inline fn set_gs (value : u16 ) void {
300+ pub inline fn setGs (value : u16 ) void {
301301 setSegmentRegister (.gs , value );
302302}
303303
304- pub inline fn get_gs () u16 {
304+ pub inline fn getGs () u16 {
305305 return getSegmentRegister (.gs );
306306}
307307
308- pub inline fn read_msr ( msr_id : u32 ) u64 {
309- var msr_read_val_lo : u32 = undefined ;
310- var msr_read_val_hi : u32 = undefined ;
308+ pub inline fn readMsr ( msrId : u32 ) u64 {
309+ var msrReadValLo : u32 = undefined ;
310+ var msrReadValHi : u32 = undefined ;
311311 asm volatile ("rdmsr"
312- : [lo ] "={eax}" (msr_read_val_lo ),
313- [hi ] "={edx}" (msr_read_val_hi ),
314- : [msr ] "{ecx}" (msr_id ),
312+ : [lo ] "={eax}" (msrReadValLo ),
313+ [hi ] "={edx}" (msrReadValHi ),
314+ : [msr ] "{ecx}" (msrId ),
315315 );
316- return (@as (u64 , msr_read_val_hi ) << 32 ) | msr_read_val_lo ;
316+ return (@as (u64 , msrReadValHi ) << 32 ) | msrReadValLo ;
317317}
318318
319- pub inline fn read_msr32 ( msr_id : u32 ) u32 {
320- var msr_read_val : u32 = undefined ;
319+ pub inline fn readMsr32 ( msrId : u32 ) u32 {
320+ var msrReadVal : u32 = undefined ;
321321 asm volatile ("rdmsr"
322- : [val ] "={eax}" (msr_read_val ),
323- : [msr ] "{ecx}" (msr_id ),
322+ : [val ] "={eax}" (msrReadVal ),
323+ : [msr ] "{ecx}" (msrId ),
324324 : "edx"
325325 );
326- return msr_read_val ;
326+ return msrReadVal ;
327327}
328328
329329// Implemented in assembly. NOTE(review): for an extern "C" fn the Zig
329329// identifier is the linked symbol name, so renaming read_msr_safe to
329329// readMsrSafe changes the symbol the linker looks for — the assembly
329329// implementation must export readMsrSafe as well, or this will fail to
329329// link. Confirm the .S file was updated in this commit.
330- pub extern "C" fn read_msr_safe ( msr_id : u32 , val : * u64 ) i32 ;
330+ pub extern "C" fn readMsrSafe ( msrId : u32 , val : * u64 ) i32 ;
331331
332- pub inline fn write_msr ( msr_id : u32 , msr_write_val : u64 ) void {
332+ pub inline fn writeMsr ( msrId : u32 , msrWriteVal : u64 ) void {
333333 asm volatile ("wrmsr"
334334 :
335- : [msr ] "{ecx}" (msr_id ),
336- [lo ] "{eax}" (@as (u32 , @truncate (msr_write_val ))),
337- [hi ] "{edx}" (@as (u32 , @truncate (msr_write_val >> 32 ))),
335+ : [msr ] "{ecx}" (msrId ),
336+ [lo ] "{eax}" (@as (u32 , @truncate (msrWriteVal ))),
337+ [hi ] "{edx}" (@as (u32 , @truncate (msrWriteVal >> 32 ))),
338338 );
339339}
340340
341- pub inline fn is_paging_enabled () bool {
341+ pub inline fn isPagingEnabled () bool {
342342 return (get_cr0 () & registers .X86_CR0_PG ) != 0 ;
343343}
344344
345- pub inline fn is_PAE_enabled () bool {
346- if (! is_paging_enabled ()) return false ;
345+ pub inline fn isPaeEnabled () bool {
346+ if (! isPagingEnabled ()) return false ;
347347 return (get_cr4 () & registers .X86_CR4_PAE ) != 0 ;
348348}
349349
350- pub inline fn read_gs_offset64 (comptime offset : usize ) u64 {
350+ pub inline fn readGsOffset64 (comptime offset : usize ) u64 {
351351 var ret : u64 = undefined ;
352352 asm volatile ("movq %%gs:" ++ std .fmt .comptimePrint ("{}" , .{offset }) ++ ", %[ret]"
353353 : [ret ] "=r" (ret ),
354354 );
355355 return ret ;
356356}
357357
358- pub inline fn write_gs_offset64 (comptime offset : usize , val : u64 ) void {
358+ pub inline fn writeGsOffset64 (comptime offset : usize , val : u64 ) void {
359359 asm volatile ("movq %[val], %%gs:" ++ std .fmt .comptimePrint ("{}" , .{offset })
360360 :
361361 : [val ] "ir" (val ),
362362 : "memory"
363363 );
364364}
365365
366- pub inline fn read_gs_offset32 (comptime offset : usize ) u32 {
366+ pub inline fn readGsOffset32 (comptime offset : usize ) u32 {
367367 var ret : u32 = undefined ;
368368 asm volatile ("movl %%gs:" ++ std .fmt .comptimePrint ("{}" , .{offset }) ++ ", %[ret]"
369369 : [ret ] "=r" (ret ),
370370 );
371371 return ret ;
372372}
373373
374- pub inline fn write_gs_offset32 (comptime offset : usize , val : u32 ) void {
374+ pub inline fn writeGsOffset32 (comptime offset : usize , val : u32 ) void {
375375 asm volatile ("movl %[val], %%gs:" ++ std .fmt .comptimePrint ("{}" , .{offset })
376376 :
377377 : [val ] "ir" (val ),