This repo has no description.

revert k prefix naming #2

Merged. Opened by jonaskruckenberg.de, targeting `main` from `push-mxypkzmpquky`.
Labels

None yet.

Assignees

None yet.

Participants 1
AT URI
at://did:plc:wur5mmsnhlocanyqtus3oex5/sh.tangled.repo.pull/3mgottuu24v22
+198 -198
Diff #2
+1 -1
build/defs.bzl
··· 7 7 8 8 Args: 9 9 name: Library name. Sources are globbed from `{name}/src/**/*.rs`. 10 - crate: Crate name override (e.g. "kutil"). Defaults to None (uses `name`). 10 + crate: Crate name override (e.g. "util"). Defaults to None (uses `name`). 11 11 deps: Dependencies shared between the library and its tests. 12 12 test_deps: Additional dependencies only needed by tests. 13 13 test_features: Features only enabled for tests.
+19 -19
lib/BUCK
··· 2 2 3 3 k23_rust_library( 4 4 name = "util", 5 - crate = "kutil", 5 + crate = "util", 6 6 ) 7 7 8 8 k23_rust_library( 9 9 name = "trap", 10 - crate = "ktrap", 10 + crate = "trap", 11 11 ) 12 12 13 13 k23_rust_library( 14 14 name = "arrayvec", 15 - crate = "karrayvec", 15 + crate = "arrayvec", 16 16 ) 17 17 18 18 k23_rust_library( 19 19 name = "fastrand", 20 - crate = "kfastrand", 20 + crate = "fastrand", 21 21 ) 22 22 23 23 k23_rust_library( 24 24 name = "wavltree", 25 - crate = "kwavltree", 25 + crate = "wavltree", 26 26 test_deps = ["//third-party:rand"], 27 27 ) 28 28 29 29 k23_rust_library( 30 30 name = "cpu-local", 31 - crate = "kcpu_local", 31 + crate = "cpu_local", 32 32 deps = [ 33 33 ":util", 34 34 ], ··· 36 36 37 37 k23_rust_library( 38 38 name = "spin", 39 - crate = "kspin", 39 + crate = "spin", 40 40 deps = [ 41 41 ":util", 42 42 "//third-party:cfg-if", ··· 50 50 51 51 k23_rust_library( 52 52 name = "fdt", 53 - crate = "kfdt", 53 + crate = "fdt", 54 54 deps = [ 55 55 "//third-party:fallible-iterator", 56 56 ], ··· 58 58 59 59 k23_rust_library( 60 60 name = "riscv", 61 - crate = "kriscv", 61 + crate = "riscv", 62 62 deps = [ 63 63 ":spin", 64 64 ":trap", ··· 69 69 70 70 k23_rust_library( 71 71 name = "abort", 72 - crate = "kabort", 72 + crate = "abort", 73 73 deps = [ 74 74 "//third-party:cfg-if", 75 75 ] + select({ ··· 80 80 81 81 k23_rust_library( 82 82 name = "uart-16550", 83 - crate = "kuart_16550", 83 + crate = "uart_16550", 84 84 deps = [ 85 85 ":spin", 86 86 "//third-party:bitflags", ··· 89 89 90 90 k23_rust_library( 91 91 name = "sharded-slab", 92 - crate = "ksharded_slab", 92 + crate = "sharded_slab", 93 93 deps = [ 94 94 ":spin", 95 95 ":cpu-local", ··· 99 99 100 100 k23_rust_library( 101 101 name = "unwind", 102 - crate = "kunwind", 102 + crate = "unwind", 103 103 deps = [ 104 104 ":spin", 105 105 ":abort", ··· 113 113 114 114 k23_rust_library( 115 115 name = "addr2line", 116 - crate = "kaddr2line", 116 + crate = "addr2line", 117 117 deps 
= [ 118 118 ":spin", 119 119 "//third-party:gimli", ··· 125 125 126 126 k23_rust_library( 127 127 name = "backtrace", 128 - crate = "kbacktrace", 128 + crate = "backtrace", 129 129 deps = [ 130 130 ":addr2line", 131 131 ":arrayvec", ··· 140 140 141 141 k23_rust_library( 142 142 name = "panic-unwind", 143 - crate = "kpanic_unwind", 143 + crate = "panic_unwind", 144 144 deps = [ 145 145 ":unwind", 146 146 ":spin", ··· 153 153 154 154 k23_rust_library( 155 155 name = "mem-core", 156 - crate = "kmem_core", 156 + crate = "mem_core", 157 157 deps = [ 158 158 ":arrayvec", 159 159 ":riscv", ··· 174 174 175 175 k23_rust_library( 176 176 name = "range-tree", 177 - crate = "krange_tree", 177 + crate = "range_tree", 178 178 deps = [ 179 179 "//third-party:cfg-if", 180 180 "//third-party:tracing", ··· 189 189 190 190 k23_rust_library( 191 191 name = "wast", 192 - crate = "kwast", 192 + crate = "wast", 193 193 deps = [ 194 194 ":cpu-local", 195 195 "//third-party:leb128fmt",
+1 -1
lib/_kmem/Cargo.toml
··· 7 7 8 8 [dependencies] 9 9 wavltree = { workspace = true, features = ["dot"] } 10 - kcpu-local.workspace = true 10 + cpu-local.workspace = true 11 11 12 12 # 3rd-party dependencies 13 13 lock_api.workspace = true
+1 -1
lib/_kmem/src/address_space.rs
··· 13 13 use core::ptr::NonNull; 14 14 15 15 use anyhow::{format_err, Context}; 16 - use kwavltree::WAVLTree; 16 + use wavltree::WAVLTree; 17 17 use rand::distr::Uniform; 18 18 use rand::Rng; 19 19 use rand_chacha::ChaCha20Rng;
+9 -9
lib/_kmem/src/address_space/region.rs
··· 32 32 33 33 /// Links to other regions in the WAVL tree 34 34 #[pin] 35 - links: kwavltree::Links<AddressSpaceRegion>, 35 + links: wavltree::Links<AddressSpaceRegion>, 36 36 } 37 37 38 38 impl AddressSpaceRegion { ··· 50 50 51 51 max_gap: 0, 52 52 subtree_range: spot..spot.checked_add(layout.size()).unwrap(), 53 - links: kwavltree::Links::new(), 53 + links: wavltree::Links::new(), 54 54 } 55 55 } 56 56 ··· 98 98 Some(unsafe { self.links.parent()?.as_ref() }) 99 99 } 100 100 101 - /// Update the gap search metadata of this region. This method is called in the [`kwavltree::Linked`] 101 + /// Update the gap search metadata of this region. This method is called in the [`wavltree::Linked`] 102 102 /// implementation below after each tree mutation that impacted this node or its subtree in some way 103 103 /// (insertion, rotation, deletion). 104 104 /// ··· 169 169 170 170 // Safety: the pinning and !Unpin requirements are enforced by the `#[pin_project(!Unpin)]` attribute 171 171 // of the `AddressSpaceRegion`. see above. 172 - unsafe impl kwavltree::Linked for AddressSpaceRegion { 172 + unsafe impl wavltree::Linked for AddressSpaceRegion { 173 173 /// Any heap-allocated type that owns an element may be used. 
174 174 /// 175 175 /// An element *must not* move while part of an intrusive data ··· 180 180 181 181 /// Convert an owned `Handle` into a raw pointer 182 182 fn into_ptr(handle: Self::Handle) -> NonNull<Self> { 183 - // Safety: kwavltree treats the ptr as pinned 183 + // Safety: wavltree treats the ptr as pinned 184 184 unsafe { NonNull::from(Box::leak(Pin::into_inner_unchecked(handle))) } 185 185 } 186 186 ··· 191 191 unsafe { Pin::new_unchecked(Box::from_raw(ptr.as_ptr())) } 192 192 } 193 193 194 - unsafe fn links(ptr: NonNull<Self>) -> NonNull<kwavltree::Links<Self>> { 194 + unsafe fn links(ptr: NonNull<Self>) -> NonNull<wavltree::Links<Self>> { 195 195 ptr.map_addr(|addr| { 196 196 let offset = offset_of!(Self, links); 197 197 addr.checked_add(offset).unwrap() ··· 219 219 parent: NonNull<Self>, 220 220 sibling: Option<NonNull<Self>>, 221 221 lr_child: Option<NonNull<Self>>, 222 - side: kwavltree::Side, 222 + side: wavltree::Side, 223 223 ) { 224 224 let this = self.project(); 225 225 // Safety: caller ensures ptr is valid ··· 229 229 this.subtree_range.end = _parent.subtree_range.end; 230 230 *this.max_gap = _parent.max_gap; 231 231 232 - if side == kwavltree::Side::Left { 232 + if side == wavltree::Side::Left { 233 233 Self::update_gap_metadata(parent, sibling, lr_child); 234 234 } else { 235 235 Self::update_gap_metadata(parent, lr_child, sibling); ··· 241 241 mod tests { 242 242 use core::alloc::Layout; 243 243 244 - use kwavltree::WAVLTree; 244 + use wavltree::WAVLTree; 245 245 246 246 use super::*; 247 247 use crate::{AccessRules, VirtualAddress};
+1 -1
lib/abort/Cargo.toml
··· 1 1 [package] 2 - name = "kabort" 2 + name = "abort" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true
+1 -1
lib/abort/src/lib.rs
··· 28 28 extern crate std; 29 29 std::process::abort(); 30 30 } else if #[cfg(any(target_arch = "riscv64", target_arch = "riscv32"))] { 31 - kriscv::exit(1); 31 + riscv::exit(1); 32 32 } else { 33 33 loop {} 34 34 // compile_error!("unsupported target architecture")
+2 -2
lib/addr2line/Cargo.toml
··· 1 1 [package] 2 - name = "kaddr2line" 2 + name = "addr2line" 3 3 version = "0.24.2" 4 4 description = "A cross-platform symbolication library written in Rust, using `gimli`" 5 5 edition.workspace = true ··· 7 7 license = "Apache-2.0 OR MIT" 8 8 9 9 [dependencies] 10 - kspin.workspace = true 10 + spin.workspace = true 11 11 gimli = { workspace = true, default-features = false, features = ["read"] } 12 12 smallvec.workspace = true 13 13 fallible-iterator.workspace = true
+1 -1
lib/addr2line/src/lib.rs
··· 49 49 mod line; 50 50 51 51 mod lookup; 52 - use kspin::OnceLock; 52 + use spin::OnceLock; 53 53 pub use lookup::{LookupContinuation, LookupResult, SplitDwarfLoad}; 54 54 55 55 mod unit;
+1 -1
lib/arrayvec/Cargo.toml
··· 1 1 [package] 2 - name = "karrayvec" 2 + name = "arrayvec" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true
+4 -4
lib/backtrace/Cargo.toml
··· 1 1 [package] 2 - name = "kbacktrace" 2 + name = "backtrace" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true ··· 9 9 workspace = true 10 10 11 11 [dependencies] 12 - kaddr2line.workspace = true 13 - karrayvec.workspace = true 14 - kunwind.workspace = true 12 + addr2line.workspace = true 13 + arrayvec.workspace = true 14 + unwind.workspace = true 15 15 16 16 # 3rd-party dependencies 17 17 gimli.workspace = true
+8 -8
lib/backtrace/src/lib.rs
··· 14 14 use core::fmt::Formatter; 15 15 16 16 use fallible_iterator::FallibleIterator; 17 - use karrayvec::ArrayVec; 18 - use kunwind::FrameIter; 17 + use arrayvec::ArrayVec; 18 + use unwind::FrameIter; 19 19 20 20 pub use crate::symbolize::SymbolizeContext; 21 21 ··· 37 37 /// 38 38 /// # Errors 39 39 /// 40 - /// Returns the underlying [`kunwind::Error`] if walking the stack fails. 40 + /// Returns the underlying [`unwind::Error`] if walking the stack fails. 41 41 #[inline] 42 - pub fn capture(ctx: &'a SymbolizeContext<'data>) -> Result<Self, kunwind::Error> { 42 + pub fn capture(ctx: &'a SymbolizeContext<'data>) -> Result<Self, unwind::Error> { 43 43 Self::new_inner(ctx, FrameIter::new()) 44 44 } 45 45 ··· 53 53 /// 54 54 /// # Errors 55 55 /// 56 - /// Returns the underlying [`kunwind::Error`] if walking the stack fails. 56 + /// Returns the underlying [`unwind::Error`] if walking the stack fails. 57 57 #[inline] 58 58 pub fn from_registers( 59 59 ctx: &'a SymbolizeContext<'data>, 60 - regs: kunwind::Registers, 60 + regs: unwind::Registers, 61 61 ip: usize, 62 - ) -> Result<Self, kunwind::Error> { 62 + ) -> Result<Self, unwind::Error> { 63 63 let iter = FrameIter::from_registers(regs, ip); 64 64 Self::new_inner(ctx, iter) 65 65 } ··· 67 67 fn new_inner( 68 68 ctx: &'a SymbolizeContext<'data>, 69 69 mut iter: FrameIter, 70 - ) -> Result<Self, kunwind::Error> { 70 + ) -> Result<Self, unwind::Error> { 71 71 let mut frames = ArrayVec::new(); 72 72 let mut frames_omitted: usize = 0; 73 73
+4 -4
lib/backtrace/src/symbolize.rs
··· 19 19 /// `addr2line`'s frame internally has all the nitty gritty details. 20 20 Frame { 21 21 addr: *mut c_void, 22 - location: Option<kaddr2line::Location<'a>>, 22 + location: Option<addr2line::Location<'a>>, 23 23 name: Option<&'a str>, 24 24 }, 25 25 /// Couldn't find debug information, but we found it in the symbol table of ··· 126 126 addr: u64, 127 127 elf: &'ctx xmas_elf::ElfFile<'a>, 128 128 symtab: &'ctx [xmas_elf::symbol_table::Entry64], 129 - iter: kaddr2line::FrameIter<'ctx, EndianSlice<'a, NativeEndian>>, 129 + iter: addr2line::FrameIter<'ctx, EndianSlice<'a, NativeEndian>>, 130 130 anything: bool, 131 131 } 132 132 ··· 174 174 175 175 /// Context necessary to resolve an address to its symbol name and source location. 176 176 pub struct SymbolizeContext<'a> { 177 - addr2line: kaddr2line::Context<EndianSlice<'a, NativeEndian>>, 177 + addr2line: addr2line::Context<EndianSlice<'a, NativeEndian>>, 178 178 elf: xmas_elf::ElfFile<'a>, 179 179 adjust_vma: u64, 180 180 } ··· 191 191 }; 192 192 Ok(EndianSlice::new(data, NativeEndian)) 193 193 })?; 194 - let addr2line = kaddr2line::Context::from_dwarf(dwarf)?; 194 + let addr2line = addr2line::Context::from_dwarf(dwarf)?; 195 195 196 196 Ok(Self { 197 197 addr2line,
+2 -2
lib/cpu-local/Cargo.toml
··· 1 1 [package] 2 - name = "kcpu-local" 2 + name = "cpu-local" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true 6 6 license.workspace = true 7 7 8 8 [dependencies] 9 - kutil.workspace = true 9 + util.workspace = true 10 10 11 11 [lints] 12 12 workspace = true
+1 -1
lib/cpu-local/src/collection.rs
··· 12 12 use core::sync::atomic::{AtomicBool, AtomicPtr, AtomicUsize, Ordering}; 13 13 use core::{fmt, mem, ptr, slice}; 14 14 15 - use kutil::CheckedMaybeUninit; 15 + use util::CheckedMaybeUninit; 16 16 17 17 use crate::cpu_local; 18 18
+1 -1
lib/fastrand/Cargo.toml
··· 1 1 [package] 2 - name = "kfastrand" 2 + name = "fastrand" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true
+1 -1
lib/fdt/Cargo.toml
··· 1 1 [package] 2 - name = "kfdt" 2 + name = "fdt" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true
+6 -6
lib/mem-core/Cargo.toml
··· 1 1 [package] 2 - name = "kmem-core" 2 + name = "mem-core" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true 6 6 license.workspace = true 7 7 8 8 [dependencies] 9 - karrayvec.workspace = true 9 + arrayvec.workspace = true 10 10 riscv.workspace = true 11 - kcpu-local = { workspace = true, optional = true } 12 - kspin = { workspace = true, optional = true } 11 + cpu-local = { workspace = true, optional = true } 12 + spin = { workspace = true, optional = true } 13 13 proptest = { workspace = true, optional = true } 14 14 proptest-derive = { workspace = true, optional = true } 15 15 parking_lot = { version = "0.12.5", optional = true } ··· 20 20 lock_api.workspace = true 21 21 22 22 [dev-dependencies] 23 - kmem-core = { workspace = true, features = ["test_utils"] } 23 + mem-core = { workspace = true, features = ["test_utils"] } 24 24 test-log = "0.2.19" 25 25 26 26 [features] 27 - test_utils = ["kcpu-local", "kspin", "proptest", "proptest-derive", "parking_lot"] 27 + test_utils = ["cpu-local", "spin", "proptest", "proptest-derive", "parking_lot"] 28 28 29 29 [lints] 30 30 workspace = true
+2 -2
lib/mem-core/src/arch/riscv64.rs
··· 1 1 use core::ops::Range; 2 2 3 - use kriscv::satp; 4 - use kriscv::sbi::rfence::{sfence_vma, sfence_vma_asid}; 3 + use riscv::satp; 4 + use riscv::sbi::rfence::{sfence_vma, sfence_vma_asid}; 5 5 6 6 use crate::arch::PageTableLevel; 7 7 use crate::{
+1 -1
lib/mem-core/src/flush.rs
··· 1 1 use core::mem; 2 2 use core::ops::Range; 3 3 4 - use karrayvec::ArrayVec; 4 + use arrayvec::ArrayVec; 5 5 6 6 use crate::VirtualAddress; 7 7 use crate::arch::Arch;
+2 -2
lib/mem-core/src/frame_allocator/bump.rs
··· 4 4 use core::ops::Range; 5 5 use core::{cmp, fmt, iter}; 6 6 7 - use karrayvec::ArrayVec; 7 + use arrayvec::ArrayVec; 8 8 use lock_api::Mutex; 9 9 10 10 use crate::arch::Arch; ··· 479 479 480 480 enum Blocks<const MAX: usize> { 481 481 One(iter::Once<Range<PhysicalAddress>>), 482 - Multiple(karrayvec::IntoIter<Range<PhysicalAddress>, MAX>), 482 + Multiple(arrayvec::IntoIter<Range<PhysicalAddress>, MAX>), 483 483 } 484 484 485 485 impl<const MAX: usize> Iterator for Blocks<MAX> {
+1 -1
lib/mem-core/src/table.rs
··· 1 1 use core::marker::PhantomData; 2 2 use core::ops::Range; 3 3 4 - use karrayvec::ArrayVec; 4 + use arrayvec::ArrayVec; 5 5 6 6 use crate::arch::{Arch, PageTableEntry, PageTableLevel}; 7 7 use crate::physmap::PhysMap;
+2 -2
lib/mem-core/src/test_utils/machine.rs
··· 6 6 use std::sync::Arc; 7 7 use std::{cmp, fmt}; 8 8 9 - use karrayvec::ArrayVec; 10 - use kcpu_local::collection::CpuLocal; 9 + use arrayvec::ArrayVec; 10 + use cpu_local::collection::CpuLocal; 11 11 12 12 use crate::address_space::Active; 13 13 use crate::arch::{Arch, PageTableEntry, PageTableLevel};
+5 -5
lib/panic-unwind/Cargo.toml
··· 1 1 [package] 2 - name = "kpanic-unwind" 2 + name = "panic-unwind" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true 6 6 license.workspace = true 7 7 8 8 [dependencies] 9 - kspin.workspace = true 10 - kcpu-local.workspace = true 11 - kunwind.workspace = true 12 - kabort.workspace = true 9 + spin.workspace = true 10 + cpu-local.workspace = true 11 + unwind.workspace = true 12 + abort.workspace = true 13 13 14 14 # 3rd-party dependencies 15 15 tracing.workspace = true
+1 -1
lib/panic-unwind/src/hook.rs
··· 10 10 use core::panic::Location; 11 11 use core::{fmt, mem}; 12 12 13 - use kspin::RwLock; 13 + use spin::RwLock; 14 14 15 15 #[derive(Debug)] 16 16 pub struct PanicHookInfo<'a> {
+8 -8
lib/panic-unwind/src/lib.rs
··· 29 29 use core::mem; 30 30 use core::panic::{PanicPayload, UnwindSafe}; 31 31 use hook::{default_hook, Hook, PanicHookInfo, HOOK}; 32 - use kabort::abort; 32 + use abort::abort; 33 33 use panic_count::MustAbort; 34 34 35 35 /// Determines whether the current thread is unwinding because of panic. ··· 47 47 where 48 48 F: FnOnce() -> R + UnwindSafe, 49 49 { 50 - kunwind::catch_unwind(f).inspect_err(|_| { 50 + unwind::catch_unwind(f).inspect_err(|_| { 51 51 panic_count::decrease(); // decrease the panic count, since we caught it 52 52 }) 53 53 } ··· 55 55 /// Resume an unwind previously caught with [`catch_unwind`]. 56 56 pub fn resume_unwind(payload: Box<dyn Any + Send>) -> ! { 57 57 debug_assert!(panic_count::increase(false).is_none()); 58 - kunwind::with_context(|regs, pc| rust_panic(payload, regs.clone(), pc)) 58 + unwind::with_context(|regs, pc| rust_panic(payload, regs.clone(), pc)) 59 59 } 60 60 61 61 /// Begin unwinding from an externally captured set of registers (such as from a trap handler). ··· 64 64 /// 65 65 /// This will start walking the stack and calling `Drop` implementations starting the the `pc` and 66 66 /// register set you provided. Be VERY careful that it is actually correctly captured. 67 - pub unsafe fn begin_unwind(payload: Box<dyn Any + Send>, regs: kunwind::Registers, pc: usize) -> ! { 67 + pub unsafe fn begin_unwind(payload: Box<dyn Any + Send>, regs: unwind::Registers, pc: usize) -> ! { 68 68 debug_assert!(panic_count::increase(false).is_none()); 69 69 rust_panic(payload, regs, pc) 70 70 } ··· 105 105 abort(); 106 106 } 107 107 108 - kunwind::with_context(|regs, pc| rust_panic(payload, regs.clone(), pc)) 108 + unwind::with_context(|regs, pc| rust_panic(payload, regs.clone(), pc)) 109 109 } 110 110 111 111 /// Mirroring std, this is an unmangled function on which to slap 112 112 /// yer breakpoints for backtracing panics. 
113 113 #[inline(never)] 114 114 #[unsafe(no_mangle)] 115 - fn rust_panic(payload: Box<dyn Any + Send>, regs: kunwind::Registers, pc: usize) -> ! { 115 + fn rust_panic(payload: Box<dyn Any + Send>, regs: unwind::Registers, pc: usize) -> ! { 116 116 // Safety: `begin_unwind` will either return an error or not return at all 117 - match unsafe { kunwind::begin_unwind_with(payload, regs, pc).unwrap_err_unchecked() } { 118 - kunwind::Error::EndOfStack => { 117 + match unsafe { unwind::begin_unwind_with(payload, regs, pc).unwrap_err_unchecked() } { 118 + unwind::Error::EndOfStack => { 119 119 tracing::error!( 120 120 "unwinding completed without finding a `catch_unwind` make sure there is at least a root level catch unwind wrapping the main function. aborting." 121 121 );
+1 -1
lib/panic-unwind/src/panic_count.rs
··· 8 8 use core::cell::Cell; 9 9 use core::sync::atomic::{AtomicUsize, Ordering}; 10 10 11 - use kcpu_local::cpu_local; 11 + use cpu_local::cpu_local; 12 12 13 13 /// A reason for forcing an immediate abort on panic. 14 14 #[derive(Debug)]
+2 -2
lib/range-tree/benches/comparisons.rs
··· 11 11 12 12 use brie_tree::BTree; 13 13 use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; 14 - use krange_tree::RangeTree; 15 - use kwavltree::{Linked, Links, WAVLTree}; 14 + use range_tree::RangeTree; 15 + use wavltree::{Linked, Links, WAVLTree}; 16 16 use nonmax::NonMaxU64; 17 17 use pin_project::pin_project; 18 18 use rand::distr::Uniform;
+1 -1
lib/range-tree/tests/gaps.rs
··· 5 5 use std::num::NonZeroU32; 6 6 use std::ops::Bound; 7 7 8 - use krange_tree::RangeTree; 8 + use range_tree::RangeTree; 9 9 10 10 use crate::common::nonzero; 11 11
+1 -1
lib/range-tree/tests/insertion.rs
··· 7 7 use std::alloc::Global; 8 8 use std::range::RangeInclusive; 9 9 10 - use krange_tree::{OverlapError, RangeTree}; 10 + use range_tree::{OverlapError, RangeTree}; 11 11 use rand::seq::SliceRandom; 12 12 13 13 use crate::common::nonzero;
+1 -1
lib/range-tree/tests/lookup.rs
··· 1 1 use core::num::NonZeroU64; 2 2 3 - use krange_tree::RangeTree; 3 + use range_tree::RangeTree; 4 4 5 5 use crate::common::nonzero; 6 6
+1 -1
lib/range-tree/tests/proptest.rs
··· 8 8 use std::ops; 9 9 use std::range::RangeInclusive; 10 10 11 - use krange_tree::RangeTree; 11 + use range_tree::RangeTree; 12 12 use proptest::collection::SizeRange; 13 13 use proptest::prelude::*; 14 14 use rand::seq::SliceRandom;
+1 -1
lib/riscv/Cargo.toml
··· 10 10 workspace = true 11 11 12 12 [dependencies] 13 - kspin.workspace = true 13 + spin.workspace = true 14 14 trap.workspace = true 15 15 16 16 # 3rd-party dependencies
+1 -1
lib/riscv/src/hio.rs
··· 10 10 use core::fmt::{Error, Write}; 11 11 use core::{fmt, slice}; 12 12 13 - use kspin::Mutex; 13 + use spin::Mutex; 14 14 15 15 use super::semihosting::syscall; 16 16
+1 -1
lib/riscv/src/trap.rs
··· 1 1 use crate::scause::{Exception, Interrupt}; 2 2 3 - pub type Trap = ktrap::Trap<Interrupt, Exception>; 3 + pub type Trap = trap::Trap<Interrupt, Exception>;
+3 -3
lib/sharded-slab/Cargo.toml
··· 1 1 [package] 2 - name = "ksharded-slab" 2 + name = "sharded-slab" 3 3 version = "0.1.7" 4 4 authors = ["Eliza Weisman <eliza@buoyant.io>"] 5 5 edition.workspace = true ··· 12 12 13 13 [dependencies] 14 14 log.workspace = true 15 - kspin.workspace = true 16 - kcpu-local.workspace = true 15 + spin.workspace = true 16 + cpu-local.workspace = true 17 17 18 18 [features] 19 19 loom = []
+2 -2
lib/sharded-slab/src/clear.rs
··· 59 59 } 60 60 } 61 61 62 - impl<T: Clear> Clear for kspin::Mutex<T> { 62 + impl<T: Clear> Clear for spin::Mutex<T> { 63 63 #[inline] 64 64 fn clear(&mut self) { 65 65 self.get_mut().clear(); 66 66 } 67 67 } 68 68 69 - impl<T: Clear> Clear for kspin::RwLock<T> { 69 + impl<T: Clear> Clear for spin::RwLock<T> { 70 70 #[inline] 71 71 fn clear(&mut self) { 72 72 self.write().clear();
+19 -19
lib/sharded-slab/src/lib.rs
··· 49 49 //! 50 50 //! Inserting an item into the slab, returning an index: 51 51 //! ```rust 52 - //! # use ksharded_slab::Slab; 52 + //! # use sharded_slab::Slab; 53 53 //! let slab = Slab::new(); 54 54 //! 55 55 //! let key = slab.insert("hello world").unwrap(); ··· 58 58 //! 59 59 //! To share a slab across threads, it may be wrapped in an `Arc`: 60 60 //! ```rust 61 - //! # use ksharded_slab::Slab; 61 + //! # use sharded_slab::Slab; 62 62 //! use alloc::sync::Arc; 63 63 //! let slab = Arc::new(Slab::new()); 64 64 //! ··· 83 83 //! each item, providing granular locking of items rather than of the slab: 84 84 //! 85 85 //! ```rust 86 - //! # use ksharded_slab::Slab; 86 + //! # use sharded_slab::Slab; 87 87 //! use core::sync::{Arc, Mutex}; 88 88 //! let slab = Arc::new(Slab::new()); 89 89 //! ··· 263 263 /// # Examples 264 264 /// 265 265 /// ``` 266 - /// # use ksharded_slab::Slab; 266 + /// # use sharded_slab::Slab; 267 267 /// let mut slab = Slab::new(); 268 268 /// 269 269 /// let hello = { ··· 299 299 /// # Examples 300 300 /// 301 301 /// ``` 302 - /// # use ksharded_slab::Slab; 302 + /// # use sharded_slab::Slab; 303 303 /// # extern crate alloc; 304 304 /// use alloc::sync::Arc; 305 305 /// ··· 318 318 /// for the `'static` lifetime: 319 319 /// 320 320 /// ``` 321 - /// # use ksharded_slab::Slab; 321 + /// # use sharded_slab::Slab; 322 322 /// # extern crate alloc; 323 - /// use ksharded_slab::OwnedEntry; 323 + /// use sharded_slab::OwnedEntry; 324 324 /// use alloc::sync::Arc; 325 325 /// 326 326 /// pub struct MyStruct { ··· 352 352 /// `OwnedEntry`s may be sent between threads: 353 353 /// 354 354 /// ``` 355 - /// # use ksharded_slab::Slab; 355 + /// # use sharded_slab::Slab; 356 356 /// use core::{thread, sync::Arc}; 357 357 /// 358 358 /// let slab: Arc<Slab<&'static str>> = Arc::new(Slab::new()); ··· 418 418 /// 419 419 /// # Examples 420 420 /// ```rust 421 - /// # use ksharded_slab::Slab; 421 + /// # use sharded_slab::Slab; 422 422 /// let 
slab = Slab::new(); 423 423 /// 424 424 /// let key = slab.insert("hello world").unwrap(); ··· 445 445 /// # Examples 446 446 /// 447 447 /// ``` 448 - /// # use ksharded_slab::Slab; 448 + /// # use sharded_slab::Slab; 449 449 /// let mut slab = Slab::new(); 450 450 /// 451 451 /// let hello = { ··· 484 484 /// # Examples 485 485 /// 486 486 /// ```rust 487 - /// let slab = ksharded_slab::Slab::new(); 487 + /// let slab = sharded_slab::Slab::new(); 488 488 /// let key = slab.insert("hello world").unwrap(); 489 489 /// 490 490 /// // Remove the item from the slab. ··· 553 553 /// # Examples 554 554 /// 555 555 /// ```rust 556 - /// let slab = ksharded_slab::Slab::new(); 556 + /// let slab = sharded_slab::Slab::new(); 557 557 /// let key = slab.insert("hello world").unwrap(); 558 558 /// 559 559 /// // Remove the item from the slab, returning it. ··· 608 608 /// # Examples 609 609 /// 610 610 /// ```rust 611 - /// let slab = ksharded_slab::Slab::new(); 611 + /// let slab = sharded_slab::Slab::new(); 612 612 /// let key = slab.insert("hello world").unwrap(); 613 613 /// 614 614 /// assert_eq!(slab.get(key).unwrap(), "hello world"); ··· 648 648 /// # Examples 649 649 /// 650 650 /// ``` 651 - /// # use ksharded_slab::Slab; 651 + /// # use sharded_slab::Slab; 652 652 /// # extern crate alloc; 653 653 /// use alloc::sync::Arc; 654 654 /// ··· 667 667 /// for the `'static` lifetime: 668 668 /// 669 669 /// ``` 670 - /// # use ksharded_slab::Slab; 670 + /// # use sharded_slab::Slab; 671 671 /// # extern crate alloc; 672 - /// use ksharded_slab::OwnedEntry; 672 + /// use sharded_slab::OwnedEntry; 673 673 /// use alloc::sync::Arc; 674 674 /// 675 675 /// pub struct MyStruct { ··· 726 726 /// # Examples 727 727 /// 728 728 /// ``` 729 - /// let slab = ksharded_slab::Slab::new(); 729 + /// let slab = sharded_slab::Slab::new(); 730 730 /// 731 731 /// let key = slab.insert("hello world").unwrap(); 732 732 /// assert!(slab.contains(key)); ··· 863 863 /// # Examples 864 864 /// 
865 865 /// ``` 866 - /// # use ksharded_slab::Slab; 866 + /// # use sharded_slab::Slab; 867 867 /// let mut slab = Slab::new(); 868 868 /// 869 869 /// let hello = { ··· 905 905 /// # Examples 906 906 /// 907 907 /// ``` 908 - /// # use ksharded_slab::*; 908 + /// # use sharded_slab::*; 909 909 /// let mut slab = Slab::new(); 910 910 /// 911 911 /// let hello = {
+1 -1
lib/sharded-slab/src/page/slot.rs
··· 3 3 use core::sync::atomic::{AtomicUsize, Ordering}; 4 4 use core::{fmt, mem, ptr}; 5 5 6 - use kspin::Backoff; 6 + use spin::Backoff; 7 7 8 8 use super::FreeList; 9 9 use crate::clear::Clear;
+2 -2
lib/sharded-slab/src/pool.rs
··· 949 949 shard.clear_after_release(self.key); 950 950 } else { 951 951 log::trace!("-> shard={:?} does not exist! THIS IS A BUG", shard_idx); 952 - // debug_assert!(kpanic_unwind::panicking(), "[internal error] tried to drop an `OwnedRef` to a slot on a shard that never existed!"); 952 + // debug_assert!(panic_unwind::panicking(), "[internal error] tried to drop an `OwnedRef` to a slot on a shard that never existed!"); 953 953 } 954 954 } 955 955 } ··· 1077 1077 shard.clear_after_release(self.key); 1078 1078 } else { 1079 1079 log::trace!("-> shard does not exist! THIS IS A BUG"); 1080 - // debug_assert!(kpanic_unwind::panicking(), "[internal error] tried to drop an `OwnedRefMut` to a slot on a shard that never existed!"); 1080 + // debug_assert!(panic_unwind::panicking(), "[internal error] tried to drop an `OwnedRefMut` to a slot on a shard that never existed!"); 1081 1081 } 1082 1082 } 1083 1083 }
+2 -2
lib/sharded-slab/src/tid.rs
··· 4 4 use core::marker::PhantomData; 5 5 use core::sync::atomic::{AtomicUsize, Ordering}; 6 6 7 - use kcpu_local::cpu_local; 8 - use kspin::{LazyLock, Mutex}; 7 + use cpu_local::cpu_local; 8 + use spin::{LazyLock, Mutex}; 9 9 10 10 use crate::cfg::{self, CfgPrivate}; 11 11 use crate::{Pack, page};
+2 -2
lib/spin/Cargo.toml
··· 1 1 [package] 2 - name = "kspin" 2 + name = "spin" 3 3 description = "Synchronization primitives for use in k23" 4 4 version.workspace = true 5 5 edition.workspace = true ··· 11 11 12 12 [dependencies] 13 13 cfg-if.workspace = true 14 - kutil.workspace = true 14 + util.workspace = true 15 15 16 16 # 3rd-party dependencies 17 17 lock_api.workspace = true
+1 -1
lib/spin/src/barrier.rs
··· 5 5 // http://opensource.org/licenses/MIT>, at your option. This file may not be 6 6 // copied, modified, or distributed except according to those terms. 7 7 8 - use kutil::loom_const_fn; 8 + use util::loom_const_fn; 9 9 10 10 use crate::{Backoff, Mutex}; 11 11
+1 -1
lib/spin/src/lazy_lock.rs
··· 10 10 use core::panic::{RefUnwindSafe, UnwindSafe}; 11 11 use core::{fmt, ptr}; 12 12 13 - use kutil::loom_const_fn; 13 + use util::loom_const_fn; 14 14 15 15 use super::Once; 16 16 use super::once::ExclusiveState;
+1 -1
lib/spin/src/once.rs
··· 7 7 8 8 use core::mem; 9 9 10 - use kutil::loom_const_fn; 10 + use util::loom_const_fn; 11 11 12 12 use crate::Backoff; 13 13 use crate::loom::sync::atomic::{AtomicU8, Ordering};
+1 -1
lib/spin/src/once_lock.rs
··· 9 9 use core::mem::MaybeUninit; 10 10 use core::panic::{RefUnwindSafe, UnwindSafe}; 11 11 12 - use kutil::loom_const_fn; 12 + use util::loom_const_fn; 13 13 14 14 use super::Once; 15 15 use crate::loom::cell::UnsafeCell;
+2 -2
lib/uart-16550/Cargo.toml
··· 1 1 [package] 2 - name = "kuart-16550" 2 + name = "uart-16550" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true ··· 7 7 8 8 [dependencies] 9 9 bitflags.workspace = true 10 - kspin.workspace = true 10 + spin.workspace = true 11 11 12 12 [lints] 13 13 workspace = true
+1 -1
lib/uart-16550/src/lib.rs
··· 11 11 use core::sync::atomic::{AtomicPtr, Ordering}; 12 12 13 13 use bitflags::bitflags; 14 - use kspin::Backoff; 14 + use spin::Backoff; 15 15 16 16 macro_rules! wait_for { 17 17 ($cond:expr, $boff:expr) => {
+3 -3
lib/unwind/Cargo.toml
··· 1 1 [package] 2 - name = "kunwind" 2 + name = "unwind" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true ··· 11 11 [dependencies] 12 12 cfg-if.workspace = true 13 13 gimli = { workspace = true, features = ["read-core"] } 14 - kspin.workspace = true 14 + spin.workspace = true 15 15 fallible-iterator.workspace = true 16 - kabort.workspace = true 16 + abort.workspace = true 17 17 tracing.workspace = true
+1 -1
lib/unwind/src/eh_info.rs
··· 9 9 BaseAddresses, EhFrame, EhFrameHdr, EndianSlice, FrameDescriptionEntry, NativeEndian, 10 10 ParsedEhFrameHdr, UnwindSection, 11 11 }; 12 - use kspin::LazyLock; 12 + use spin::LazyLock; 13 13 14 14 use super::utils::{deref_pointer, get_unlimited_slice}; 15 15
+1 -1
lib/unwind/src/exception.rs
··· 10 10 use core::ffi::c_int; 11 11 use core::ptr; 12 12 13 - use kabort::abort; 13 + use abort::abort; 14 14 15 15 use crate::Error; 16 16
+4 -4
lib/unwind/src/frame.rs
··· 277 277 278 278 /// An iterator over frames on the stack. 279 279 /// 280 - /// This is the primary means for walking the stack in `kunwind`. 280 + /// This is the primary means for walking the stack in `unwind`. 281 281 /// 282 282 /// ```rust 283 - /// # use kunwind::FrameIter; 283 + /// # use unwind::FrameIter; 284 284 /// use fallible_iterator::FallibleIterator; 285 285 /// 286 286 /// let mut frames = FrameIter::new(); // start the stack walking at the current frame ··· 293 293 /// You can also construct a `FrameIter` from the raw register context and instruction pointer: 294 294 /// 295 295 /// ```rust 296 - /// # use kunwind::FrameIter; 296 + /// # use unwind::FrameIter; 297 297 /// use fallible_iterator::FallibleIterator; 298 298 /// 299 299 /// // in a real scenario you would obtain these values from e.g. a signal/trap handler 300 - /// let regs = kunwind::Registers {gp: [0; 32],fp: [0; 32]}; 300 + /// let regs = unwind::Registers {gp: [0; 32],fp: [0; 32]}; 301 301 /// let ip = 0; 302 302 /// 303 303 /// let mut frames = FrameIter::from_registers(regs, ip);
+2 -2
lib/unwind/src/lang_items.rs
··· 5 5 // http://opensource.org/licenses/MIT>, at your option. This file may not be 6 6 // copied, modified, or distributed except according to those terms. 7 7 8 - use kabort::abort; 8 + use abort::abort; 9 9 10 10 use crate::exception::Exception; 11 11 use crate::utils::with_context; ··· 13 13 14 14 /// In traditional unwinders the personality routine is responsible for determining the unwinders 15 15 /// behaviour for each frame (stop unwinding because a handler has been found, continue etc.) 16 - /// Since `kunwind` only cares about Rust code, the personality routine here is just a stub to make 16 + /// Since `unwind` only cares about Rust code, the personality routine here is just a stub to make 17 17 /// the compiler happy and ensure we're not unwinding across language boundaries. The real unwinding 18 18 /// happens in [`raise_exception_phase2`]. 19 19 #[lang = "eh_personality"]
+4 -4
lib/unwind/src/lib.rs
··· 35 35 use exception::Exception; 36 36 use fallible_iterator::FallibleIterator; 37 37 pub use frame::{Frame, FrameIter}; 38 - use kabort::abort; 38 + use abort::abort; 39 39 use lang_items::ensure_rust_personality_routine; 40 40 pub use utils::with_context; 41 41 ··· 100 100 /// 101 101 /// Note that the traditional unwinding process has 2 phases, the first where the landing pad is discovered 102 102 /// and the second where the stack is actually unwound up to that landing pad. 103 - /// In `kunwind` we can get away with one phase because we bypass the language personality routine: 103 + /// In `unwind` we can get away with one phase because we bypass the language personality routine: 104 104 /// Traditional unwinders call the personality routine on each frame to discover a landing pad, and 105 105 /// then during cleanup call the personality routine again to determine if control should actually be 106 106 /// transferred. This is done so that languages have maximum flexibility in how they treat exceptions. 107 107 /// 108 - /// `kunwind` - being Rust-only - doesn't need that flexibility since Rust landing pads are called 109 - /// unconditionally. Furthermore, `kunwind` never actually calls the personality routine, instead 108 + /// `unwind` - being Rust-only - doesn't need that flexibility since Rust landing pads are called 109 + /// unconditionally. Furthermore, `unwind` never actually calls the personality routine, instead 110 110 /// parsing the [`EHAction`] for each frame directly. 111 111 /// 112 112 /// The name `raise_exception_phase2` is kept though to make it easier to understand what this function
+1 -1
lib/util/Cargo.toml
··· 1 1 [package] 2 - name = "kutil" 2 + name = "util" 3 3 version.workspace = true 4 4 edition.workspace = true 5 5 authors.workspace = true
+2 -2
lib/wast/Cargo.toml
··· 1 1 [package] 2 - name = "kwast" 2 + name = "wast" 3 3 version = "228.0.0" 4 4 authors = ["Alex Crichton <alex@alexcrichton.com>"] 5 5 edition.workspace = true ··· 26 26 unicode-width.workspace = true 27 27 memchr.workspace = true 28 28 bumpalo.workspace = true 29 - kcpu-local.workspace = true 29 + cpu-local.workspace = true 30 30 31 31 [dev-dependencies] 32 32 wat.workspace = true
+1 -1
lib/wast/src/gensym.rs
··· 1 1 use core::cell::Cell; 2 2 3 - use kcpu_local::cpu_local; 3 + use cpu_local::cpu_local; 4 4 5 5 use crate::token::{Id, Span}; 6 6
+1 -1
lib/wavltree/benches/insertions_deletions.rs
··· 4 4 use std::ptr::NonNull; 5 5 6 6 use criterion::{criterion_group, criterion_main, Criterion}; 7 - use kwavltree::{Linked, Links, WAVLTree}; 7 + use wavltree::{Linked, Links, WAVLTree}; 8 8 use rand::prelude::SliceRandom; 9 9 use rand::thread_rng; 10 10
+2 -2
lib/wavltree/fuzz/fuzz_targets/inserts.rs
··· 1 1 #![no_main] 2 2 3 - use kwavltree::Linked; 4 - use kwavltree::{Links, WAVLTree}; 3 + use wavltree::Linked; 4 + use wavltree::{Links, WAVLTree}; 5 5 use libfuzzer_sys::fuzz_target; 6 6 use std::cmp::Ordering; 7 7 use std::mem::offset_of;
+2 -2
lib/wavltree/fuzz/fuzz_targets/inserts_deletes.rs
··· 1 1 #![no_main] 2 2 3 - use kwavltree::Linked; 4 - use kwavltree::{Links, WAVLTree}; 3 + use wavltree::Linked; 4 + use wavltree::{Links, WAVLTree}; 5 5 use libfuzzer_sys::fuzz_target; 6 6 use std::fmt; 7 7 use std::mem::offset_of;
+15 -15
lib/wavltree/src/lib.rs
··· 32 32 //! # use core::ptr::NonNull; 33 33 //! #[derive(Default)] 34 34 //! struct MyNode { 35 - //! links: kwavltree::Links<Self>, 35 + //! links: wavltree::Links<Self>, 36 36 //! value: usize, 37 37 //! } 38 38 //! ··· 46 46 //! 47 47 //! // Participation in an intrusive collection requires a bit more effort 48 48 //! // on the values's part. 49 - //! unsafe impl kwavltree::Linked for MyNode { 49 + //! unsafe impl wavltree::Linked for MyNode { 50 50 //! /// The owning handle type, must ensure participating values are pinned in memory. 51 51 //! type Handle = Pin<Box<Self>>; 52 52 //! /// The key type by which entries are identified. ··· 64 64 //! } 65 65 //! 66 66 //! /// Return the links of the node pointed to by ptr. 67 - //! unsafe fn links(ptr: NonNull<Self>) -> NonNull<kwavltree::Links<Self>> { 67 + //! unsafe fn links(ptr: NonNull<Self>) -> NonNull<wavltree::Links<Self>> { 68 68 //! ptr.map_addr(|addr| { 69 69 //! let offset = offset_of!(Self, links); 70 70 //! addr.checked_add(offset).unwrap() ··· 79 79 //! } 80 80 //! 81 81 //! fn main() { 82 - //! let mut tree = kwavltree::WAVLTree::new(); 82 + //! let mut tree = wavltree::WAVLTree::new(); 83 83 //! tree.insert(Box::pin(MyNode::new(42))); 84 84 //! tree.insert(Box::pin(MyNode::new(17))); 85 85 //! 
tree.insert(Box::pin(MyNode::new(9))); ··· 190 190 /// Suppose we have an element type like this: 191 191 /// ```rust 192 192 /// struct Entry { 193 - /// links: kwavltree::Links<Self>, 193 + /// links: wavltree::Links<Self>, 194 194 /// data: usize, 195 195 /// } 196 196 /// ``` ··· 199 199 /// might look like this: 200 200 /// 201 201 /// ``` 202 - /// use kwavltree::Linked; 202 + /// use wavltree::Linked; 203 203 /// use core::ptr::NonNull; 204 204 /// 205 205 /// # struct Entry { 206 - /// # links: kwavltree::Links<Self>, 206 + /// # links: wavltree::Links<Self>, 207 207 /// # data: usize 208 208 /// # } 209 209 /// ··· 215 215 /// # unsafe fn from_ptr(ptr: NonNull<Self>) -> Self::Handle { ptr } 216 216 /// // ... 217 217 /// 218 - /// unsafe fn links(mut target: NonNull<Self>) -> NonNull<kwavltree::Links<Self>> { 218 + /// unsafe fn links(mut target: NonNull<Self>) -> NonNull<wavltree::Links<Self>> { 219 219 /// // Borrow the target's `links` field. 220 220 /// let links = &mut target.as_mut().links; 221 221 /// // Convert that reference into a pointer. ··· 235 235 /// 236 236 /// ``` 237 237 /// use core::ptr::{self, NonNull}; 238 - /// # use kwavltree::Linked; 238 + /// # use wavltree::Linked; 239 239 /// # struct Entry { 240 - /// # links: kwavltree::Links<Self>, 240 + /// # links: wavltree::Links<Self>, 241 241 /// # data: usize, 242 242 /// # } 243 243 /// ··· 249 249 /// # unsafe fn from_ptr(ptr: NonNull<Self>) -> Self::Handle { ptr } 250 250 /// // ... 
251 251 /// 252 - /// unsafe fn links(target: NonNull<Self>) -> NonNull<kwavltree::Links<Self>> { 252 + /// unsafe fn links(target: NonNull<Self>) -> NonNull<wavltree::Links<Self>> { 253 253 /// // Note that we use the `map_addr` method here that is part of the strict-provenance 254 254 /// target 255 255 /// .map_addr(|addr| { ··· 298 298 /// 299 299 /// ```rust 300 300 /// struct Entry { 301 - /// links: kwavltree::Links<Self>, 301 + /// links: wavltree::Links<Self>, 302 302 /// age: u16, 303 303 /// name: String 304 304 /// } ··· 311 311 /// # use std::ptr::NonNull; 312 312 /// 313 313 /// # struct Entry { 314 - /// # links: kwavltree::Links<Self>, 314 + /// # links: wavltree::Links<Self>, 315 315 /// # age: u16, 316 316 /// # name: String 317 317 /// # } 318 318 /// 319 - /// unsafe impl kwavltree::Linked for Entry { 319 + /// unsafe impl wavltree::Linked for Entry { 320 320 /// # type Handle = NonNull<Self>; 321 321 /// # fn into_ptr(r: Self::Handle) -> NonNull<Self> { r } 322 322 /// # unsafe fn from_ptr(ptr: NonNull<Self>) -> Self::Handle { ptr } 323 - /// # unsafe fn links(ptr: NonNull<Self>) -> NonNull<kwavltree::Links<Entry>> { ptr.map_addr(|a| { 323 + /// # unsafe fn links(ptr: NonNull<Self>) -> NonNull<wavltree::Links<Entry>> { ptr.map_addr(|a| { 324 324 /// # a.checked_add(core::mem::offset_of!(Self, links)).unwrap() 325 325 /// # }).cast() } 326 326 /// // ...
+7 -7
sys/async/Cargo.toml
··· 11 11 harness = false 12 12 13 13 [dependencies] 14 - kutil.workspace = true 15 - kcpu-local.workspace = true 16 - kspin.workspace = true 17 - kfastrand.workspace = true 18 - kpanic-unwind = { workspace = true, optional = true } 14 + util.workspace = true 15 + cpu-local.workspace = true 16 + spin.workspace = true 17 + fastrand.workspace = true 18 + panic-unwind = { workspace = true, optional = true } 19 19 cordyceps.workspace = true 20 - karrayvec.workspace = true 20 + arrayvec.workspace = true 21 21 22 22 # 3rd-party dependencies 23 23 static_assertions.workspace = true ··· 38 38 loom.workspace = true 39 39 40 40 [features] 41 - kunwind = ["dep:kpanic-unwind"] 41 + unwind = ["dep:panic-unwind"] 42 42 counters = [] 43 43 __bench = ["tracing/max_level_off"] 44 44
+3 -3
sys/async/src/executor.rs
··· 15 15 16 16 use cordyceps::mpsc_queue::{MpscQueue, TryDequeueError}; 17 17 use futures::pin_mut; 18 - use kcpu_local::collection::CpuLocal; 19 - use kfastrand::FastRand; 20 - use kspin::Backoff; 18 + use cpu_local::collection::CpuLocal; 19 + use fastrand::FastRand; 20 + use spin::Backoff; 21 21 22 22 use crate::error::{Closed, SpawnError}; 23 23 use crate::executor::steal::{Injector, Stealer, TryStealError};
+1 -1
sys/async/src/sync/wait_cell.rs
··· 11 11 use core::{fmt, task}; 12 12 13 13 use bitflags::bitflags; 14 - use kutil::{CachePadded, loom_const_fn}; 14 + use util::{CachePadded, loom_const_fn}; 15 15 use static_assertions::const_assert_eq; 16 16 17 17 use crate::error::Closed;
+2 -2
sys/async/src/sync/wait_queue.rs
··· 15 15 use core::{fmt, mem, ptr}; 16 16 17 17 use cordyceps::{Linked, List, list}; 18 - use kspin::{Mutex, MutexGuard}; 19 - use kutil::{CachePadded, loom_const_fn}; 18 + use spin::{Mutex, MutexGuard}; 19 + use util::{CachePadded, loom_const_fn}; 20 20 use mycelium_bitfield::{FromBits, bitfield, enum_from_bits}; 21 21 use pin_project::{pin_project, pinned_drop}; 22 22
+1 -1
sys/async/src/sync/wake_batch.rs
··· 7 7 8 8 use core::task::Waker; 9 9 10 - use karrayvec::ArrayVec; 10 + use arrayvec::ArrayVec; 11 11 12 12 const NUM_WAKERS: usize = 32; 13 13
+3 -3
sys/async/src/task.rs
··· 26 26 use cordyceps::mpsc_queue; 27 27 pub use id::Id; 28 28 pub use join_handle::{JoinError, JoinHandle}; 29 - use kutil::{CachePadded, CheckedMaybeUninit, loom_const_fn}; 29 + use util::{CachePadded, CheckedMaybeUninit, loom_const_fn}; 30 30 pub use yield_now::yield_now; 31 31 32 32 use crate::executor::Scheduler; ··· 869 869 cfg_if::cfg_if! { 870 870 if #[cfg(test)] { 871 871 let result = ::std::panic::catch_unwind(poll); 872 - } else if #[cfg(feature = "kunwind")] { 873 - let result = kpanic_unwind::catch_unwind(poll); 872 + } else if #[cfg(feature = "unwind")] { 873 + let result = panic_unwind::catch_unwind(poll); 874 874 } else { 875 875 let result = Ok(poll()); 876 876 }
+2 -2
sys/async/src/task/state.rs
··· 7 7 8 8 use core::fmt; 9 9 10 - use kspin::Backoff; 11 - use kutil::loom_const_fn; 10 + use spin::Backoff; 11 + use util::loom_const_fn; 12 12 13 13 use crate::loom::sync::atomic::{self, AtomicUsize, Ordering}; 14 14 use crate::task::PollResult;
+1 -1
sys/async/src/test_util.rs
··· 10 10 11 11 use futures::pin_mut; 12 12 use futures::task::WakerRef; 13 - use kutil::loom_const_fn; 13 + use util::loom_const_fn; 14 14 15 15 use crate::loom::sync::{Arc, Condvar, Mutex as StdMutex}; 16 16
+1 -1
sys/async/src/time/sleep.rs
··· 140 140 141 141 #[cfg(test)] 142 142 mod tests { 143 - use kfastrand::FastRand; 143 + use fastrand::FastRand; 144 144 use tracing_subscriber::EnvFilter; 145 145 use tracing_subscriber::fmt::format::FmtSpan; 146 146
+2 -2
sys/async/src/time/timer.rs
··· 15 15 16 16 use cordyceps::List; 17 17 pub(in crate::time) use entry::Entry; 18 - use kspin::Mutex; 19 - use kutil::loom_const_fn; 18 + use spin::Mutex; 19 + use util::loom_const_fn; 20 20 use wheel::Wheel; 21 21 22 22 use crate::loom::sync::atomic::Ordering;
+1 -1
sys/async/src/time/timer/entry.rs
··· 11 11 use core::sync::atomic::{AtomicBool, Ordering}; 12 12 13 13 use cordyceps::{Linked, list}; 14 - use kutil::loom_const_fn; 14 + use util::loom_const_fn; 15 15 use pin_project::pin_project; 16 16 17 17 use crate::sync::wait_cell::WaitCell;
+1 -1
sys/kernel/main.rs
··· 3 3 #![feature(array_repeat)] 4 4 5 5 extern crate alloc; 6 - extern crate kpanic_unwind; 6 + extern crate panic_unwind; 7 7 8 8 use alloc::alloc::GlobalAlloc; 9 9 use core::alloc::Layout;

History

3 rounds 2 comments
sign up or log in to add to the discussion
1 commit
expand
revert k prefix naming
expand 0 comments
pull request successfully merged
1 commit
expand
revert k prefix naming
expand 2 comments

the ktest renaming might become problematic in the future 🤔 but I guess we'll need to rebuild the testing infra anyway

let's revert this for now though

1 commit
expand
revert k prefix naming
expand 0 comments