diff --git a/std/internal/README.md b/std/internal/README.md new file mode 100644 index 000000000..3e1999be5 --- /dev/null +++ b/std/internal/README.md @@ -0,0 +1,4 @@ +# The `internal` standard library + +The `std::internal` standard library contains internal functionalities for standard library packages. \ +Usage is restricted for all references except standard library package (standard library module). diff --git a/std/internal/dynar/dynar.hpp b/std/internal/dynar/dynar.hpp new file mode 100644 index 000000000..7b01ce7ea --- /dev/null +++ b/std/internal/dynar/dynar.hpp @@ -0,0 +1,47 @@ +// Copyright 2023-2024 The Jule Programming Language. +// Use of this source code is governed by a BSD 3-Clause +// license that can be found in the LICENSE file. + +#ifndef __JULE_STD_INTERNAL_DYNAR +#define __JULE_STD_INTERNAL_DYNAR + +#include <new> + +#include "../../../api/error.hpp" +#include "../../../api/types.hpp" +#include "../../../api/panic.hpp" + +namespace jule_std { + template <typename Item> struct DynarBuffer { + Item *heap = nullptr; + jule::Int len = 0; + jule::Int cap = 0; + + DynarBuffer(void) = default; + + DynarBuffer(const jule_std::DynarBuffer &ref) { + this->operator=(ref); + } + + void operator=(const jule_std::DynarBuffer &ref) { + // Assignment to itself. + if (this->heap != nullptr && this->heap == ref.heap) + return; + + this->heap = new (std::nothrow) Item[ref.len]; + if (!this->heap) + jule::panic(__JULE_ERROR__MEMORY_ALLOCATION_FAILED); + this->len = ref.len; + this->cap = this->len; + std::copy(ref.heap, ref.heap + this->len, this->heap); + } + }; + +} // namespace jule_std + +#endif // __JULE_STD_INTERNAL_DYNAR diff --git a/std/internal/dynar/dynar.jule b/std/internal/dynar/dynar.jule new file mode 100644 index 000000000..3ff1841e8 --- /dev/null +++ b/std/internal/dynar/dynar.jule @@ -0,0 +1,120 @@ +// Copyright 2024 The Jule Programming Language. +// Use of this source code is governed by a BSD 3-Clause +// license that can be found in the LICENSE file. 
+ +use integ for std::jule::integrated + +cpp use "dynar.hpp" + +#typedef +#namespace "jule_std" +cpp struct DynarBuffer[T] { + pub heap: *T + pub len: int + pub cap: int +} + +#namespace "std" +cpp unsafe fn copy(mut start: *unsafe, mut end: *unsafe, mut dest: *unsafe) + +unsafe fn copy[T](mut dest: *T, mut buff: *T, len: int) { + cpp.copy(buff, buff + len, dest); +} + +// Dynamic allocation on heap, suitable for dynamic array scenarios. +// Independent from slice type of Jule, pure pointer implementation. +// Implements dispose method that deallocates buffer. +// Implements internal copy algorithm that copies all elements to +// destination Dynar[T] instead of using shared memory. +// +// This structure can be accepted as fully unsafe. +// But all functions can be used in safe Jule without Unsafe Jule. +// However, these functions have no safety checks, for performance purposes. +// Use this structure carefully. +pub struct Dynar[T] { + pub buff: cpp.DynarBuffer[T] +} + +impl Dynar { + pub static fn new(): Dynar[T] { + ret Dynar[T]{} + } + + // Deallocate heap. + pub fn dispose(mut self) { + self.buff.len = 0 + self.buff.cap = 0 + unsafe { + integ::delete_array[T](self.buff.heap) + } + self.buff.heap = nil + } + + // Resizes heap. It will allocate new allocation by size and + // copies old elements into new heap after allocation, then + // deallocates old heap allocation. + // Reports whether process completed successfully. + pub fn resize(mut self, n: int): bool { + let mut new_heap = integ::new_array[T](n) + if new_heap == nil { + ret false + } + if self.buff.heap == nil { + self.buff.heap = new_heap + self.buff.cap = n + ret true + } + unsafe { + if self.buff.len > 0 { + copy[T](new_heap, self.buff.heap, self.buff.len) + } + unsafe { + integ::delete_array[T](self.buff.heap) + } + self.buff.heap = new_heap + } + self.buff.cap = n + ret true + } + + // Returns pointer that points to first element of buffer. 
+ pub fn begin(mut self): *T { + ret self.buff.heap + } + + // Returns pointer that points to end of the last element of buffer. + pub fn end(mut self): *T { + ret self.buff.heap + self.buff.len + } + + // Shift elements to right by n until reach i. + // Starts at end of buffer, goes to left step by step. + pub fn shift_right(mut self, i: int, n: int) { + let mut j = self.end() - 1 + let k = self.begin() + i + for j >= k; j-- { + unsafe { + *(j + n) = *j + } + } + } + + // Shift elements to left by n until reach j, start at i. + pub fn shift_left(mut self, i: int, j: int, n: int) { + let mut k = self.begin() + i + let l = self.begin() + j + for k < l; k++ { + unsafe { + *(k - n) = *k + } + } + } + + // Set buffer elements, starts at i. + // Uses p for read data, reads n elements and assigns to buffer. + pub fn set(mut self, i: int, mut p: *T, n: int) { + unsafe { + copy[T](self.begin() + i, p, n) + } + } +} diff --git a/std/vec/vec.hpp b/std/vec/vec.hpp deleted file mode 100644 index 5345ec676..000000000 --- a/std/vec/vec.hpp +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2023-2024 The Jule Programming Language. -// Use of this source code is governed by a BSD 3-Clause -// license that can be found in the LICENSE file. - -#ifndef __JULE_STD_VEC -#define __JULE_STD_VEC - -#include <new> - -#include "../../api/types.hpp" -#include "../../api/slice.hpp" - -template <typename Item> -struct StdJuleVecBuffer -{ - Item *heap = nullptr; - jule::Int len = 0; - jule::Int cap = 0; - - StdJuleVecBuffer(void) = default; - - StdJuleVecBuffer(const StdJuleVecBuffer &ref) - { - this->operator=(ref); - } - - void operator=(const StdJuleVecBuffer &ref) - { - // Assignment to itself. 
- if (this->heap != nullptr && this->heap == ref.heap) - return; - - this->heap = new (std::nothrow) Item[ref.len]; - this->len = ref.len; - this->cap = this->len; - std::copy(ref.heap, ref.heap + this->len, this->heap); - } -}; - -#endif // __JULE_STD_VEC diff --git a/std/vec/vec.jule b/std/vec/vec.jule index c7364a6bb..628b5f16b 100644 --- a/std/vec/vec.jule +++ b/std/vec/vec.jule @@ -2,23 +2,7 @@ // Use of this source code is governed by a BSD 3-Clause // license that can be found in the LICENSE file. -use integrated for std::jule::integrated - -cpp use "vec.hpp" - -#typedef -cpp struct StdJuleVecBuffer[T] { - len: int - cap: int - heap: *T -} - -#namespace "std" -cpp unsafe fn copy(mut start: *unsafe, mut end: *unsafe, mut dest: *unsafe) - -unsafe fn copy[T](mut dest: *T, mut buff: *T, len: int) { - cpp.copy(buff, buff + len, dest); -} +use std::internal::dynar::{Dynar} const GROW_FACTOR = 2 @@ -33,112 +17,109 @@ const GROW_FACTOR = 2 // Vectors aren't use shared allocation between them. // Allocates new space and copies (not deep copy) items into space. pub struct Vec[T] { - buff: cpp.StdJuleVecBuffer[T] + mem: Dynar[T] } impl Vec { // Allocate new vector with capacity. 
pub static fn new(cap: int): Vec[T] { - let mut vec = Vec[T]{} - if cap != 0 { - vec.resize_alloc(cap) + let mut vec = Vec[T]{ + mem: Dynar[T].new(), + } + if cap > 0 { + vec.resize(cap) } ret vec } + fn resize(mut self, n: int) { + let ok = self.mem.resize(n) + if !ok { + panic("Vec[T]: heap reallocation failed") + } + } + fn calc_grow(self, delta: int): int { - let t = self.buff.len + delta + let t = self.len() + delta ret t * GROW_FACTOR } - fn resize_alloc(mut self, n: int) { - let mut new_heap = integrated::new_array[T](n) - if new_heap == nil { - panic("Vec[T].resize_alloc: heap reallocation failed") + fn review_size(mut self, delta: int) { + if self.len() + delta > self.cap() { + self.resize(self.calc_grow(delta)) } - - if self.buff.heap == nil { - self.buff.heap = new_heap - self.buff.cap = n - ret - } - - unsafe { - if self.buff.len > 0 { - copy[T](new_heap, self.buff.heap, self.buff.len) - } - unsafe { integrated::delete_array[T](self.buff.heap) } - self.buff.heap = new_heap - } - self.buff.cap = n } - fn review_allocation(mut self, delta: int) { - if self.buff.len+delta > self.buff.cap { - self.resize_alloc(self.calc_grow(delta)) - } + // Deallocate heap. + pub fn dispose(mut self) { + self.mem.dispose() } // Returns length. - pub fn len(self): int { ret self.buff.len } + pub fn len(self): int { + ret self.mem.buff.len + } // Returns capacity. - pub fn cap(self): int { ret self.buff.cap } + pub fn cap(self): int { + ret self.mem.buff.cap + } // Sets length. // Sets length to zero if n < 0. // Don't set length if n >= length of vector. pub fn set_len(mut self, n: int) { if n < 0 { - self.buff.len = 0 + self.mem.buff.len = 0 ret } - if n >= self.buff.len { + if n >= self.len() { ret } - self.buff.len = n + self.mem.buff.len = n } // Returns item by index. 
pub fn at(mut self, i: int): T { - if i < 0 || i >= self.buff.len { + if i < 0 || i >= self.len() { panic("Vec[T].at: out of range") } - ret unsafe { self.buff.heap[i] } + unsafe { + ret self.mem.buff.heap[i] + } } // Set element by index. pub fn set(mut self, i: int, mut item: T) { - if i < 0 || i >= self.buff.len { + if i < 0 || i >= self.len() { panic("Vec[T].set: out of range") } - unsafe { self.buff.heap[i] = item } + unsafe { + self.mem.buff.heap[i] = item + } } // Push item to end of heap. pub fn push(mut self, mut item: T) { - if self.buff.len >= self.buff.cap { - self.resize_alloc((self.buff.cap * GROW_FACTOR) + 1) + if self.len() >= self.cap() { + self.resize((self.cap() * GROW_FACTOR) + 1) } - - unsafe { self.buff.heap[self.buff.len] = item } - self.buff.len++ + unsafe { + self.mem.buff.heap[self.len()] = item + } + self.mem.buff.len++ } // Push item to front of heap. pub fn push_front(mut self, mut item: T) { - if self.buff.len >= self.buff.cap { - self.resize_alloc((self.buff.cap * GROW_FACTOR) + 1) + if self.len() >= self.cap() { + self.resize((self.cap() * GROW_FACTOR) + 1) } - - // Shift items. - let mut i = self.buff.len-1 - for i >= 0; i-- { - unsafe { self.buff.heap[i+1] = self.buff.heap[i] } + self.mem.shift_right(0, 1) + unsafe { + self.mem.buff.heap[0] = item } - - unsafe { self.buff.heap[0] = item } - self.buff.len++ + self.mem.buff.len++ } // Push items to end of heap. @@ -146,41 +127,30 @@ impl Vec { if items.len == 0 { ret } - - self.review_allocation(items.len) - - unsafe { copy[T](self.buff.heap + self.buff.len, &items[0], items.len) } - self.buff.len += items.len + self.review_size(items.len) + self.mem.set(self.len(), &items[0], items.len) + self.mem.buff.len += items.len } // Merge items to end of heap. 
pub fn merge(mut self, mut vec: Vec[T]) { - if vec.buff.len == 0 { + if vec.len() == 0 { ret } - - self.review_allocation(vec.buff.len) - - unsafe { copy[T](self.buff.heap + self.buff.len, vec.buff.heap, vec.buff.len) } - self.buff.len += vec.buff.len + self.review_size(vec.len()) + self.mem.set(self.len(), vec.mem.begin(), vec.len()) + self.mem.buff.len += vec.len() } // Merge items to front of heap. pub fn merge_front(mut self, mut vec: Vec[T]) { - if vec.buff.len == 0 { + if vec.len() == 0 { ret } - - self.review_allocation(vec.buff.len) - - // Shift items. - let mut i = self.buff.len - 1 - for i >= 0; i-- { - unsafe { self.buff.heap[i+vec.buff.len] = self.buff.heap[i] } - } - - unsafe { copy[T](self.buff.heap, vec.buff.heap, vec.buff.len) } - self.buff.len += vec.buff.len + self.review_size(vec.len()) + self.mem.shift_right(0, vec.len()) + self.mem.set(0, vec.mem.begin(), vec.len()) + self.mem.buff.len += vec.len() } // Remove range from heap. @@ -188,24 +158,17 @@ impl Vec { if n < 1 { ret } - if start < -1 { panic("Vec[T].remove_range: removing starts at negative index") } - if start >= self.buff.len { + if start >= self.len() { panic("Vec[T].remove_range: removing starts at out of range") } - if self.buff.len-start-n < 0 { + if self.len() - start - n < 0 { panic("Vec[T].remove_range: removing continues at out of range") } - - // Shift items. - let mut i = start + n - for i < self.buff.len; i++ { - unsafe { self.buff.heap[i-n] = self.buff.heap[i] } - } - - self.buff.len -= n + self.mem.shift_left(start + n, self.len(), n) + self.mem.buff.len -= n } // Insert item by index. 
@@ -213,23 +176,17 @@ impl Vec { if i < 0 { panic("Vec[T].insert: insertion starts at negative index") } - if i > self.buff.len { + if i > self.len() { panic("Vec[T].insert: insertion starts at out of range") } - - if self.buff.len >= self.buff.cap { - self.resize_alloc((self.buff.cap * GROW_FACTOR) + 1) + if self.len() >= self.cap() { + self.resize((self.cap() * GROW_FACTOR) + 1) } - - // Shift items. - let mut j = self.buff.len-1 - for j >= i; j-- { - unsafe { self.buff.heap[j+1] = self.buff.heap[j] } + self.mem.shift_right(i, 1) + unsafe { + self.mem.buff.heap[i] = item } - - // Assign. - unsafe { self.buff.heap[i] = item } - self.buff.len++ + self.mem.buff.len++ } // Slice between indexes except end position. @@ -237,10 +194,10 @@ impl Vec { if start < 0 { panic("Vec[T].slice: slicing starts at negative index") } - if start > self.buff.len { + if start > self.len() { panic("Vec[T].slice: slicing starts at out of range") } - if end > self.buff.len { + if end > self.len() { panic("Vec[T].slice: slicing ends at out of range") } if end < 0 { @@ -249,26 +206,12 @@ impl Vec { if start > end { panic("Vec[T].slice: start point < end point") } - if start == end { ret Vec[T].new(0) } - let mut vec = Vec[T].new(end - start) - - unsafe { copy[T](vec.buff.heap, self.buff.heap + start, vec.buff.cap) } - - vec.buff.len = vec.buff.cap - + vec.mem.set(0, self.mem.begin() + start, vec.cap()) + vec.mem.buff.len = vec.cap() ret vec } - - // Deallocate heap. - pub fn dispose(mut self) { - self.buff.len = 0 - self.buff.cap = 0 - - unsafe { integrated::delete_array[T](self.buff.heap) } - self.buff.heap = nil - } }