0.4.0-pre.19

This commit is contained in:
wisdgod
2026-01-24 22:36:00 +08:00
parent 1488cf6744
commit 5990ec339d
157 changed files with 3981 additions and 8308 deletions
+3 -3
View File
@@ -1,3 +1,3 @@
[unstable]
profile-rustflags = true
trim-paths = true
# [unstable]
# profile-rustflags = true
# trim-paths = true
+11 -4
View File
@@ -122,8 +122,15 @@ DEBUG_LOG_FILE=debug.log
# 日志储存条数(最大值100000)(为0则无日志,为100000则无限制,但日志文件上限8EB=8192PB=8388608TB,以防你看不懂,前提是你内存多大)
REQUEST_LOGS_LIMIT=100
# TCP保活时间(秒)(最大值600)
TCP_KEEPALIVE=90
# 连接空闲多久后开始发送探测包(秒)(最大值600)
TCP_KEEPALIVE=15
# 探测包发送间隔(秒)(最大值600)
# 按理不应大于 TCP_KEEPALIVE
TCP_KEEPALIVE_INTERVAL=15
# 探测失败后的最大重试次数(最大值20)
TCP_KEEPALIVE_RETRIES=3
# 服务请求超时(秒)(最大值600)
SERVICE_TIMEOUT=30
@@ -152,8 +159,8 @@ GENERAL_TIMEZONE=Asia/Shanghai
# 禁用HTTP2
# DISABLE_HTTP2=false
# Cursor客户端版本
CURSOR_CLIENT_VERSION=2.0.0
# Cursor客户端版本(已弃用)
# CURSOR_CLIENT_VERSION=2.0.0
# 思考标签(已弃用)
# THINKING_TAG=think
+8 -11
View File
@@ -9,11 +9,12 @@ members = [
"crates/byte_str",
"crates/atomic_enum",
"crates/proto-value",
"__build",
]
default-members = ["."]
[workspace.package]
version = "0.4.0-pre.17"
version = "0.4.0-pre.19"
edition = "2024"
authors = ["wisdgod <nav@wisdgod.com>"]
description = "A format compatibility layer for the Cursor API"
@@ -42,7 +43,6 @@ codegen-units = 256
[profile.fast]
inherits = "dev"
opt-level = 1
trim-paths = "all"
# ===== 性能测试配置(接近 release 但编译更快)=====
[profile.bench]
@@ -52,21 +52,16 @@ codegen-units = 16
# ===== 发布配置(性能最大化)=====
[profile.release]
opt-level = 3
lto = "fat"
lto = true
codegen-units = 1
panic = "abort"
strip = true
debug = false
overflow-checks = false
incremental = false
trim-paths = "all"
[patch.crates-io]
h2 = { path = "patch/h2-0.4.10" }
reqwest = { path = "patch/reqwest-0.12.18" }
rustls = { path = "patch/rustls-0.23.35" }
chrono = { path = "patch/chrono-0.4.42" }
chrono = { path = "patch/chrono-0.4.43" }
dotenvy = { path = "patch/dotenvy-0.15.7" }
prost = { path = "patch/prost-0.14.1" }
# prost-derive = { path = "patch/prost-derive" }
@@ -95,7 +90,9 @@ path = "src/main.rs"
# path = "tools/rkyv_adapter/src/main.rs"
[build-dependencies]
chrono = { version = "0.4", default-features = false, features = ["alloc"] }
__build = { package = "cursor-api-build", path = "__build" }
# chrono = { version = "0.4", default-features = false, features = ["alloc"] }
# prost-build = { version = "0.14", optional = true }
sha2 = { version = "0", default-features = false }
serde_json = "1"
@@ -134,7 +131,7 @@ chrono = { version = "0.4", default-features = false, features = [
"serde",
"rkyv-64",
] }
chrono-tz = { version = "0.10", features = ["serde"] }
chrono-tz = { version = "0.11.0", features = ["serde"], git = "https://github.com/chronotope/chrono-tz.git" }
# crossbeam = { version = "0.8.4", features = ["nightly"] }
# dashmap = { version = "7.0.0-rc2", features = ["inline-more"] }
dotenvy = "0.15"
+1 -1
View File
@@ -1 +1 @@
21
23
+11
View File
@@ -0,0 +1,11 @@
[package]
name = "cursor-api-build"
version.workspace = true
edition.workspace = true
authors.workspace = true
description.workspace = true
license.workspace = true
repository.workspace = true
[dependencies]
chrono = { version = "0.4.43", default-features = false, features = ["alloc"] }
+8
View File
@@ -0,0 +1,8 @@
style_edition = "2024"
use_small_heuristics = "Max"
merge_derives = false
group_imports = "One"
imports_granularity = "Module"
use_field_init_shorthand = true
tab_spaces = 2
where_single_line = true
+61
View File
@@ -0,0 +1,61 @@
/// Declares a lazily-initialized static plus unsafe accessors for it.
///
/// Expands, inside the invoking module, to:
/// - a `static $variable_name: Variable<$variable_type>` of uninitialized storage;
/// - `get_unchecked()` — returns a reference to the stored value;
/// - `initialize()` — fills the storage from a module-local `_initialize()`
///   function that the invoking module must define.
///
/// # Safety
/// Callers must guarantee `initialize()` runs exactly once, before any call
/// to `get_unchecked()`, and without concurrent access.
/// NOTE(review): `Variable` is `Send + Sync` for all `T`, so this calling
/// discipline (enforced via the `once` guard in `variables::initialize`) is
/// the only thing preventing a data race — confirm all call sites use it.
macro_rules! generate_static_variable {
  (
    $variable_name: ident
    $variable_type: ty
  ) => {
    pub(self) static $variable_name: $crate::_marco::Variable<$variable_type> =
      $crate::_marco::uninit_variable();

    // Read the value; caller must guarantee initialize() completed first.
    #[inline]
    pub(crate) unsafe fn get_unchecked() -> &'static $variable_type {
      // SAFETY: caller guarantees the storage was initialized.
      unsafe { (&*$variable_name.0.get()).assume_init_ref() }
    }

    // Write the value produced by the module-local _initialize().
    #[inline]
    pub(crate) unsafe fn initialize() {
      // SAFETY: caller guarantees exclusive access during startup.
      unsafe { (&mut *$variable_name.0.get()).write(_initialize()) };
    }
  };
}
/// Raw storage cell used by `generate_static_variable!`: interior-mutable,
/// possibly-uninitialized storage for a process-wide value.
pub(crate) struct Variable<T>(pub(crate) ::core::cell::UnsafeCell<::core::mem::MaybeUninit<T>>);

// NOTE(review): these blanket impls have no `T: Send`/`T: Sync` bounds; they
// are only sound because every access goes through the generated unsafe
// accessors under the initialize-once-before-reads discipline — confirm no
// non-Send/non-Sync T is ever stored here.
unsafe impl<T> Send for Variable<T> {}
unsafe impl<T> Sync for Variable<T> {}

/// Const constructor producing uninitialized storage (used in static items).
#[inline(always)]
pub(crate) const fn uninit_variable<T>() -> Variable<T> {
  Variable(::core::cell::UnsafeCell::new(::core::mem::MaybeUninit::uninit()))
}
/// Declares the public getter `fn $variable_name() -> $fn_result` plus the
/// backing submodule of the same name (one source file per variable).
///
/// The getter lazily triggers crate-wide variable initialization on first
/// use (guarded by the `once` module, so concurrent first calls are safe),
/// then maps the raw stored value through `|$x| $map`.
macro_rules! generate_variable_get {
  (
    $variable_name: ident
    $fn_result: ty
    |$x:ident| $map:expr
  ) => {
    pub(crate) mod $variable_name;

    #[inline]
    pub fn $variable_name() -> $fn_result {
      // Lazy bootstrap: the first caller initializes every variable.
      if !$crate::once::initialized() {
        $crate::variables::initialize();
      }
      // Safe by construction: initialization is guaranteed above.
      let $x = unsafe { $variable_name::get_unchecked() };
      $map
    }
  };
}
/// Declares one private module per info source and re-exports the listed
/// types from each through a shared `prelude` module, which the crate root
/// then re-exports as its public surface.
macro_rules! reexport_info_types {
  {
    $(
      $info_name: ident
      $($info_type: ty)+,
    )*
  } => {
    $(
      mod $info_name;
    )*

    #[allow(unused_braces)]
    pub(crate) mod prelude {
      $(
        pub use super::$info_name::{$($info_type,)+};
      )*
    }
  };
}
+53
View File
@@ -0,0 +1,53 @@
use super::*;
/// Code generator for `build_info.rs`: version/timestamp constants consumed
/// by the main crate at compile time via `include!`.
pub struct BuildInfo;

impl BuildInfo {
  /// Render the generated Rust source into `writer`.
  ///
  /// Emits `BUILD_VERSION` (only for preview builds, where the build number
  /// is non-zero), `BUILD_TIMESTAMP` (RFC 3339), `VERSION`, `IS_PRERELEASE`,
  /// `IS_DEBUG`, and a `BUILD_EPOCH: SystemTime` constant.
  pub fn write_to<W: Write>(self, mut writer: W) -> io::Result<()> {
    write_generated(&mut writer)?;
    writer.write_all(b"use crate::app::model::version::{Version, ReleaseStage::*};\n\n")?;
    // A build number of 0 means "not a preview build": omit the constant.
    let version_number = version_number();
    if version_number != 0 {
      writeln!(writer, "pub const BUILD_VERSION: u32 = {version_number};")?;
    }
    let build_timestamp = build_timestamp();
    // Render as UTC ("Z" suffix), second precision.
    writeln!(
      writer,
      "pub const BUILD_TIMESTAMP: &'static str = {:?};",
      chrono::DateTime::from_timestamp_secs(build_timestamp as i64)
        .unwrap()
        .to_rfc3339_opts(chrono::SecondsFormat::Secs, true)
    )?;
    // First line is a doc comment carrying the human-readable version string.
    writeln!(
      writer,
      "/// pub const VERSION_STR: &'static str = \"{version}\";\npub const VERSION: Version = {version:?};",
      version = pkg_version()
    )?;
    let is_preview = is_preview();
    let is_debug = cfg!(debug_assertions);
    write!(
      writer,
      "pub const IS_PRERELEASE: bool = {is_preview};\npub const IS_DEBUG: bool = {is_debug};\n\n"
    )?;
    // NOTE(review): the generated BUILD_EPOCH transmutes raw integers into
    // std::time::SystemTime — this depends on unstable std internal layout
    // ((secs, nanos) on unix, FILETIME intervals on windows); confirm it
    // matches the exact toolchain the main crate pins.
    write!(
      writer,
      r#"#[cfg(unix)]
pub const BUILD_EPOCH: std::time::SystemTime =
unsafe {{ ::core::intrinsics::transmute(({build_timestamp}i64, 0u32)) }};
#[cfg(windows)]
pub const BUILD_EPOCH: std::time::SystemTime = unsafe {{
const INTERVALS_PER_SEC: u64 = 10_000_000;
const INTERVALS_TO_UNIX_EPOCH: u64 = 11_644_473_600 * INTERVALS_PER_SEC;
const TARGET_INTERVALS: u64 = INTERVALS_TO_UNIX_EPOCH + {build_timestamp} * INTERVALS_PER_SEC;
::core::intrinsics::transmute((
TARGET_INTERVALS as u32,
(TARGET_INTERVALS >> 32) as u32,
))
}};
"#
    )?;
    Ok(())
  }
}
+63
View File
@@ -0,0 +1,63 @@
use super::*;
use std::fs;
/// Target platforms the build script knows how to describe.
#[allow(non_camel_case_types)]
#[derive(Clone, Copy)]
pub enum PlatformType {
  Windows,
  macOS,
  Linux,
  Android,
  FreeBSD,
  Unknown,
}

impl PlatformType {
  /// Human-readable platform name, spelled exactly like the variant.
  #[inline]
  pub const fn as_str(self) -> &'static str {
    match self {
      Self::Windows => "Windows",
      Self::macOS => "macOS",
      Self::Linux => "Linux",
      Self::Android => "Android",
      Self::FreeBSD => "FreeBSD",
      Self::Unknown => "Unknown",
    }
  }

  /// Keep the desktop platforms (`Windows`/`macOS`/`Linux`) as-is and
  /// collapse everything else down to `Windows`.
  #[inline]
  pub const fn or_default(self) -> Self {
    if matches!(self, Self::Windows | Self::macOS | Self::Linux) { self } else { Self::Windows }
  }
}
/// Platform this build script is compiled for, resolved at compile time via
/// the unstable `cfg_select!` macro (requires the nightly `cfg_select`
/// feature enabled at the crate root).
pub const CURRENT: PlatformType = cfg_select! {
  target_os = "windows" => {PlatformType::Windows}
  target_os = "macos" => {PlatformType::macOS}
  target_os = "linux" => {PlatformType::Linux}
  target_os = "android" => {PlatformType::Android}
  target_os = "freebsd" => {PlatformType::FreeBSD}
  _ => {PlatformType::Unknown}
};
/// Code generator for `platform_info.rs`.
pub struct PlatformInfo;

impl PlatformInfo {
  /// Render the generated Rust source into `writer`.
  ///
  /// Writes `DEFAULT` (the host platform collapsed to a supported one via
  /// `or_default`) and `CONFIG_EXAMPLE` (the contents of
  /// `config.example.toml` with the `{DEFAULT_PLATFORM}` placeholder
  /// substituted).
  pub fn write_to<W: Write>(self, mut writer: W) -> io::Result<()> {
    write_generated(&mut writer)?;
    writer
      .write_all(b"use crate::app::model::platform::PlatformType;\n\n")?;
    let default = CURRENT.or_default();
    writeln!(writer, "pub const DEFAULT: PlatformType = PlatformType::{};", default.as_str())?;
    // NOTE(review): unwrap aborts the build if config.example.toml is
    // missing from the manifest dir — presumably intentional; confirm.
    writeln!(
      writer,
      "pub const CONFIG_EXAMPLE: &'static str = {:?};",
      fs::read_to_string(manifest_dir().join("config.example.toml"))
        .unwrap()
        .replace("{DEFAULT_PLATFORM}", default.as_str())
    )?;
    Ok(())
  }
}
+53
View File
@@ -0,0 +1,53 @@
// style
#![allow(clippy::redundant_static_lifetimes, clippy::needless_pub_self)]
#![feature(cfg_select)]
#[macro_use]
mod _marco;
mod once;
mod version;
/// One submodule (and public getter) per build-time variable.
pub mod variables {
  use crate::version::Version;
  use std::path::Path;

  /// Populate every static exactly once. Ordering matters: later
  /// initializers read earlier variables through `get_unchecked`.
  #[cold]
  fn initialize() {
    // SAFETY: the `once` guard serializes this closure, so each variable is
    // written exactly once before any getter reads it.
    crate::once::initialize(|| unsafe {
      out_dir::initialize();
      cfg_feature::initialize();
      manifest_dir::initialize();
      build_timestamp::initialize();
      // after cfg_feature
      is_preview::initialize();
      // after manifest_dir is_preview
      version_number::initialize();
      // after version_number
      pkg_version::initialize();
    })
  }

  generate_variable_get!(out_dir &'static Path |x| x.as_path());
  generate_variable_get!(build_timestamp u64 |x| *x);
  generate_variable_get!(version_number u16 |x| *x);
  generate_variable_get!(manifest_dir &'static Path |x| x.as_path());
  generate_variable_get!(pkg_version Version |x| *x);
  generate_variable_get!(cfg_feature &'static str |x| x.as_str());
  generate_variable_get!(is_preview bool |x| *x);
}
/// Info generators (one module per generated file), re-exported through a
/// common prelude.
mod infos {
  use crate::variables::*;
  use std::io::{self, Write};

  /// Write the standard "@generated" header shared by every emitted file.
  fn write_generated<W: Write>(writer: &mut W) -> io::Result<()> {
    writer
      .write_all(b"// This file is automatically @generated by build.rs. Do not edit manually.\n")
  }

  reexport_info_types!(
    build BuildInfo,
    platform PlatformType CURRENT PlatformInfo,
  );
}
pub use infos::prelude::*;
+14
View File
@@ -0,0 +1,14 @@
use std::sync::Once;

/// Process-wide one-shot guard for build-variable initialization.
static ONCE: Once = Once::new();

/// Run `f` at most once across the whole process; concurrent callers block
/// until the winning call finishes. `call_once_force` is used so a panic in
/// a previous attempt does not poison the guard permanently.
#[cold]
pub fn initialize<F>(f: F)
where F: FnOnce() {
  ONCE.call_once_force(|_state| f());
}

/// Whether an [`initialize`] call has already run to completion.
#[inline]
pub fn initialized() -> bool { ONCE.is_completed() }
+12
View File
@@ -0,0 +1,12 @@
use std::env;
use std::time::{SystemTime, UNIX_EPOCH};

generate_static_variable!(BUILD_TIMESTAMP u64);

/// Seconds since the Unix epoch: the `BUILD_TIMESTAMP` env var when set
/// (enables reproducible builds), otherwise the current wall clock.
fn _initialize() -> u64 {
  if let Some(s) = env::var_os("BUILD_TIMESTAMP") {
    // Panics on non-UTF-8 or non-numeric input — failing the build loudly
    // on a malformed override is the intended behavior here.
    s.to_str().unwrap().parse().unwrap()
  } else {
    SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs()
  }
}
+9
View File
@@ -0,0 +1,9 @@
use std::env;

generate_static_variable!(CFG_FEATURE String);

/// Enabled cargo features, read from `CARGO_CFG_FEATURE` (set by cargo for
/// build scripts); empty string when absent.
fn _initialize() -> String {
  let s = env::var("CARGO_CFG_FEATURE").unwrap_or_default();
  // NOTE(review): prints the raw feature list to the build script's stdout;
  // cargo ignores lines without a `cargo:` prefix, so this looks like debug
  // output — confirm it is intentional.
  println!("{s}");
  s
}
+5
View File
@@ -0,0 +1,5 @@
generate_static_variable!(IS_PREVIEW bool);

/// True when the `__preview` feature is enabled.
/// NOTE(review): substring match on the feature list — would also match a
/// feature whose name merely contains "__preview"; confirm names cannot
/// collide.
fn _initialize() -> bool {
  // SAFETY: runs after cfg_feature::initialize (see variables::initialize).
  unsafe { super::cfg_feature::get_unchecked() }.contains("__preview")
}
+8
View File
@@ -0,0 +1,8 @@
use std::env;
use std::path::PathBuf;

generate_static_variable!(CARGO_MANIFEST_DIR PathBuf);

/// Manifest directory from `CARGO_MANIFEST_DIR` — for a build script this
/// is the directory of the package being built.
fn _initialize() -> PathBuf {
  env::var_os("CARGO_MANIFEST_DIR").unwrap().into()
}
+8
View File
@@ -0,0 +1,8 @@
use std::env;
use std::path::PathBuf;

generate_static_variable!(OUT_DIR PathBuf);

/// Cargo-provided output directory (`OUT_DIR`) where generated sources go.
fn _initialize() -> PathBuf {
  env::var_os("OUT_DIR").unwrap().into()
}
+14
View File
@@ -0,0 +1,14 @@
use std::env;

use crate::version::ReleaseStage::*;
use crate::version::Version;

generate_static_variable!(PKG_VERSION Version);

/// Parse `CARGO_PKG_VERSION` and, for preview builds, attach the build
/// number resolved by the `version_number` variable.
fn _initialize() -> Version {
  let mut ver: Version = env::var("CARGO_PKG_VERSION").unwrap().parse().unwrap();
  if let Preview { ref mut build, .. } = ver.stage {
    // SAFETY: runs after version_number::initialize (see variables::initialize).
    *build = Some(*unsafe { super::version_number::get_unchecked() });
  }
  ver
}
+19
View File
@@ -0,0 +1,19 @@
use std::{env, fs};

generate_static_variable!(VERSION_NUMBER u16);

/// Preview-build number: the `VERSION_NUMBER` env var when set, otherwise
/// the `VERSION` file in the manifest directory; always `0` for
/// non-preview builds.
fn _initialize() -> u16 {
  // SAFETY: runs after is_preview/manifest_dir initialize (see
  // variables::initialize).
  if unsafe { *super::is_preview::get_unchecked() } {
    if let Some(s) = env::var_os("VERSION_NUMBER") {
      s.to_str().unwrap().trim().parse().unwrap()
    } else {
      // Build aborts (unwrap) if the VERSION file is missing or malformed.
      fs::read_to_string(unsafe { super::manifest_dir::get_unchecked() }.join("VERSION"))
        .unwrap()
        .trim()
        .parse()
        .unwrap()
    }
  } else {
    0
  }
}
+144
View File
@@ -0,0 +1,144 @@
/// Release stage of a version.
#[derive(Debug, Clone, Copy)]
pub enum ReleaseStage {
  /// Stable release — renders as an empty suffix.
  Release,
  /// Preview release — renders as `-pre.N` or `-pre.N+build.M`.
  Preview {
    /// Preview iteration number.
    version: u16,
    /// Optional build number.
    build: Option<u16>,
  },
}

impl core::fmt::Display for ReleaseStage {
  fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
    // Stable releases contribute nothing to the rendered string.
    let Self::Preview { version, build } = self else { return Ok(()) };
    write!(f, "-pre.{version}")?;
    if let Some(build) = build {
      write!(f, "+build.{build}")?;
    }
    Ok(())
  }
}

/// Version value following the project scheme
/// `MAJOR.MINOR.PATCH[-pre.N[+build.M]]`, e.g. `0.4.0-pre.6+build.8`.
#[derive(Debug, Clone, Copy)]
pub struct Version {
  pub major: u16,
  pub minor: u16,
  pub patch: u16,
  pub stage: ReleaseStage,
}

impl core::fmt::Display for Version {
  fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
    let Self { major, minor, patch, stage } = self;
    write!(f, "{major}.{minor}.{patch}{stage}")
  }
}

/// Error produced when parsing a version string.
#[allow(clippy::enum_variant_names)]
#[derive(Debug)]
pub enum ParseError {
  /// The overall shape is wrong (e.g. wrong number of components).
  InvalidFormat,
  /// A numeric component failed to parse.
  InvalidNumber,
  /// The `pre.N` section is malformed.
  InvalidPreRelease,
  /// The `build.M` section is malformed.
  InvalidBuild,
}

impl core::fmt::Display for ParseError {
  fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
    let message = match self {
      Self::InvalidFormat => "invalid version format",
      Self::InvalidNumber => "invalid number in version",
      Self::InvalidPreRelease => "invalid pre-release format",
      Self::InvalidBuild => "invalid build format",
    };
    f.write_str(message)
  }
}

impl std::error::Error for ParseError {}

impl core::str::FromStr for Version {
  type Err = ParseError;

  fn from_str(s: &str) -> core::result::Result<Self, Self::Err> {
    // Everything after the first '-' (if any) is the pre-release extension.
    let (base, extension) = match s.split_once('-') {
      Some((base, ext)) => (base, Some(ext)),
      None => (s, None),
    };

    // The base must consist of exactly three dot-separated u16 components:
    // a missing or extra component is a format error, a non-numeric one a
    // number error.
    let mut segments = base.split('.');
    let mut next_component = || -> core::result::Result<u16, ParseError> {
      segments
        .next()
        .ok_or(ParseError::InvalidFormat)?
        .parse()
        .map_err(|_| ParseError::InvalidNumber)
    };
    let major = next_component()?;
    let minor = next_component()?;
    let patch = next_component()?;
    if segments.next().is_some() {
      return Err(ParseError::InvalidFormat);
    }

    let stage = match extension {
      Some(ext) => parse_extension(ext)?,
      None => ReleaseStage::Release,
    };
    Ok(Self { major, minor, patch, stage })
  }
}

/// Parse the extension section: `pre.N`, optionally followed by `+build.M`.
fn parse_extension(ext: &str) -> core::result::Result<ReleaseStage, ParseError> {
  // The extension must start with the literal `pre.` marker.
  let rest = ext.strip_prefix("pre.").ok_or(ParseError::InvalidPreRelease)?;

  // Split the preview number from an optional `+…` build section.
  let (version_part, build_part) = match rest.split_once('+') {
    Some((version, build)) => (version, Some(build)),
    None => (rest, None),
  };

  let version = version_part.parse().map_err(|_| ParseError::InvalidPreRelease)?;
  let build = match build_part {
    Some(part) => {
      // The build section must be exactly `build.<number>`.
      let digits = part.strip_prefix("build.").ok_or(ParseError::InvalidBuild)?;
      Some(digits.parse().map_err(|_| ParseError::InvalidBuild)?)
    }
    None => None,
  };
  Ok(ReleaseStage::Preview { version, build })
}
+14 -8
View File
@@ -4,18 +4,15 @@ use sha2::{Digest, Sha256};
use std::collections::HashMap;
#[cfg(not(feature = "use-minified"))]
use std::fs;
#[cfg(not(debug_assertions))]
#[cfg(feature = "__preview")]
use std::fs::File;
use std::io::Result;
#[cfg(not(debug_assertions))]
#[cfg(feature = "__preview")]
#[cfg(all(not(debug_assertions), not(feature = "__preview_locked"), feature = "__preview"))]
use std::io::{Read, Write};
#[cfg(not(feature = "use-minified"))]
use std::path::Path;
#[cfg(not(feature = "use-minified"))]
use std::path::PathBuf;
#[cfg(not(feature = "use-minified"))]
use std::process::Command;
use std::{fs::File, io::Result};
// 支持的文件类型
// #[cfg(not(feature = "use-minified"))]
@@ -187,7 +184,15 @@ fn minify_assets() -> Result<()> {
Ok(())
}
include!("build_info.rs");
/// Write the generated `build_info.rs` into `$OUT_DIR` via the `__build`
/// helper crate.
fn generate_build_info() -> Result<()> {
  let file = File::create(__build::variables::out_dir().join("build_info.rs"))?;
  __build::BuildInfo.write_to(file)
}
/// Write the generated `platform_info.rs` into `$OUT_DIR` via the `__build`
/// helper crate.
fn generate_platform_info() -> Result<()> {
  let file = File::create(__build::variables::out_dir().join("platform_info.rs"))?;
  __build::PlatformInfo.write_to(file)
}
// #[cfg(feature = "__protoc")]
// macro_rules! proto_attributes {
@@ -202,7 +207,7 @@ include!("build_info.rs");
fn main() -> Result<()> {
// 更新版本号 - 只在 release 构建时执行
#[cfg(all(not(debug_assertions), feature = "__preview"))]
#[cfg(all(not(debug_assertions), not(feature = "__preview_locked"), feature = "__preview"))]
update_version()?;
// #[cfg(feature = "__protoc")]
@@ -345,6 +350,7 @@ fn main() -> Result<()> {
// 生成构建信息文件
generate_build_info()?;
generate_platform_info()?;
Ok(())
}
+14 -4
View File
@@ -41,8 +41,18 @@ dynamic_key_secret = ""
web_references_included = false
# 模型数据获取模式
# - 可选值:
# - truncate - 覆盖模式(默认): 完全使用新获取的模型列表,替换所有现有模型
# - append:truncate - 智能合并模式: 保留现有模型中不在新列表中的,同时添加或更新新模型
# - append - 纯追加模式: 只添加不存在的新模型,已有模型保持不变
# 可选值:
# - truncate - 覆盖模式(默认): 完全使用新获取的模型列表,替换所有现有模型
# - append:truncate - 智能合并模式: 保留现有模型中不在新列表中的,同时添加或更新新模型
# - append - 纯追加模式: 只添加不存在的新模型,已有模型保持不变
raw_model_fetch_mode = "truncate"
# 模拟平台(默认{DEFAULT_PLATFORM})
# 可选值:
# - Windows
# - macOS
# - Linux
emulated_platform = "{DEFAULT_PLATFORM}"
# Cursor客户端版本
cursor_client_version = "2.0.0"
+2 -2
View File
@@ -10,10 +10,10 @@ license = "MIT"
any_all_workaround = { version = "0.1", optional = true }
bytes = { version = "1", default-features = false }
cfg-if = "1"
serde = { version = "1", default-features = false, optional = true }
serde_core = { version = "1", default-features = false, optional = true }
[features]
default = ["nightly"]
std = ["bytes/std"]
serde = ["dep:serde"]
serde = ["dep:serde_core"]
nightly = ["dep:any_all_workaround"]
+3 -8
View File
@@ -20,7 +20,7 @@ extern crate alloc;
extern crate bytes;
#[cfg(feature = "serde")]
extern crate serde;
extern crate serde_core;
#[macro_use]
extern crate cfg_if;
@@ -207,18 +207,13 @@ impl core::hash::Hash for ByteStr {
#[inline]
fn hash<H>(&self, state: &mut H)
where H: core::hash::Hasher {
self.bytes.hash(state)
ops::Deref::deref(self).hash(state)
}
}
impl const Borrow<str> for ByteStr {
#[inline]
fn borrow(&self) -> &str { &**self }
}
impl const Borrow<[u8]> for ByteStr {
#[inline]
fn borrow(&self) -> &[u8] { self.as_ref() }
fn borrow(&self) -> &str { self }
}
impl PartialEq<str> for ByteStr {
+12 -12
View File
@@ -3,17 +3,17 @@ use alloc::vec::Vec;
use super::*;
impl serde::Serialize for ByteStr {
impl serde_core::Serialize for ByteStr {
#[inline]
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: serde::Serializer {
serializer.serialize_str(&**self)
where S: serde_core::Serializer {
serializer.serialize_str(self)
}
}
struct ByteStrVisitor;
impl<'de> serde::de::Visitor<'de> for ByteStrVisitor {
impl<'de> serde_core::de::Visitor<'de> for ByteStrVisitor {
type Value = ByteStr;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -22,19 +22,19 @@ impl<'de> serde::de::Visitor<'de> for ByteStrVisitor {
#[inline]
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: serde::de::Error {
where E: serde_core::de::Error {
Ok(ByteStr::from(v))
}
#[inline]
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where E: serde::de::Error {
where E: serde_core::de::Error {
Ok(ByteStr::from(v))
}
#[inline]
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where E: serde::de::Error {
where E: serde_core::de::Error {
match str::from_utf8(v) {
Ok(s) => Ok(ByteStr::from(s)),
Err(e) => Err(E::custom(format_args!("invalid UTF-8: {e}"))),
@@ -43,7 +43,7 @@ impl<'de> serde::de::Visitor<'de> for ByteStrVisitor {
#[inline]
fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
where E: serde::de::Error {
where E: serde_core::de::Error {
match String::from_utf8(v) {
Ok(s) => Ok(ByteStr::from(s)),
Err(e) => Err(E::custom(format_args!("invalid UTF-8: {}", e.utf8_error()))),
@@ -52,8 +52,8 @@ impl<'de> serde::de::Visitor<'de> for ByteStrVisitor {
#[inline]
fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
where V: serde::de::SeqAccess<'de> {
use serde::de::Error as _;
where V: serde_core::de::SeqAccess<'de> {
use serde_core::de::Error as _;
let len = core::cmp::min(seq.size_hint().unwrap_or(0), 4096);
let mut bytes: Vec<u8> = Vec::with_capacity(len);
@@ -68,10 +68,10 @@ impl<'de> serde::de::Visitor<'de> for ByteStrVisitor {
}
}
impl<'de> serde::Deserialize<'de> for ByteStr {
impl<'de> serde_core::Deserialize<'de> for ByteStr {
#[inline]
fn deserialize<D>(deserializer: D) -> Result<ByteStr, D::Error>
where D: serde::Deserializer<'de> {
where D: serde_core::Deserializer<'de> {
deserializer.deserialize_string(ByteStrVisitor)
}
}
+2 -2
View File
@@ -36,12 +36,12 @@ ahash = { version = "0.8", default-features = false, features = [
manually_init.workspace = true
serde = { version = "1.0", default-features = false, optional = true }
serde_core = { version = "1", default-features = false, optional = true }
[features]
default = ["serde"]
nightly = []
serde = ["dep:serde"]
serde = ["dep:serde_core"]
#[profile.release]
#opt-level = 3
+64 -53
View File
@@ -58,7 +58,7 @@ use scc::{Equivalent, HashMap};
/// # 设计目标
///
/// - **内存去重**:相同内容的字符串共享同一内存地址
/// - **零拷贝克隆**clone() 只涉及原子递增操作
/// - **零拷贝克隆**`clone()` 只涉及原子递增操作
/// - **线程安全**:支持多线程环境下的安全使用
/// - **高性能查找**:使用预计算哈希值优化池查找
///
@@ -115,8 +115,8 @@ impl ArcStr {
///
/// # 性能特征
///
/// - **池命中**O(1) HashMap 查找 + 原子递增
/// - **池缺失**O(1) 内存分配 + O(1) HashMap 插入
/// - **池命中**O(1) `HashMap` 查找 + 原子递增
/// - **池缺失**O(1) 内存分配 + O(1) `HashMap` 插入
/// - **哈希计算**:使用 ahash 的高性能哈希算法
///
/// # Examples
@@ -150,9 +150,8 @@ impl ArcStr {
// 进入这里说明需要创建新的字符串实例
let pool = ARC_STR_POOL.get();
use scc::hash_map::RawEntry;
match pool.raw_entry().from_key_hashed_nocheck_sync(hash, string) {
RawEntry::Occupied(entry) => {
scc::hash_map::RawEntry::Occupied(entry) => {
// 双重检查:在获取写锁的过程中,其他线程可能已经创建了相同的字符串
let ptr = entry.key().0;
@@ -162,13 +161,13 @@ impl ArcStr {
Self { ptr, _marker: PhantomData }
}
RawEntry::Vacant(entry) => {
scc::hash_map::RawEntry::Vacant(entry) => {
// 确认需要创建新实例:分配内存并初始化
let layout = ArcStrInner::layout_for_string(string.len());
// SAFETY: layout_for_string 确保布局有效且大小合理
let ptr = unsafe {
let alloc = alloc::alloc::alloc(layout) as *mut ArcStrInner;
let alloc: *mut ArcStrInner = alloc::alloc::alloc(layout).cast();
if alloc.is_null() {
hint::cold_path();
@@ -197,7 +196,8 @@ impl ArcStr {
///
/// 这是一个 `const fn`,在编译时就能确定偏移量,
/// 运行时仅需要一次内存解引用。
#[inline(always)]
#[must_use]
#[inline]
pub const fn as_str(&self) -> &str {
// SAFETY: ptr 在 ArcStr 生命周期内始终指向有效的 ArcStrInner
// 且字符串数据保证是有效的 UTF-8
@@ -207,28 +207,32 @@ impl ArcStr {
/// 获取字符串的字节切片
///
/// 提供对底层字节数据的直接访问。
#[inline(always)]
#[must_use]
#[inline]
pub const fn as_bytes(&self) -> &[u8] {
// SAFETY: ptr 始终指向有效的 ArcStrInner
unsafe { self.ptr.as_ref().as_bytes() }
}
/// 获取字符串长度(字节数)
#[inline(always)]
#[must_use]
#[inline]
pub const fn len(&self) -> usize {
// SAFETY: ptr 始终指向有效的 ArcStrInner
unsafe { self.ptr.as_ref().string_len }
}
/// 检查字符串是否为空
#[inline(always)]
#[must_use]
#[inline]
pub const fn is_empty(&self) -> bool { self.len() == 0 }
/// 获取当前引用计数
///
/// 注意:由于并发访问,返回的值可能在返回后立即发生变化。
/// 此方法主要用于调试和测试。
#[inline(always)]
#[must_use]
#[inline]
pub fn ref_count(&self) -> usize {
// SAFETY: ptr 始终指向有效的 ArcStrInner
unsafe { self.ptr.as_ref().strong_count() }
@@ -237,7 +241,8 @@ impl ArcStr {
/// 获取字符串数据的内存地址(用于调试和测试)
///
/// 返回字符串内容的起始地址,可用于验证字符串是否共享内存。
#[inline(always)]
#[must_use]
#[inline]
pub const fn as_ptr(&self) -> *const u8 {
// SAFETY: ptr 始终指向有效的 ArcStrInner
unsafe { self.ptr.as_ref().string_ptr() }
@@ -257,7 +262,8 @@ impl ArcStr {
/// # 返回值
///
/// 如果找到匹配的字符串,返回增加引用计数后的 `ArcStr`;否则返回 `None`。
#[inline(always)]
#[must_use]
#[inline]
fn try_find_existing(pool: &PtrMap, hash: u64, string: &str) -> Option<Self> {
// 使用 hashbrown 的 from_key_hashed_nocheck API
// 这利用了 Equivalent trait 来进行高效比较
@@ -349,10 +355,10 @@ impl Drop for ArcStr {
// 第二层:标准库集成
// ═══════════════════════════════════════════════════════════════════════════
/// # 基础 Trait 实现
///
/// 这些实现确保 `ArcStr` 能够与 Rust 的标准库类型无缝集成,
/// 提供符合直觉的比较、格式化和访问接口。
// # 基础 Trait 实现
//
// 这些实现确保 `ArcStr` 能够与 Rust 的标准库类型无缝集成,
// 提供符合直觉的比较、格式化和访问接口。
impl PartialEq for ArcStr {
/// 基于指针的快速相等比较
@@ -423,10 +429,10 @@ impl const core::ops::Deref for ArcStr {
fn deref(&self) -> &Self::Target { self.as_str() }
}
/// # 与其他字符串类型的互操作性
///
/// 这些实现使得 `ArcStr` 可以与 Rust 生态系统中的各种字符串类型
/// 进行直接比较,提供良好的开发体验。
// # 与其他字符串类型的互操作性
//
// 这些实现使得 `ArcStr` 可以与 Rust 生态系统中的各种字符串类型
// 进行直接比较,提供良好的开发体验。
impl const PartialEq<str> for ArcStr {
#[inline]
@@ -470,10 +476,10 @@ impl PartialOrd<String> for ArcStr {
}
}
/// # 类型转换实现
///
/// 提供从各种字符串类型到 `ArcStr` 的便捷转换,
/// 以及从 `ArcStr` 到其他类型的转换。
// # 类型转换实现
//
// 提供从各种字符串类型到 `ArcStr` 的便捷转换,
// 以及从 `ArcStr` 到其他类型的转换。
impl<'a> From<&'a str> for ArcStr {
#[inline]
@@ -523,8 +529,8 @@ impl str::FromStr for ArcStr {
/// 序列化时输出字符串内容,反序列化时重新建立池化引用。
#[cfg(feature = "serde")]
mod serde_impls {
use super::*;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use super::ArcStr;
use serde_core::{Deserialize, Deserializer, Serialize, Serializer};
impl Serialize for ArcStr {
#[inline]
@@ -547,10 +553,10 @@ mod serde_impls {
// 第三层:核心实现机制
// ═══════════════════════════════════════════════════════════════════════════
/// # 内存布局与数据结构设计
///
/// 这个模块包含了 `ArcStr` 的底层数据结构定义和内存布局管理。
/// 理解这部分有助于深入了解性能优化的原理。
// # 内存布局与数据结构设计
//
// 这个模块包含了 `ArcStr` 的底层数据结构定义和内存布局管理。
// 理解这部分有助于深入了解性能优化的原理。
/// 字符串内容的内部表示(DST 头部)
///
@@ -593,15 +599,15 @@ struct ArcStrInner {
/// 预计算的内容哈希值
///
/// 这个哈希值在多个场景中被复用:
/// - 全局池的HashMap键
/// - Drop时的快速查找
/// - 全局池的 `HashMap`
/// - `Drop` 时的快速查找
/// - 避免重复哈希计算的性能优化
hash: u64,
/// 原子引用计数
///
/// 使用原生原子类型确保最佳性能。
/// 计数范围:[1, isize::MAX],超出时触发abort。
/// 计数范围:[1, `isize::MAX`],超出时触发abort。
count: AtomicUsize,
/// 字符串的字节长度(UTF-8编码)
@@ -627,10 +633,11 @@ impl ArcStrInner {
/// - `self` 必须是指向有效 `ArcStrInner` 的指针
/// - 必须确保字符串数据已经被正确初始化
/// - 调用者负责确保返回的指针在使用期间保持有效
#[inline(always)]
#[must_use]
#[inline]
const unsafe fn string_ptr(&self) -> *const u8 {
// SAFETY: repr(C) 保证字符串数据位于结构体末尾的固定偏移处
(self as *const Self).add(1).cast()
core::ptr::from_ref(self).add(1).cast()
}
/// 获取字符串的字节切片
@@ -641,7 +648,8 @@ impl ArcStrInner {
/// - 字符串数据必须已经被正确初始化
/// - `string_len` 必须准确反映实际字符串长度
/// - 字符串数据必须在返回的切片生命周期内保持有效
#[inline(always)]
#[must_use]
#[inline]
const unsafe fn as_bytes(&self) -> &[u8] {
let ptr = self.string_ptr();
// SAFETY: 调用者保证 ptr 指向有效的 string_len 字节数据
@@ -656,7 +664,8 @@ impl ArcStrInner {
/// - 字符串数据必须是有效的 UTF-8 编码
/// - `string_len` 必须准确反映实际字符串长度
/// - 字符串数据必须在返回的切片生命周期内保持有效
#[inline(always)]
#[must_use]
#[inline]
const unsafe fn as_str(&self) -> &str {
// SAFETY: 调用者保证字符串数据是有效的 UTF-8
core::str::from_utf8_unchecked(self.as_bytes())
@@ -729,7 +738,7 @@ impl ArcStrInner {
// - string_ptr() 计算出的地址位于已分配内存范围内
// - string.len() 与分配时的长度一致
// - string.as_ptr() 指向有效的 UTF-8 数据
let string_ptr = (*inner).string_ptr() as *mut u8;
let string_ptr = (*inner).string_ptr().cast_mut();
core::ptr::copy_nonoverlapping(string.as_ptr(), string_ptr, string.len());
}
@@ -783,14 +792,14 @@ impl ArcStrInner {
fn strong_count(&self) -> usize { self.count.load(Relaxed) }
}
/// # 全局字符串池的设计与实现
///
/// 全局池是整个系统的核心,负责去重和生命周期管理。
// # 全局字符串池的设计与实现
//
// 全局池是整个系统的核心,负责去重和生命周期管理。
/// 线程安全的内部指针包装
///
/// 这个类型解决了在 `HashMap` 中存储 `NonNull<ArcStrInner>` 的问题:
/// - 提供必要的 trait 实现(Hash, PartialEq, Send, Sync
/// - 提供必要的 trait 实现( `Hash`, `PartialEq`, `Send`, `Sync`
/// - 封装指针的线程安全语义
/// - 支持基于内容的查找(通过 Equivalent trait
///
@@ -811,7 +820,7 @@ unsafe impl Sync for ThreadSafePtr {}
impl const core::ops::Deref for ThreadSafePtr {
type Target = NonNull<ArcStrInner>;
#[inline(always)]
#[inline]
fn deref(&self) -> &Self::Target { &self.0 }
}
@@ -826,7 +835,7 @@ impl Hash for ThreadSafePtr {
// SAFETY: ThreadSafePtr 保证指针在池生命周期内始终有效
unsafe {
let inner = self.0.as_ref();
state.write_u64(inner.hash)
state.write_u64(inner.hash);
}
}
}
@@ -871,11 +880,11 @@ impl Equivalent<ThreadSafePtr> for str {
}
}
/// # 哈希算法选择与池类型定义
// # 哈希算法选择与池类型定义
/// 透传哈希器,用于全局池内部
///
/// 由于我们在 `ArcStrInner` 中预存了哈希值,池内部的 HashMap
/// 由于我们在 `ArcStrInner` 中预存了哈希值,池内部的 `HashMap`
/// 不需要重新计算哈希。`IdentityHasher` 直接透传 u64 值。
///
/// # 工作原理
@@ -883,7 +892,7 @@ impl Equivalent<ThreadSafePtr> for str {
/// 1. `ThreadSafePtr::hash()` 调用 `hasher.write_u64(stored_hash)`
/// 2. `IdentityHasher::write_u64()` 直接存储这个值
/// 3. `IdentityHasher::finish()` 返回存储的值
/// 4. HashMap 使用这个哈希值进行桶分配和查找
/// 4. `HashMap` 使用这个哈希值进行桶分配和查找
///
/// 这避免了重复的哈希计算,将池操作的哈希开销降到最低。
#[derive(Default, Clone, Copy)]
@@ -894,10 +903,10 @@ impl Hasher for IdentityHasher {
unreachable!("IdentityHasher usage error");
}
#[inline(always)]
fn write_u64(&mut self, id: u64) { self.0 = id; }
#[inline]
fn write_u64(&mut self, id: u64) { self.0 = id }
#[inline(always)]
#[inline]
fn finish(&self) -> u64 { self.0 }
}
@@ -912,7 +921,7 @@ type PtrMap = HashMap<ThreadSafePtr, (), PoolHasher>;
///
/// # 为什么使用 ahash
///
/// - 高性能:比标准库的 DefaultHasher 更快
/// - 高性能:比标准库的 `DefaultHasher` 更快
/// - 安全性:抗哈希洪水攻击
/// - 质量:分布均匀,减少哈希冲突
static CONTENT_HASHER: ManuallyInit<ahash::RandomState> = ManuallyInit::new();
@@ -949,6 +958,8 @@ static ARC_STR_POOL: ManuallyInit<PtrMap> = ManuallyInit::new();
/// 虽然这个函数本身不是线程安全的,但它应该在单线程环境下
/// (如 main 函数开始或静态初始化时)被调用一次。
#[inline(always)]
// 只调用一次
#[allow(clippy::inline_always)]
pub(crate) fn __init() {
CONTENT_HASHER.init(ahash::RandomState::new());
ARC_STR_POOL.init(PtrMap::with_capacity_and_hasher(128, PoolHasher::default()));
+23 -12
View File
@@ -29,8 +29,8 @@
//! | 创建 | 0 ns | ~100 ns (首次) / ~20 ns (池命中) |
//! | Clone | ~1 ns | ~5 ns (atomic inc) |
//! | Drop | 0 ns | ~5 ns (atomic dec) + 可能的清理 |
//! | as_str() | 0 ns | 0 ns (直接访问) |
//! | len() | 0 ns | 0 ns (直接读字段) |
//! | `as_str()` | 0 ns | 0 ns (直接访问) |
//! | `len()` | 0 ns | 0 ns (直接读字段) |
//!
//! # 使用场景
//!
@@ -220,6 +220,7 @@ impl Str {
/// assert_eq!(s.as_static(), Some("constant"));
/// assert_eq!(s.ref_count(), None);
/// ```
#[must_use]
#[inline]
pub const fn from_static(s: &'static str) -> Self { Self::Static(s) }
@@ -229,8 +230,8 @@ impl Str {
///
/// # Performance
///
/// - **首次创建**:堆分配 + HashMap 插入 ≈ 100-200ns
/// - **池命中**HashMap 查找 + 引用计数递增 ≈ 10-20ns
/// - **首次创建**:堆分配 + `HashMap` 插入 ≈ 100-200ns
/// - **池命中**`HashMap` 查找 + 引用计数递增 ≈ 10-20ns
///
/// # Thread Safety
///
@@ -304,6 +305,7 @@ impl Str {
/// }
/// # fn register_constant(_: &'static str) {}
/// ```
#[must_use]
#[inline]
pub const fn is_static(&self) -> bool { matches!(self, Self::Static(_)) }
@@ -330,6 +332,7 @@ impl Str {
/// assert_eq!(s2.ref_count(), Some(2));
/// assert_eq!(s3.ref_count(), Some(2));
/// ```
#[must_use]
#[inline]
pub fn ref_count(&self) -> Option<usize> {
match self {
@@ -374,6 +377,7 @@ impl Str {
/// eprintln!("warning: not a static string");
/// }
/// ```
#[must_use]
#[inline]
pub const fn as_static(&self) -> Option<&'static str> {
match self {
@@ -397,6 +401,7 @@ impl Str {
/// assert!(s1.as_arc_str().is_none());
/// assert!(s2.as_arc_str().is_some());
/// ```
#[must_use]
#[inline]
pub const fn as_arc_str(&self) -> Option<&ArcStr> {
match self {
@@ -421,6 +426,7 @@ impl Str {
/// assert!(s1.into_arc_str().is_some());
/// assert!(s2.into_arc_str().is_none());
/// ```
#[must_use]
#[inline]
pub fn into_arc_str(self) -> Option<ArcStr> {
match self {
@@ -454,7 +460,8 @@ impl Str {
/// let s = Str::new("hello");
/// assert_eq!(s.as_str(), "hello");
/// ```
#[inline(always)]
#[must_use]
#[inline]
pub const fn as_str(&self) -> &str {
match self {
Self::Static(s) => s,
@@ -474,7 +481,8 @@ impl Str {
/// let s = Str::new("hello");
/// assert_eq!(s.as_bytes(), b"hello");
/// ```
#[inline(always)]
#[must_use]
#[inline]
pub const fn as_bytes(&self) -> &[u8] {
match self {
Self::Static(s) => s.as_bytes(),
@@ -499,7 +507,8 @@ impl Str {
/// let s = Str::new("hello");
/// assert_eq!(s.len(), 5);
/// ```
#[inline(always)]
#[must_use]
#[inline]
pub const fn len(&self) -> usize {
match self {
Self::Static(s) => s.len(),
@@ -522,7 +531,8 @@ impl Str {
/// assert!(s1.is_empty());
/// assert!(!s2.is_empty());
/// ```
#[inline(always)]
#[must_use]
#[inline]
pub const fn is_empty(&self) -> bool {
match self {
Self::Static(s) => s.is_empty(),
@@ -541,7 +551,8 @@ impl Str {
/// let ptr = s.as_ptr();
/// assert!(!ptr.is_null());
/// ```
#[inline(always)]
#[must_use]
#[inline]
pub const fn as_ptr(&self) -> *const u8 {
match self {
Self::Static(s) => s.as_ptr(),
@@ -686,7 +697,7 @@ impl From<Str> for alloc::boxed::Box<str> {
fn from(s: Str) -> Self { s.as_str().into() }
}
impl<'a> From<Str> for Cow<'a, str> {
impl From<Str> for Cow<'_, str> {
/// 转换为 `Cow`
///
/// - **Static 变体**:转换为 `Cow::Borrowed`(零成本)
@@ -1057,8 +1068,8 @@ impl const Default for Str {
#[cfg(feature = "serde")]
mod serde_impls {
use super::*;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use super::Str;
use serde_core::{Deserialize, Deserializer, Serialize, Serializer};
impl Serialize for Str {
/// 序列化为普通字符串,丢失变体信息
+2 -2
View File
@@ -18,12 +18,12 @@ byte_str = { version = "0.1.0", path = "../byte_str", default-features = false,
bytes = { version = "1.11.0", default-features = false, optional = true }
indexmap = { version = "2.12.1", default-features = false, optional = true }
itoa = { version = "1.0.17", optional = true }
serde = { version = "1.0.228", default-features = false, optional = true }
serde_core = { version = "1.0", default-features = false, optional = true }
[features]
default = ["bytes", "byte_str"]
alloc = []
serde = ["dep:serde", "dep:base64-simd", "dep:itoa"]
serde = ["dep:serde_core", "dep:base64-simd", "dep:itoa"]
std = ["alloc"]
indexmap = ["dep:indexmap"]
bytes = ["dep:bytes"]
+2 -2
View File
@@ -129,8 +129,8 @@ mod serde_impls {
use core::fmt;
use base64_simd::{forgiving_decode_to_vec, STANDARD};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde::de::{self, Unexpected, Visitor};
use serde_core::{Deserialize, Deserializer, Serialize, Serializer};
use serde_core::de::{self, Unexpected, Visitor};
use super::Bytes;
+1 -1
View File
@@ -41,7 +41,7 @@ impl<T: [const] Into<i32>> const From<T> for Enum<T> {
mod serde_impls {
use super::Enum;
use core::{fmt, marker::PhantomData};
use serde::{
use serde_core::{
Deserialize, Deserializer, Serialize, Serializer,
de::{self, Unexpected, Visitor, value::StrDeserializer},
};
+1 -1
View File
@@ -11,7 +11,7 @@
extern crate alloc;
#[cfg(feature = "serde")]
extern crate serde;
extern crate serde_core;
mod bytes_value;
mod enum_value;
+1 -1
View File
@@ -6,7 +6,7 @@
//! 特别适用于与 JavaScript 交互时避免精度损失的场景。
use core::{fmt, marker::PhantomData};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use serde_core::{Deserialize, Deserializer, Serialize, Serializer, de};
/// 密封特征,限制可以被字符串化的类型
mod private {
@@ -1,7 +0,0 @@
--out src/offset/local/win_bindings.rs
--flat --sys --no-comment
--filter
GetTimeZoneInformationForYear
SystemTimeToFileTime
SystemTimeToTzSpecificLocalTime
TzSpecificLocalTimeToSystemTime
@@ -3,8 +3,8 @@ cff-version: 1.2.0
message: Please cite this crate using these information.
# Version information.
date-released: 2025-02-26
version: 0.4.41
date-released: 2026-01-09
version: 0.4.43
# Project information.
abstract: Date and time library for Rust
@@ -1,6 +1,6 @@
[package]
name = "chrono"
version = "0.4.42"
version = "0.4.43"
description = "Date and time library for Rust"
homepage = "https://github.com/chronotope/chrono"
documentation = "https://docs.rs/chrono/"
@@ -20,6 +20,7 @@ name = "chrono"
# Don't forget to adjust `ALL_NON_EXCLUSIVE_FEATURES` in CI scripts when adding a feature or an optional dependency.
default = ["clock", "std", "oldtime", "wasmbind"]
alloc = []
defmt = ["dep:defmt", "pure-rust-locales?/defmt"]
libc = []
winapi = ["windows-link"]
std = ["alloc"]
@@ -41,9 +42,10 @@ __internal_bench = []
[dependencies]
num-traits = { version = "0.2", default-features = false }
serde = { version = "1.0.99", default-features = false, optional = true }
pure-rust-locales = { version = "0.8", optional = true }
rkyv = { version = "0.8", optional = true, default-features = false }
pure-rust-locales = { version = "0.8.2", optional = true }
rkyv = { version = "0.8.14", optional = true, default-features = false }
arbitrary = { version = "1.0.0", features = ["derive"], optional = true }
defmt = { version = "1.0.1", optional = true }
[target.'cfg(all(target_arch = "wasm32", not(any(target_os = "emscripten", target_os = "wasi"))))'.dependencies]
wasm-bindgen = { version = "0.2", optional = true }
@@ -52,9 +54,6 @@ js-sys = { version = "0.3", optional = true } # contains FFI bindings for
[target.'cfg(windows)'.dependencies]
windows-link = { version = "0.2", optional = true }
[target.'cfg(windows)'.dev-dependencies]
windows-bindgen = { version = "0.63" } # MSRV is 1.74
[target.'cfg(unix)'.dependencies]
iana-time-zone = { version = "0.1.45", optional = true, features = ["fallback"] }
@@ -63,6 +62,7 @@ serde_json = { version = "1" }
serde_derive = { version = "1", default-features = false }
similar-asserts = { version = "1.6.1" }
bincode = { version = "1.3.0" }
windows-bindgen = { version = "0.66" } # MSRV is 1.74
[target.'cfg(all(target_arch = "wasm32", not(any(target_os = "emscripten", target_os = "wasi"))))'.dev-dependencies]
wasm-bindgen-test = "0.3"
@@ -1,5 +1,5 @@
Rust-chrono is dual-licensed under The MIT License [1] and
Apache 2.0 License [2]. Copyright (c) 2014--2025, Kang Seonghoon and
Apache 2.0 License [2]. Copyright (c) 2014--2026, Kang Seonghoon and
contributors.
Nota Bene: This is same as the Rust Project's own license.
@@ -551,6 +551,16 @@ where
}
}
#[cfg(feature = "defmt")]
impl<Tz: TimeZone> defmt::Format for Date<Tz>
where
Tz::Offset: defmt::Format,
{
fn format(&self, fmt: defmt::Formatter) {
defmt::write!(fmt, "{}{}", self.naive_local(), self.offset);
}
}
// Note that implementation of Arbitrary cannot be automatically derived for Date<Tz>, due to
// the nontrivial bound <Tz as TimeZone>::Offset: Arbitrary.
#[cfg(all(feature = "arbitrary", feature = "std"))]
@@ -21,8 +21,8 @@ use crate::format::Locale;
#[cfg(feature = "alloc")]
use crate::format::{DelayedFormat, SecondsFormat, write_rfc2822, write_rfc3339};
use crate::format::{
Fixed, Item, ParseError, ParseResult, Parsed, StrftimeItems, TOO_LONG, parse,
parse_and_remainder, parse_rfc3339,
Fixed, Item, ParseError, ParseResult, Parsed, StrftimeItems, parse, parse_and_remainder,
parse_rfc3339,
};
use crate::naive::{Days, IsoWeek, NaiveDate, NaiveDateTime, NaiveTime};
#[cfg(feature = "clock")]
@@ -414,7 +414,7 @@ impl<Tz: TimeZone> DateTime<Tz> {
}
/// Fix the offset from UTC to its current value, dropping the associated timezone information.
/// This it useful for converting a generic `DateTime<Tz: Timezone>` to `DateTime<FixedOffset>`.
/// This is useful for converting a generic `DateTime<Tz: Timezone>` to `DateTime<FixedOffset>`.
#[inline]
#[must_use]
pub fn fixed_offset(&self) -> DateTime<FixedOffset> {
@@ -1068,12 +1068,7 @@ impl DateTime<FixedOffset> {
/// also simultaneously valid RFC 3339 values, but not all RFC 3339 values are valid ISO 8601
/// values (or the other way around).
pub fn parse_from_rfc3339(s: &str) -> ParseResult<DateTime<FixedOffset>> {
let mut parsed = Parsed::new();
let (s, _) = parse_rfc3339(&mut parsed, s)?;
if !s.is_empty() {
return Err(TOO_LONG);
}
parsed.to_datetime()
parse_rfc3339(s)
}
/// Parses a string from a user-specified format into a `DateTime<FixedOffset>` value.
@@ -1523,7 +1518,7 @@ impl<Tz: TimeZone> hash::Hash for DateTime<Tz> {
/// Add `TimeDelta` to `DateTime`.
///
/// As a part of Chrono's [leap second handling], the addition assumes that **there is no leap
/// second ever**, except when the `NaiveDateTime` itself represents a leap second in which case
/// second ever**, except when the `NaiveDateTime` itself represents a leap second in which case
/// the assumption becomes that **there is exactly a single leap second ever**.
///
/// # Panics
@@ -1542,7 +1537,7 @@ impl<Tz: TimeZone> Add<TimeDelta> for DateTime<Tz> {
/// Add `std::time::Duration` to `DateTime`.
///
/// As a part of Chrono's [leap second handling], the addition assumes that **there is no leap
/// second ever**, except when the `NaiveDateTime` itself represents a leap second in which case
/// second ever**, except when the `NaiveDateTime` itself represents a leap second in which case
/// the assumption becomes that **there is exactly a single leap second ever**.
///
/// # Panics
@@ -1563,7 +1558,7 @@ impl<Tz: TimeZone> Add<Duration> for DateTime<Tz> {
/// Add-assign `chrono::Duration` to `DateTime`.
///
/// As a part of Chrono's [leap second handling], the addition assumes that **there is no leap
/// second ever**, except when the `NaiveDateTime` itself represents a leap second in which case
/// second ever**, except when the `NaiveDateTime` itself represents a leap second in which case
/// the assumption becomes that **there is exactly a single leap second ever**.
///
/// # Panics
@@ -1583,7 +1578,7 @@ impl<Tz: TimeZone> AddAssign<TimeDelta> for DateTime<Tz> {
/// Add-assign `std::time::Duration` to `DateTime`.
///
/// As a part of Chrono's [leap second handling], the addition assumes that **there is no leap
/// second ever**, except when the `NaiveDateTime` itself represents a leap second in which case
/// second ever**, except when the `NaiveDateTime` itself represents a leap second in which case
/// the assumption becomes that **there is exactly a single leap second ever**.
///
/// # Panics
@@ -1683,7 +1678,7 @@ impl<Tz: TimeZone> Sub<Duration> for DateTime<Tz> {
/// This is the same as the addition with a negated `TimeDelta`.
///
/// As a part of Chrono's [leap second handling], the addition assumes that **there is no leap
/// second ever**, except when the `DateTime` itself represents a leap second in which case
/// second ever**, except when the `DateTime` itself represents a leap second in which case
/// the assumption becomes that **there is exactly a single leap second ever**.
///
/// # Panics
@@ -1703,7 +1698,7 @@ impl<Tz: TimeZone> SubAssign<TimeDelta> for DateTime<Tz> {
/// Subtract-assign `std::time::Duration` from `DateTime`.
///
/// As a part of Chrono's [leap second handling], the addition assumes that **there is no leap
/// second ever**, except when the `DateTime` itself represents a leap second in which case
/// second ever**, except when the `DateTime` itself represents a leap second in which case
/// the assumption becomes that **there is exactly a single leap second ever**.
///
/// # Panics
@@ -1817,6 +1812,16 @@ impl<Tz: TimeZone> fmt::Debug for DateTime<Tz> {
}
}
#[cfg(feature = "defmt")]
impl<Tz: TimeZone> defmt::Format for DateTime<Tz>
where
Tz::Offset: defmt::Format,
{
fn format(&self, fmt: defmt::Formatter) {
defmt::write!(fmt, "{}{}", self.overflowing_naive_local(), self.offset);
}
}
// `fmt::Debug` is hand implemented for the `rkyv::Archive` variant of `DateTime` because
// deriving a trait recursively does not propagate trait defined associated types with their own
// constraints:
@@ -50,9 +50,7 @@ impl<Tz: TimeZone> ser::Serialize for DateTime<Tz> {
}
}
#[allow(missing_docs)]
#[allow(missing_debug_implementations)]
pub struct DateTimeVisitor;
struct DateTimeVisitor;
impl de::Visitor<'_> for DateTimeVisitor {
type Value = DateTime<FixedOffset>;
@@ -76,6 +74,7 @@ impl de::Visitor<'_> for DateTimeVisitor {
///
/// See [the `serde` module](crate::serde) for alternate deserialization formats.
impl<'de> de::Deserialize<'de> for DateTime<FixedOffset> {
#[inline]
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
@@ -474,6 +474,7 @@ impl OffsetFormat {
///
/// See the `TimeZone::to_rfc3339_opts` function for usage.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[allow(clippy::manual_non_exhaustive)]
pub enum SecondsFormat {
/// Format whole seconds only, with no decimal point nor subseconds.
@@ -78,6 +78,7 @@ enum Void {}
/// Padding characters for numeric items.
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Pad {
/// No padding.
None,
@@ -102,6 +103,7 @@ pub enum Pad {
/// parsed with the same formatting items.
#[non_exhaustive]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Numeric {
/// Full Gregorian year (FW=4, PW=∞).
/// May accept years before 1 BCE or after 9999 CE, given an initial sign (+/-).
@@ -170,12 +172,20 @@ impl fmt::Debug for InternalNumeric {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for InternalNumeric {
fn format(&self, f: defmt::Formatter) {
defmt::write!(f, "<InternalNumeric>")
}
}
/// Fixed-format item types.
///
/// They have their own rules of formatting and parsing.
/// Otherwise noted, they print in the specified cases but parse case-insensitively.
#[non_exhaustive]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Fixed {
/// Abbreviated month names.
///
@@ -260,11 +270,13 @@ pub enum Fixed {
/// An opaque type representing fixed-format item types for internal uses only.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct InternalFixed {
val: InternalInternal,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
enum InternalInternal {
/// Same as [`TimezoneOffsetColonZ`](#variant.TimezoneOffsetColonZ), but
/// allows missing minutes (per [ISO 8601][iso8601]).
@@ -285,6 +297,7 @@ enum InternalInternal {
/// Type for specifying the format of UTC offsets.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct OffsetFormat {
/// See `OffsetPrecision`.
pub precision: OffsetPrecision,
@@ -298,6 +311,7 @@ pub struct OffsetFormat {
/// The precision of an offset from UTC formatting item.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum OffsetPrecision {
/// Format offset from UTC as only hours. Not recommended, it is not uncommon for timezones to
/// have an offset of 30 minutes, 15 minutes, etc.
@@ -319,6 +333,7 @@ pub enum OffsetPrecision {
/// The separator between hours and minutes in an offset.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Colons {
/// No separator
None,
@@ -350,6 +365,23 @@ pub enum Item<'a> {
Error,
}
#[cfg(feature = "defmt")]
impl<'a> defmt::Format for Item<'a> {
fn format(&self, f: defmt::Formatter) {
match self {
Item::Literal(v) => defmt::write!(f, "Literal {{ {} }}", v),
#[cfg(feature = "alloc")]
Item::OwnedLiteral(_) => {}
Item::Space(v) => defmt::write!(f, "Space {{ {} }}", v),
#[cfg(feature = "alloc")]
Item::OwnedSpace(_) => {}
Item::Numeric(u, v) => defmt::write!(f, "Numeric {{ {}, {} }}", u, v),
Item::Fixed(v) => defmt::write!(f, "Fixed {{ {} }}", v),
Item::Error => defmt::write!(f, "Error"),
}
}
}
const fn num(numeric: Numeric) -> Item<'static> {
Item::Numeric(numeric, Pad::None)
}
@@ -388,6 +420,7 @@ impl Item<'_> {
/// An error from the `parse` function.
#[derive(Debug, Clone, PartialEq, Eq, Copy, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct ParseError(ParseErrorKind);
impl ParseError {
@@ -400,6 +433,7 @@ impl ParseError {
/// The category of parse error
#[allow(clippy::manual_non_exhaustive)]
#[derive(Debug, Clone, PartialEq, Eq, Copy, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum ParseErrorKind {
/// Given field is out of permitted range.
OutOfRange,
@@ -11,7 +11,7 @@ use super::scan;
use super::{BAD_FORMAT, INVALID, OUT_OF_RANGE, TOO_LONG, TOO_SHORT};
use super::{Fixed, InternalFixed, InternalInternal, Item, Numeric, Pad, Parsed};
use super::{ParseError, ParseResult};
use crate::{DateTime, FixedOffset, Weekday};
use crate::{DateTime, FixedOffset, MappedLocalTime, NaiveDate, NaiveTime, Weekday};
fn set_weekday_with_num_days_from_sunday(p: &mut Parsed, v: i64) -> ParseResult<()> {
p.set_weekday(match v {
@@ -151,7 +151,7 @@ fn parse_rfc2822<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a st
Ok((s, ()))
}
pub(crate) fn parse_rfc3339<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a str, ())> {
pub(crate) fn parse_rfc3339(mut s: &str) -> ParseResult<DateTime<FixedOffset>> {
macro_rules! try_consume {
($e:expr) => {{
let (s_, v) = $e?;
@@ -189,40 +189,81 @@ pub(crate) fn parse_rfc3339<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseRes
//
// - For readability a full-date and a full-time may be separated by a space character.
parsed.set_year(try_consume!(scan::number(s, 4, 4)))?;
s = scan::char(s, b'-')?;
parsed.set_month(try_consume!(scan::number(s, 2, 2)))?;
s = scan::char(s, b'-')?;
parsed.set_day(try_consume!(scan::number(s, 2, 2)))?;
let bytes = s.as_bytes();
if bytes.len() < 19 {
return Err(TOO_SHORT);
}
s = match s.as_bytes().first() {
Some(&b't' | &b'T' | &b' ') => &s[1..],
Some(_) => return Err(INVALID),
None => return Err(TOO_SHORT),
let fixed = <&[u8; 19]>::try_from(&bytes[..19]).unwrap(); // we just checked the length
let year = digit(fixed, 0)? as u16 * 1000
+ digit(fixed, 1)? as u16 * 100
+ digit(fixed, 2)? as u16 * 10
+ digit(fixed, 3)? as u16;
if bytes.get(4) != Some(&b'-') {
return Err(INVALID);
}
let month = digit(fixed, 5)? * 10 + digit(fixed, 6)?;
if bytes.get(7) != Some(&b'-') {
return Err(INVALID);
}
let day = digit(fixed, 8)? * 10 + digit(fixed, 9)?;
let date =
NaiveDate::from_ymd_opt(year as i32, month as u32, day as u32).ok_or(OUT_OF_RANGE)?;
if !matches!(bytes.get(10), Some(&b't' | &b'T' | &b' ')) {
return Err(INVALID);
}
let hour = digit(fixed, 11)? * 10 + digit(fixed, 12)?;
if bytes.get(13) != Some(&b':') {
return Err(INVALID);
}
let min = digit(fixed, 14)? * 10 + digit(fixed, 15)?;
if bytes.get(16) != Some(&b':') {
return Err(INVALID);
}
let sec = digit(fixed, 17)? * 10 + digit(fixed, 18)?;
let (sec, extra_nanos) = match sec {
60 => (59, 1_000_000_000), // rfc3339 allows leap seconds
_ => (sec, 0),
};
parsed.set_hour(try_consume!(scan::number(s, 2, 2)))?;
s = scan::char(s, b':')?;
parsed.set_minute(try_consume!(scan::number(s, 2, 2)))?;
s = scan::char(s, b':')?;
parsed.set_second(try_consume!(scan::number(s, 2, 2)))?;
if s.starts_with('.') {
let nanosecond = try_consume!(scan::nanosecond(&s[1..]));
parsed.set_nanosecond(nanosecond)?;
}
let nano = if bytes.get(19) == Some(&b'.') {
let nanosecond = try_consume!(scan::nanosecond(&s[20..]));
extra_nanos + nanosecond
} else {
s = &s[19..];
extra_nanos
};
let time = NaiveTime::from_hms_nano_opt(hour as u32, min as u32, sec as u32, nano)
.ok_or(OUT_OF_RANGE)?;
let offset = try_consume!(scan::timezone_offset(s, |s| scan::char(s, b':'), true, false, true));
// This range check is similar to the one in `FixedOffset::east_opt`, so it would be redundant.
// But it is possible to read the offset directly from `Parsed`. We want to only successfully
// populate `Parsed` if the input is fully valid RFC 3339.
// Max for the hours field is `23`, and for the minutes field `59`.
const MAX_RFC3339_OFFSET: i32 = (23 * 60 + 59) * 60;
if !(-MAX_RFC3339_OFFSET..=MAX_RFC3339_OFFSET).contains(&offset) {
return Err(OUT_OF_RANGE);
let offset = try_consume!(scan::timezone_offset(s, |s| scan::char(s, b':'), true, false, true));
if !s.is_empty() {
return Err(TOO_LONG);
}
parsed.set_offset(i64::from(offset))?;
Ok((s, ()))
let tz = FixedOffset::east_opt(offset).ok_or(OUT_OF_RANGE)?;
Ok(match date.and_time(time).and_local_timezone(tz) {
MappedLocalTime::Single(dt) => dt,
// `FixedOffset::with_ymd_and_hms` doesn't return `MappedLocalTime::Ambiguous`
// and returns `MappedLocalTime::None` on invalid data
MappedLocalTime::Ambiguous(_, _) | MappedLocalTime::None => unreachable!(),
})
}
#[inline]
fn digit(bytes: &[u8; 19], index: usize) -> ParseResult<u8> {
match bytes[index].is_ascii_digit() {
true => Ok(bytes[index] - b'0'),
false => Err(INVALID),
}
}
/// Tries to parse given string into `parsed` with given formatting items.
@@ -420,7 +461,7 @@ where
&Nanosecond => {
if s.starts_with('.') {
let nano = try_consume!(scan::nanosecond(&s[1..]));
parsed.set_nanosecond(nano)?;
parsed.set_nanosecond(nano as i64)?;
}
}
@@ -1830,29 +1871,34 @@ mod tests {
"2015-01-20T17:35:20.00000000045208:00",
Ok(ymd_hmsn(2015, 1, 20, 17, 35, 20, 0, -8)),
), // too small with MINUS SIGN (U+2212)
("2023-11-05T01:30:00-04:00", Ok(ymd_hmsn(2023, 11, 5, 1, 30, 0, 0, -4))), // ambiguous timestamp
("2015-01-20 17:35:20-08:00", Ok(ymd_hmsn(2015, 1, 20, 17, 35, 20, 0, -8))), // without 'T'
("2015/01/20T17:35:20.001-08:00", Err(INVALID)), // wrong separator char YMD
("2015-01-20T17-35-20.001-08:00", Err(INVALID)), // wrong separator char HMS
("-01-20T17:35:20-08:00", Err(INVALID)), // missing year
("99-01-20T17:35:20-08:00", Err(INVALID)), // bad year format
("99999-01-20T17:35:20-08:00", Err(INVALID)), // bad year value
("-2000-01-20T17:35:20-08:00", Err(INVALID)), // bad year value
("2015-01-20_17:35:20-08:00", Err(INVALID)), // wrong date time separator
("2015/01/20T17:35:20.001-08:00", Err(INVALID)), // wrong separator char YM
("2015-01/20T17:35:20.001-08:00", Err(INVALID)), // wrong separator char MD
("2015-01-20T17-35-20.001-08:00", Err(INVALID)), // wrong separator char HM
("2015-01-20T17-35:20.001-08:00", Err(INVALID)), // wrong separator char MS
("-01-20T17:35:20-08:00", Err(INVALID)), // missing year
("99-01-20T17:35:20-08:00", Err(INVALID)), // bad year format
("99999-01-20T17:35:20-08:00", Err(INVALID)), // bad year value
("-2000-01-20T17:35:20-08:00", Err(INVALID)), // bad year value
("2015-00-30T17:35:20-08:00", Err(OUT_OF_RANGE)), // bad month value
("2015-02-30T17:35:20-08:00", Err(OUT_OF_RANGE)), // bad day of month value
("2015-01-20T25:35:20-08:00", Err(OUT_OF_RANGE)), // bad hour value
("2015-01-20T17:65:20-08:00", Err(OUT_OF_RANGE)), // bad minute value
("2015-01-20T17:35:90-08:00", Err(OUT_OF_RANGE)), // bad second value
("2015-01-20T17:35:20-24:00", Err(OUT_OF_RANGE)), // bad offset value
("15-01-20T17:35:20-08:00", Err(INVALID)), // bad year format
("15-01-20T17:35:20-08:00:00", Err(INVALID)), // bad year format, bad offset format
("2015-01-20T17:35:2008:00", Err(INVALID)), // missing offset sign
("2015-01-20T17:35:20 08:00", Err(INVALID)), // missing offset sign
("2015-01-20T17:35:20Zulu", Err(TOO_LONG)), // bad offset format
("2015-01-20T17:35:20 Zulu", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20GMT", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20 GMT", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20+GMT", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20++08:00", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20--08:00", Err(INVALID)), // bad offset format
("15-01-20T17:35:20-08:00", Err(INVALID)), // bad year format
("15-01-20T17:35:20-08:00:00", Err(INVALID)), // bad year format, bad offset format
("2015-01-20T17:35:2008:00", Err(INVALID)), // missing offset sign
("2015-01-20T17:35:20 08:00", Err(INVALID)), // missing offset sign
("2015-01-20T17:35:20Zulu", Err(TOO_LONG)), // bad offset format
("2015-01-20T17:35:20 Zulu", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20GMT", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20 GMT", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20+GMT", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20++08:00", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20--08:00", Err(INVALID)), // bad offset format
("2015-01-20T17:35:20−−08:00", Err(INVALID)), // bad offset format with MINUS SIGN (U+2212)
("2015-01-20T17:35:20±08:00", Err(INVALID)), // bad offset sign
("2015-01-20T17:35:20-08-00", Err(INVALID)), // bad offset separator
@@ -126,6 +126,7 @@ use crate::{DateTime, Datelike, TimeDelta, Timelike, Weekday};
/// ```
#[allow(clippy::manual_non_exhaustive)]
#[derive(Clone, PartialEq, Eq, Debug, Default, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Parsed {
#[doc(hidden)]
pub year: Option<i32>,
@@ -47,14 +47,15 @@ pub(super) fn number(s: &str, min: usize, max: usize) -> ParseResult<(&str, i64)
/// Tries to consume at least one digits as a fractional second.
/// Returns the number of whole nanoseconds (0--999,999,999).
pub(super) fn nanosecond(s: &str) -> ParseResult<(&str, i64)> {
pub(super) fn nanosecond(s: &str) -> ParseResult<(&str, u32)> {
// record the number of digits consumed for later scaling.
let origlen = s.len();
let (s, v) = number(s, 1, 9)?;
let v = u32::try_from(v).expect("999,999,999 should fit u32");
let consumed = origlen - s.len();
// scale the number accordingly.
static SCALE: [i64; 10] =
const SCALE: [u32; 10] =
[0, 100_000_000, 10_000_000, 1_000_000, 100_000, 10_000, 1_000, 100, 10, 1];
let v = v.checked_mul(SCALE[consumed]).ok_or(OUT_OF_RANGE)?;
@@ -189,6 +189,7 @@ use alloc::vec::Vec;
/// [`DateTime`]: crate::DateTime
/// [`format::parse()`]: crate::format::parse()
#[derive(Clone, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct StrftimeItems<'a> {
/// Remaining portion of the string.
remainder: &'a str,
@@ -501,12 +501,10 @@
//! [chrono#1095]: https://github.com/chronotope/chrono/pull/1095
#![doc(html_root_url = "https://docs.rs/chrono/latest/", test(attr(deny(warnings))))]
#![deny(missing_docs)]
#![deny(missing_debug_implementations)]
#![warn(unreachable_pub)]
#![deny(clippy::tests_outside_test_module)]
#![warn(clippy::tests_outside_test_module)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[cfg(feature = "alloc")]
extern crate alloc;
@@ -687,6 +685,13 @@ impl fmt::Debug for OutOfRange {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for OutOfRange {
fn format(&self, fmt: defmt::Formatter) {
defmt::write!(fmt, "out of range");
}
}
#[cfg(feature = "std")]
impl std::error::Error for OutOfRange {}
@@ -34,10 +34,11 @@ use crate::naive::NaiveDate;
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq, PartialOrd)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)))
rkyv(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Month {
/// January
January = 0,
@@ -248,6 +249,7 @@ impl num_traits::FromPrimitive for Month {
/// A duration in calendar months
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Months(pub(crate) u32);
impl Months {
@@ -287,6 +289,13 @@ impl fmt::Debug for ParseMonthError {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for ParseMonthError {
fn format(&self, fmt: defmt::Formatter) {
defmt::write!(fmt, "ParseMonthError {{ .. }}")
}
}
#[cfg(feature = "serde")]
mod month_serde {
use super::Month;
@@ -96,7 +96,7 @@ mod tests;
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq, PartialOrd)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)))
rkyv(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
pub struct NaiveDate {
@@ -1148,7 +1148,7 @@ impl NaiveDate {
/// );
/// ```
#[must_use]
pub const fn signed_duration_since(self, rhs: NaiveDate) -> TimeDelta {
pub const fn signed_duration_since(self, rhs: Self) -> TimeDelta {
let year1 = self.year();
let year2 = rhs.year();
let (year1_div_400, year1_mod_400) = div_mod_floor(year1, 400);
@@ -1161,11 +1161,40 @@ impl NaiveDate {
expect(TimeDelta::try_days(days), "always in range")
}
/// Returns the absolute difference between two `NaiveDate`s measured as the number of days.
///
/// This is always an integer, non-negative number, similar to `abs_diff` in `std`.
///
/// # Example
///
/// ```
/// # use chrono::{Days, NaiveDate};
/// #
/// let date1: NaiveDate = "2020-01-01".parse().unwrap();
/// let date2: NaiveDate = "2020-01-31".parse().unwrap();
/// assert_eq!(date2.abs_diff(date1), Days::new(30));
/// assert_eq!(date1.abs_diff(date2), Days::new(30));
/// ```
pub const fn abs_diff(self, rhs: Self) -> Days {
Days::new(i32::abs_diff(self.num_days_from_ce(), rhs.num_days_from_ce()) as u64)
}
/// Returns the number of whole years from the given `base` until `self`.
///
/// # Errors
///
/// Returns `None` if `base > self`.
///
/// # Example
///
/// ```
/// # use chrono::{NaiveDate};
/// #
/// let base: NaiveDate = "2025-01-01".parse().unwrap();
/// let date: NaiveDate = "2030-01-01".parse().unwrap();
///
/// assert_eq!(date.years_since(base), Some(5))
/// ```
#[must_use]
pub const fn years_since(&self, base: Self) -> Option<u32> {
let mut years = self.year() - base.year();
@@ -2284,6 +2313,23 @@ impl fmt::Debug for NaiveDate {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for NaiveDate {
fn format(&self, fmt: defmt::Formatter) {
let year = self.year();
let mdf = self.mdf();
if (0..=9999).contains(&year) {
defmt::write!(fmt, "{:02}{:02}", year / 100, year % 100);
} else {
// ISO 8601 requires the explicit sign for out-of-range years
let sign = ['+', '-'][(year < 0) as usize];
defmt::write!(fmt, "{}{:05}", sign, year.abs());
}
defmt::write!(fmt, "-{:02}-{:02}", mdf.month(), mdf.day());
}
}
/// The `Display` output of the naive date `d` is the same as
/// [`d.format("%Y-%m-%d")`](crate::format::strftime).
///
@@ -69,7 +69,7 @@ pub const MAX_DATETIME: NaiveDateTime = NaiveDateTime::MAX;
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq, PartialOrd)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)))
rkyv(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
@@ -2057,6 +2057,13 @@ impl fmt::Debug for NaiveDateTime {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for NaiveDateTime {
fn format(&self, fmt: defmt::Formatter) {
defmt::write!(fmt, "{}T{}", self.date, self.time);
}
}
/// The `Display` output of the naive date and time `dt` is the same as
/// [`dt.format("%Y-%m-%d %H:%M:%S%.f")`](crate::format::strftime).
///
@@ -21,7 +21,7 @@ use rkyv::{Archive, Deserialize, Serialize};
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq, PartialOrd)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)))
rkyv(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
pub struct IsoWeek {
@@ -160,6 +160,21 @@ impl fmt::Debug for IsoWeek {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for IsoWeek {
fn format(&self, fmt: defmt::Formatter) {
let year = self.year();
let week = self.week();
if (0..=9999).contains(&year) {
defmt::write!(fmt, "{:04}-W{:02}", year, week)
} else {
// ISO 8601 requires the explicit sign for out-of-range years
let sign = ['+', '-'][(year < 0) as usize];
defmt::write!(fmt, "{}{:05}-W{:02}", sign, year.abs(), week)
}
}
}
#[cfg(test)]
mod tests {
#[cfg(feature = "rkyv-validation")]
@@ -31,6 +31,7 @@ pub use self::internals::YearFlags as __BenchYearFlags;
/// A week represented by a [`NaiveDate`] and a [`Weekday`] which is the first
/// day of the week.
#[derive(Clone, Copy, Debug, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct NaiveWeek {
date: NaiveDate,
start: Weekday,
@@ -226,6 +227,7 @@ impl Hash for NaiveWeek {
/// difference applies only when dealing with `DateTime<TimeZone>` data types and in other cases
/// `TimeDelta::days(n)` and `Days::new(n)` are equivalent.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
// Newtype over a count of whole calendar days (semantics in the doc comment above).
pub struct Days(pub(crate) u64);
impl Days {
@@ -214,7 +214,7 @@ mod tests;
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq, PartialOrd)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)))
rkyv(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
pub struct NaiveTime {
@@ -1529,6 +1529,31 @@ impl fmt::Debug for NaiveTime {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for NaiveTime {
    /// Formats as `HH:MM:SS`, followed by a fractional part using the
    /// shortest of milli-, micro- or nanosecond precision that is exact.
    fn format(&self, fmt: defmt::Formatter) {
        let (hour, min, sec) = self.hms();
        // `frac >= 1_000_000_000` encodes a leap second; fold the excess
        // into the seconds field so it renders as `..:60`.
        let (sec, nano) = match self.frac.checked_sub(1_000_000_000) {
            Some(leap) => (sec + 1, leap),
            None => (sec, self.frac),
        };
        defmt::write!(fmt, "{:02}:{:02}:{:02}", hour as u8, min as u8, sec as u8);
        match nano {
            0 => {}
            n if n % 1_000_000 == 0 => defmt::write!(fmt, ".{:03}", n / 1_000_000),
            n if n % 1_000 == 0 => defmt::write!(fmt, ".{:06}", n / 1_000),
            n => defmt::write!(fmt, ".{:09}", n),
        }
    }
}
/// The `Display` output of the naive time `t` is the same as
/// [`t.format("%H:%M:%S%.f")`](crate::format::strftime).
///
@@ -24,7 +24,7 @@ use crate::naive::{NaiveDate, NaiveDateTime};
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, Hash, Debug)))
rkyv(derive(Clone, Copy, PartialEq, Eq, Hash, Debug))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
pub struct FixedOffset {
@@ -174,6 +174,23 @@ impl fmt::Display for FixedOffset {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for FixedOffset {
    /// Formats the offset as `±HH:MM`, extending to `±HH:MM:SS` only when the
    /// offset is not a whole number of minutes.
    fn format(&self, f: defmt::Formatter) {
        let raw = self.local_minus_utc;
        let (sign, total) = if raw.is_negative() { ('-', -raw) } else { ('+', raw) };
        let hour = total / 3600;
        let rem = total % 3600;
        let (min, sec) = (rem / 60, rem % 60);
        if sec == 0 {
            defmt::write!(f, "{}{:02}:{:02}", sign, hour, min)
        } else {
            defmt::write!(f, "{}{:02}:{:02}:{:02}", sign, hour, min, sec)
        }
    }
}
#[cfg(all(feature = "arbitrary", feature = "std"))]
impl arbitrary::Arbitrary<'_> for FixedOffset {
fn arbitrary(u: &mut arbitrary::Unstructured) -> arbitrary::Result<FixedOffset> {
@@ -122,6 +122,7 @@ mod tz_info;
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Local;
impl Local {
@@ -76,7 +76,7 @@ pub use self::utc::Utc;
/// The type of `T` is usually a [`DateTime`] but may also be only an offset.
pub type MappedLocalTime<T> = LocalResult<T>;
#[derive(Clone, PartialEq, Debug, Copy, Eq, Hash)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
/// Old name of [`MappedLocalTime`]. See that type for more documentation.
pub enum LocalResult<T> {
/// The local time maps to a single unique result.
@@ -45,7 +45,7 @@ use crate::{Date, DateTime};
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, Debug, Hash)))
rkyv(derive(Clone, Copy, PartialEq, Eq, Debug, Hash))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
@@ -150,3 +150,10 @@ impl fmt::Display for Utc {
write!(f, "UTC")
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for Utc {
    // `defmt` logging support: the UTC offset renders as the ISO 8601 "Zulu"
    // designator `Z` (note the `Display` impl above writes "UTC" instead).
    fn format(&self, fmt: defmt::Formatter) {
        defmt::write!(fmt, "Z");
    }
}
@@ -305,6 +305,7 @@ where
///
/// See: [`DurationRound`]
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum RoundingError {
/// Error when the TimeDelta exceeds the TimeDelta from or until the Unix epoch.
///
@@ -56,9 +56,10 @@ const SECS_PER_WEEK: i64 = 604_800;
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq, PartialOrd)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)))
rkyv(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct TimeDelta {
secs: i64,
nanos: i32, // Always 0 <= nanos < NANOS_PER_SEC
@@ -624,6 +625,7 @@ impl fmt::Display for TimeDelta {
/// *seconds*, while this module supports signed range of up to
/// `i64::MAX` of *milliseconds*.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct OutOfRangeError(());
impl fmt::Display for OutOfRangeError {
@@ -34,10 +34,11 @@ use crate::OutOfRange;
any(feature = "rkyv", feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, Debug, Hash)))
rkyv(derive(Clone, Copy, PartialEq, Eq, Debug, Hash))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Weekday {
/// Monday.
Mon = 0,
@@ -256,6 +257,13 @@ impl fmt::Debug for ParseWeekdayError {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for ParseWeekdayError {
    // Opaque rendering in the `Debug` style; the error carries no public
    // detail, so only its name is emitted.
    fn format(&self, fmt: defmt::Formatter) {
        defmt::write!(fmt, "ParseWeekdayError {{ .. }}")
    }
}
// the actual `FromStr` implementation is in the `format` module to leverage the existing code
#[cfg(feature = "serde")]
@@ -331,6 +331,23 @@ impl Debug for WeekdaySet {
}
}
#[cfg(feature = "defmt")]
impl defmt::Format for WeekdaySet {
    // Renders the set as e.g. `WeekdaySet(0110010)`: seven bits, one per
    // weekday, printed from bit 6 down to bit 0 of the backing integer.
    // NOTE(review): which bit maps to which weekday depends on `WeekdaySet`'s
    // construction, not visible here — presumably bit 0 is Monday (matching
    // `Weekday::Mon = 0`); confirm against the type's definition.
    fn format(&self, f: defmt::Formatter<'_>) {
        defmt::write!(
            f,
            "WeekdaySet({}{}{}{}{}{}{})",
            0x1 & (self.0 >> 6),
            0x1 & (self.0 >> 5),
            0x1 & (self.0 >> 4),
            0x1 & (self.0 >> 3),
            0x1 & (self.0 >> 2),
            0x1 & (self.0 >> 1),
            0x1 & (self.0 >> 0),
        )
    }
}
/// An iterator over a collection of weekdays, starting from a given day.
///
/// See [`WeekdaySet::iter()`].
-35
View File
@@ -1,35 +0,0 @@
[package]
name = "dashmap"
version = "7.0.0-rc2"
authors = ["Joel Wejdenstål <jwejdenstal@icloud.com>"]
edition = "2021"
# rust-version = "1.70"
license = "MIT"
repository = "https://github.com/xacrimon/dashmap"
homepage = "https://github.com/xacrimon/dashmap"
description = "Blazing fast concurrent HashMap for Rust."
readme = "README.md"
documentation = "https://docs.rs/dashmap"
keywords = ["atomic", "concurrent", "hashmap"]
categories = ["concurrency", "algorithms", "data-structures"]
[features]
all = ["raw-api", "typesize", "serde", "rayon", "arbitrary"]
raw-api = []
typesize = ["dep:typesize"]
inline-more = ["hashbrown/inline-more"]
[dependencies]
lock_api = "0.4.12"
parking_lot_core = "0.9.10"
equivalent = "1.0.1"
hashbrown = { version = "0.16.1", default-features = false }
serde = { version = "1.0.217", optional = true, features = ["derive"] }
cfg-if = "1.0.0"
rayon = { version = "1.10.0", optional = true }
arbitrary = { version = "1.4.1", optional = true }
crossbeam-utils = "0.8"
typesize = { version = "0.1.13", default-features = false, features = ["hashbrown_15"], optional = true }
[package.metadata.docs.rs]
features = ["all"]
-21
View File
@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2025 Joel Wejdenstål
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-72
View File
@@ -1,72 +0,0 @@
# DashMap
Blazingly fast concurrent map in Rust.
DashMap is an implementation of a concurrent associative array/hashmap in Rust.
DashMap tries to implement an easy to use API similar to `std::collections::HashMap`
with some slight changes to handle concurrency.
DashMap tries to be very simple to use and to be a direct replacement for `RwLock<HashMap<K, V>>`.
To accomplish these goals, all methods take `&self` instead of modifying methods taking `&mut self`.
This allows you to put a DashMap in an `Arc<T>` and share it between threads while still being able to modify it.
DashMap puts great effort into performance and aims to be as fast as possible.
If you have any suggestions or tips do not hesitate to open an issue or a PR.
The current MSRV is 1.70 and is not changed in patch releases, so you can pin a minor version
if you want perfect stability. Note that `dashmap`'s MSRV always stays at least one year behind the current stable Rust release.
[![version](https://img.shields.io/crates/v/dashmap)](https://crates.io/crates/dashmap)
[![documentation](https://docs.rs/dashmap/badge.svg)](https://docs.rs/dashmap)
[![downloads](https://img.shields.io/crates/d/dashmap)](https://crates.io/crates/dashmap)
[![minimum rustc version](https://img.shields.io/badge/rustc-1.70-orange.svg)](https://crates.io/crates/dashmap)
## Cargo features
- `serde` - Enables serde support.
- `raw-api` - Enables the unstable raw-shard api.
- `rayon` - Enables rayon support.
- `inline-more` - Enables `inline-more` feature from the `hashbrown` crate. Comes with the usual tradeoffs of possibly excessive inlining.
- `arbitrary` - Enables support for the `arbitrary` crate.
## Contributing
DashMap gladly accepts contributions!
Do not hesitate to open issues or PR's.
I will take a look as soon as I have time for it.
That said I do not get paid (yet) to work on open-source. This means
that my time is limited and my work here comes after my personal life.
## Performance
A comprehensive benchmark suite including DashMap can be found [here](https://github.com/xacrimon/conc-map-bench).
## Special thanks
- [Conrad Ludgate](https://github.com/conradludgate)
- [Jon Gjengset](https://github.com/jonhoo)
- [Yato](https://github.com/RustyYato)
- [Karl Bergström](https://github.com/kabergstrom)
- [Dylan DPC](https://github.com/Dylan-DPC)
- [Lokathor](https://github.com/Lokathor)
- [namibj](https://github.com/namibj)
## License
This project is licensed under MIT.
-13
View File
@@ -1,13 +0,0 @@
use arbitrary::{Arbitrary, Unstructured};
use core::hash::BuildHasher;
// Fuzzing support: build a `DashMap` from `(K, V)` pairs drawn from the
// unstructured fuzz input.
impl<'a, K, V, S> Arbitrary<'a> for crate::DashMap<K, V, S>
where
    K: Eq + std::hash::Hash + Arbitrary<'a>,
    V: Arbitrary<'a>,
    S: Default + BuildHasher + Clone,
{
    fn arbitrary(u: &mut Unstructured<'a>) -> arbitrary::Result<Self> {
        // `arbitrary_iter` yields fallible `(K, V)` items; `collect` builds
        // the map and short-circuits on the first error.
        u.arbitrary_iter()?.collect()
    }
}
-217
View File
@@ -1,217 +0,0 @@
use crossbeam_utils::CachePadded;
use hashbrown::hash_table;
use super::mapref::multiple::{RefMulti, RefMutMulti};
use crate::lock::{RwLock, RwLockReadGuardDetached, RwLockWriteGuardDetached};
use crate::{DashMap, HashMap};
use core::hash::Hash;
use std::sync::Arc;
/// Iterator over a DashMap yielding key value pairs.
///
/// # Examples
///
/// ```
/// use dashmap::DashMap;
///
/// let map = DashMap::new();
/// map.insert("hello", "world");
/// map.insert("alex", "steve");
/// let pairs: Vec<(&'static str, &'static str)> = map.into_iter().collect();
/// assert_eq!(pairs.len(), 2);
/// ```
pub struct OwningIter<K, V> {
shards: std::vec::IntoIter<CachePadded<RwLock<HashMap<K, V>>>>,
current: Option<GuardOwningIter<K, V>>,
}
impl<K: Eq + Hash, V> OwningIter<K, V> {
pub(crate) fn new<S>(map: DashMap<K, V, S>) -> Self {
Self {
shards: map.shards.into_vec().into_iter(),
current: None,
}
}
}
type GuardOwningIter<K, V> = hash_table::IntoIter<(K, V)>;
impl<K: Eq + Hash, V> Iterator for OwningIter<K, V> {
    type Item = (K, V);

    /// Yields owned `(K, V)` pairs, consuming one shard at a time.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Drain the shard we are currently iterating, if any.
            match self.current.as_mut().and_then(|shard_iter| shard_iter.next()) {
                Some(pair) => return Some(pair),
                None => {
                    // Current shard exhausted (or first call): unwrap the
                    // next shard's lock and table; `?` ends iteration when
                    // no shards remain.
                    let next_shard = self.shards.next()?;
                    self.current = Some(next_shard.into_inner().into_inner().into_iter());
                }
            }
        }
    }
}
type GuardIter<'a, K, V> = (
Arc<RwLockReadGuardDetached<'a>>,
hash_table::Iter<'a, (K, V)>,
);
type GuardIterMut<'a, K, V> = (
Arc<RwLockWriteGuardDetached<'a>>,
hash_table::IterMut<'a, (K, V)>,
);
/// Iterator over a DashMap yielding immutable references.
///
/// # Examples
///
/// ```
/// use dashmap::DashMap;
///
/// let map = DashMap::new();
/// map.insert("hello", "world");
/// assert_eq!(map.iter().count(), 1);
/// ```
pub struct Iter<'a, K, V> {
shards: std::slice::Iter<'a, CachePadded<RwLock<HashMap<K, V>>>>,
current: Option<GuardIter<'a, K, V>>,
}
impl<'i, K: Clone + Hash + Eq, V: Clone> Clone for Iter<'i, K, V> {
fn clone(&self) -> Self {
Iter {
shards: self.shards.clone(),
current: self.current.clone(),
}
}
}
impl<'a, K: 'a, V: 'a> Iter<'a, K, V> {
pub(crate) fn new<S>(map: &'a DashMap<K, V, S>) -> Self {
Self {
shards: map.shards.iter(),
current: None,
}
}
}
impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
    type Item = RefMulti<'a, K, V>;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Drain the shard whose read guard we currently hold.
            if let Some(current) = self.current.as_mut() {
                if let Some((k, v)) = current.1.next() {
                    // Each yielded ref shares ownership of the shard's guard,
                    // keeping the shard read-locked while the ref is alive.
                    let guard = current.0.clone();
                    return Some(RefMulti::new(guard, k, v));
                }
            }
            // Current shard exhausted (or first call): read-lock the next
            // shard; `?` ends iteration when no shards remain.
            let guard = self.shards.next()?.read();
            // SAFETY: we keep the guard alive with the shard iterator,
            // and with any refs produced by the iterator
            let (guard, shard) = unsafe { RwLockReadGuardDetached::detach_from(guard) };
            let iter = shard.iter();
            self.current = Some((Arc::new(guard), iter));
        }
    }
}
/// Iterator over a DashMap yielding mutable references.
///
/// # Examples
///
/// ```
/// use dashmap::DashMap;
///
/// let map = DashMap::new();
/// map.insert("Johnny", 21);
/// map.iter_mut().for_each(|mut r| *r += 1);
/// assert_eq!(*map.get("Johnny").unwrap(), 22);
/// ```
pub struct IterMut<'a, K, V> {
shards: std::slice::Iter<'a, CachePadded<RwLock<HashMap<K, V>>>>,
current: Option<GuardIterMut<'a, K, V>>,
}
impl<'a, K: 'a, V: 'a> IterMut<'a, K, V> {
pub(crate) fn new<S>(map: &'a DashMap<K, V, S>) -> Self {
Self {
shards: map.shards.iter(),
current: None,
}
}
}
impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
    type Item = RefMutMulti<'a, K, V>;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Drain the shard whose write guard we currently hold.
            if let Some(current) = self.current.as_mut() {
                if let Some((k, v)) = current.1.next() {
                    // Each yielded mutable ref shares ownership of the guard,
                    // keeping the shard write-locked while the ref is alive.
                    let guard = current.0.clone();
                    return Some(RefMutMulti::new(guard, k, v));
                }
            }
            // Current shard exhausted (or first call): write-lock the next
            // shard; `?` ends iteration when no shards remain.
            let guard = self.shards.next()?.write();
            // SAFETY: we keep the guard alive with the shard iterator,
            // and with any refs produced by the iterator
            let (guard, shard) = unsafe { RwLockWriteGuardDetached::detach_from(guard) };
            let iter = shard.iter_mut();
            self.current = Some((Arc::new(guard), iter));
        }
    }
}
#[cfg(test)]
mod tests {
use crate::DashMap;
#[test]
fn iter_mut_manual_count() {
let map = DashMap::new();
map.insert("Johnny", 21);
assert_eq!(map.len(), 1);
let mut c = 0;
for shard in map.shards() {
c += shard.write().iter().count();
}
assert_eq!(c, 1);
}
#[test]
fn iter_mut_count() {
let map = DashMap::new();
map.insert("Johnny", 21);
assert_eq!(map.len(), 1);
assert_eq!(map.iter_mut().count(), 1);
}
#[test]
fn iter_count() {
let map = DashMap::new();
map.insert("Johnny", 21);
assert_eq!(map.len(), 1);
assert_eq!(map.iter().count(), 1);
}
}
-38
View File
@@ -1,38 +0,0 @@
use crate::setref::multiple::RefMulti;
use core::hash::Hash;
pub struct OwningIter<K> {
inner: crate::iter::OwningIter<K, ()>,
}
impl<K: Eq + Hash> OwningIter<K> {
pub(crate) fn new(inner: crate::iter::OwningIter<K, ()>) -> Self {
Self { inner }
}
}
impl<K: Eq + Hash> Iterator for OwningIter<K> {
    type Item = K;

    /// Yields owned keys by discarding the `()` values of the inner map iterator.
    fn next(&mut self) -> Option<Self::Item> {
        let (key, ()) = self.inner.next()?;
        Some(key)
    }
}
pub struct Iter<'a, K> {
inner: crate::iter::Iter<'a, K, ()>,
}
impl<'a, K: Eq + Hash + 'a> Iter<'a, K> {
pub(crate) fn new(inner: crate::iter::Iter<'a, K, ()>) -> Self {
Self { inner }
}
}
impl<'a, K: Eq + Hash + 'a> Iterator for Iter<'a, K> {
    type Item = RefMulti<'a, K>;

    /// Wraps each map entry ref in the set's key-only ref type.
    fn next(&mut self) -> Option<Self::Item> {
        let entry = self.inner.next()?;
        Some(RefMulti::new(entry))
    }
}
File diff suppressed because it is too large Load Diff
-300
View File
@@ -1,300 +0,0 @@
use core::sync::atomic::{AtomicUsize, Ordering};
use parking_lot_core::{ParkToken, SpinWait, UnparkToken};
pub type RwLock<T> = lock_api::RwLock<RawRwLock, T>;
pub(crate) type RwLockReadGuardDetached<'a> = crate::util::RwLockReadGuardDetached<'a, RawRwLock>;
pub(crate) type RwLockWriteGuardDetached<'a> = crate::util::RwLockWriteGuardDetached<'a, RawRwLock>;
const READERS_PARKED: usize = 0b0001;
const WRITERS_PARKED: usize = 0b0010;
const ONE_READER: usize = 0b0100;
const ONE_WRITER: usize = !(READERS_PARKED | WRITERS_PARKED);
pub struct RawRwLock {
state: AtomicUsize,
}
// Raw reader-writer lock over a single `AtomicUsize`, parking blocked
// threads via `parking_lot_core`.
//
// State layout (see the constants above): bit 0 = readers parked,
// bit 1 = writers parked; the remaining bits count readers in units of
// `ONE_READER`, and all counter bits set (`ONE_WRITER`) means write-locked.
unsafe impl lock_api::RawRwLock for RawRwLock {
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT: Self = Self {
        state: AtomicUsize::new(0),
    };

    // Guards may be sent to other threads.
    type GuardMarker = lock_api::GuardSend;

    #[inline]
    fn try_lock_exclusive(&self) -> bool {
        // A writer may only enter from the completely unlocked state.
        self.state
            .compare_exchange(0, ONE_WRITER, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }

    #[inline]
    fn lock_exclusive(&self) {
        // Fast path: single weak CAS from unlocked; fall back to the parking
        // slow path on contention (or a spurious CAS failure).
        if self
            .state
            .compare_exchange_weak(0, ONE_WRITER, Ordering::Acquire, Ordering::Relaxed)
            .is_err()
        {
            self.lock_exclusive_slow();
        }
    }

    #[inline]
    unsafe fn unlock_exclusive(&self) {
        // Fast path succeeds only when no parked-thread bits are set;
        // otherwise the slow path must wake waiters.
        if self
            .state
            .compare_exchange(ONE_WRITER, 0, Ordering::Release, Ordering::Relaxed)
            .is_err()
        {
            self.unlock_exclusive_slow();
        }
    }

    #[inline]
    fn try_lock_shared(&self) -> bool {
        self.try_lock_shared_fast() || self.try_lock_shared_slow()
    }

    #[inline]
    fn lock_shared(&self) {
        if !self.try_lock_shared_fast() {
            self.lock_shared_slow();
        }
    }

    #[inline]
    unsafe fn unlock_shared(&self) {
        let state = self.state.fetch_sub(ONE_READER, Ordering::Release);
        // We were the last reader and a writer is parked: wake it.
        if state == (ONE_READER | WRITERS_PARKED) {
            self.unlock_shared_slow();
        }
    }
}
unsafe impl lock_api::RawRwLockDowngrade for RawRwLock {
    #[inline]
    unsafe fn downgrade(&self) {
        // Atomically replace the exclusive lock with a single reader,
        // preserving only the WRITERS_PARKED bit of the old state.
        let state = self
            .state
            .fetch_and(ONE_READER | WRITERS_PARKED, Ordering::Release);
        // Readers park on address `self + 1` (writers use `self`, see the
        // slow paths below); wake them all now that the lock is read-mode.
        if state & READERS_PARKED != 0 {
            parking_lot_core::unpark_all((self as *const _ as usize) + 1, UnparkToken(0));
        }
    }
}
impl RawRwLock {
#[cold]
fn lock_exclusive_slow(&self) {
let mut acquire_with = 0;
loop {
let mut spin = SpinWait::new();
let mut state = self.state.load(Ordering::Relaxed);
loop {
while state & ONE_WRITER == 0 {
match self.state.compare_exchange_weak(
state,
state | ONE_WRITER | acquire_with,
Ordering::Acquire,
Ordering::Relaxed,
) {
Ok(_) => return,
Err(e) => state = e,
}
}
if state & WRITERS_PARKED == 0 {
if spin.spin() {
state = self.state.load(Ordering::Relaxed);
continue;
}
if let Err(e) = self.state.compare_exchange_weak(
state,
state | WRITERS_PARKED,
Ordering::Relaxed,
Ordering::Relaxed,
) {
state = e;
continue;
}
}
let _ = unsafe {
parking_lot_core::park(
self as *const _ as usize,
|| {
let state = self.state.load(Ordering::Relaxed);
(state & ONE_WRITER != 0) && (state & WRITERS_PARKED != 0)
},
|| {},
|_, _| {},
ParkToken(0),
None,
)
};
acquire_with = WRITERS_PARKED;
break;
}
}
}
#[cold]
fn unlock_exclusive_slow(&self) {
let state = self.state.load(Ordering::Relaxed);
assert_eq!(state & ONE_WRITER, ONE_WRITER);
let mut parked = state & (READERS_PARKED | WRITERS_PARKED);
assert_ne!(parked, 0);
if parked != (READERS_PARKED | WRITERS_PARKED) {
if let Err(new_state) =
self.state
.compare_exchange(state, 0, Ordering::Release, Ordering::Relaxed)
{
assert_eq!(new_state, ONE_WRITER | READERS_PARKED | WRITERS_PARKED);
parked = READERS_PARKED | WRITERS_PARKED;
}
}
if parked == (READERS_PARKED | WRITERS_PARKED) {
self.state.store(WRITERS_PARKED, Ordering::Release);
parked = READERS_PARKED;
}
if parked == READERS_PARKED {
return unsafe {
parking_lot_core::unpark_all((self as *const _ as usize) + 1, UnparkToken(0));
};
}
assert_eq!(parked, WRITERS_PARKED);
unsafe {
parking_lot_core::unpark_one(self as *const _ as usize, |_| UnparkToken(0));
}
}
#[inline(always)]
fn try_lock_shared_fast(&self) -> bool {
let state = self.state.load(Ordering::Relaxed);
if let Some(new_state) = state.checked_add(ONE_READER) {
if new_state & ONE_WRITER != ONE_WRITER {
return self
.state
.compare_exchange_weak(state, new_state, Ordering::Acquire, Ordering::Relaxed)
.is_ok();
}
}
false
}
#[cold]
fn try_lock_shared_slow(&self) -> bool {
let mut state = self.state.load(Ordering::Relaxed);
while let Some(new_state) = state.checked_add(ONE_READER) {
if new_state & ONE_WRITER == ONE_WRITER {
break;
}
match self.state.compare_exchange_weak(
state,
new_state,
Ordering::Acquire,
Ordering::Relaxed,
) {
Ok(_) => return true,
Err(e) => state = e,
}
}
false
}
#[cold]
fn lock_shared_slow(&self) {
loop {
let mut spin = SpinWait::new();
let mut state = self.state.load(Ordering::Relaxed);
loop {
let mut backoff = SpinWait::new();
while let Some(new_state) = state.checked_add(ONE_READER) {
assert_ne!(
new_state & ONE_WRITER,
ONE_WRITER,
"reader count overflowed",
);
if self
.state
.compare_exchange_weak(
state,
new_state,
Ordering::Acquire,
Ordering::Relaxed,
)
.is_ok()
{
return;
}
backoff.spin_no_yield();
state = self.state.load(Ordering::Relaxed);
}
if state & READERS_PARKED == 0 {
if spin.spin() {
state = self.state.load(Ordering::Relaxed);
continue;
}
if let Err(e) = self.state.compare_exchange_weak(
state,
state | READERS_PARKED,
Ordering::Relaxed,
Ordering::Relaxed,
) {
state = e;
continue;
}
}
let _ = unsafe {
parking_lot_core::park(
(self as *const _ as usize) + 1,
|| {
let state = self.state.load(Ordering::Relaxed);
(state & ONE_WRITER == ONE_WRITER) && (state & READERS_PARKED != 0)
},
|| {},
|_, _| {},
ParkToken(0),
None,
)
};
break;
}
}
}
#[cold]
fn unlock_shared_slow(&self) {
if self
.state
.compare_exchange(WRITERS_PARKED, 0, Ordering::Relaxed, Ordering::Relaxed)
.is_ok()
{
unsafe {
parking_lot_core::unpark_one(self as *const _ as usize, |_| UnparkToken(0));
}
}
}
}
-288
View File
@@ -1,288 +0,0 @@
use hashbrown::hash_table;
use super::one::RefMut;
use crate::lock::RwLockWriteGuardDetached;
use core::hash::Hash;
use core::mem;
pub enum Entry<'a, K, V> {
Occupied(OccupiedEntry<'a, K, V>),
Vacant(VacantEntry<'a, K, V>),
}
impl<'a, K: Eq + Hash, V> Entry<'a, K, V> {
/// Apply a function to the stored value if it exists.
pub fn and_modify(self, f: impl FnOnce(&mut V)) -> Self {
match self {
Entry::Occupied(mut entry) => {
f(entry.get_mut());
Entry::Occupied(entry)
}
Entry::Vacant(entry) => Entry::Vacant(entry),
}
}
/// Get the key of the entry.
pub fn key(&self) -> &K {
match *self {
Entry::Occupied(ref entry) => entry.key(),
Entry::Vacant(ref entry) => entry.key(),
}
}
/// Into the key of the entry.
pub fn into_key(self) -> K {
match self {
Entry::Occupied(entry) => entry.into_key(),
Entry::Vacant(entry) => entry.into_key(),
}
}
/// Return a mutable reference to the element if it exists,
/// otherwise insert the default and return a mutable reference to that.
pub fn or_default(self) -> RefMut<'a, K, V>
where
V: Default,
{
match self {
Entry::Occupied(entry) => entry.into_ref(),
Entry::Vacant(entry) => entry.insert(V::default()),
}
}
/// Return a mutable reference to the element if it exists,
/// otherwise a provided value and return a mutable reference to that.
pub fn or_insert(self, value: V) -> RefMut<'a, K, V> {
match self {
Entry::Occupied(entry) => entry.into_ref(),
Entry::Vacant(entry) => entry.insert(value),
}
}
/// Return a mutable reference to the element if it exists,
/// otherwise insert the result of a provided function and return a mutable reference to that.
pub fn or_insert_with(self, value: impl FnOnce() -> V) -> RefMut<'a, K, V> {
match self {
Entry::Occupied(entry) => entry.into_ref(),
Entry::Vacant(entry) => entry.insert(value()),
}
}
pub fn or_try_insert_with<E>(
self,
value: impl FnOnce() -> Result<V, E>,
) -> Result<RefMut<'a, K, V>, E> {
match self {
Entry::Occupied(entry) => Ok(entry.into_ref()),
Entry::Vacant(entry) => Ok(entry.insert(value()?)),
}
}
/// Sets the value of the entry, and returns a reference to the inserted value.
pub fn insert(self, value: V) -> RefMut<'a, K, V> {
match self {
Entry::Occupied(mut entry) => {
entry.insert(value);
entry.into_ref()
}
Entry::Vacant(entry) => entry.insert(value),
}
}
/// Sets the value of the entry, and returns an OccupiedEntry.
///
/// If you are not interested in the occupied entry,
/// consider [`insert`] as it doesn't need to clone the key.
///
/// [`insert`]: Entry::insert
pub fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V>
where
K: Clone,
{
match self {
Entry::Occupied(mut entry) => {
entry.insert(value);
entry
}
Entry::Vacant(entry) => entry.insert_entry(value),
}
}
}
pub struct VacantEntry<'a, K, V> {
shard: RwLockWriteGuardDetached<'a>,
key: K,
entry: hash_table::VacantEntry<'a, (K, V)>,
}
impl<'a, K: Eq + Hash, V> VacantEntry<'a, K, V> {
pub(crate) fn new(
shard: RwLockWriteGuardDetached<'a>,
key: K,
entry: hash_table::VacantEntry<'a, (K, V)>,
) -> Self {
Self { shard, key, entry }
}
pub fn insert(self, value: V) -> RefMut<'a, K, V> {
let occupied = self.entry.insert((self.key, value));
let (k, v) = occupied.into_mut();
RefMut::new(self.shard, k, v)
}
/// Sets the value of the entry with the VacantEntrys key, and returns an OccupiedEntry.
pub fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V>
where
K: Clone,
{
let entry = self.entry.insert((self.key.clone(), value));
OccupiedEntry::new(self.shard, self.key, entry)
}
pub fn into_key(self) -> K {
self.key
}
pub fn key(&self) -> &K {
&self.key
}
}
pub struct OccupiedEntry<'a, K, V> {
shard: RwLockWriteGuardDetached<'a>,
entry: hash_table::OccupiedEntry<'a, (K, V)>,
key: K,
}
impl<'a, K: Eq + Hash, V> OccupiedEntry<'a, K, V> {
pub(crate) fn new(
shard: RwLockWriteGuardDetached<'a>,
key: K,
entry: hash_table::OccupiedEntry<'a, (K, V)>,
) -> Self {
Self { shard, entry, key }
}
pub fn get(&self) -> &V {
&self.entry.get().1
}
pub fn get_mut(&mut self) -> &mut V {
&mut self.entry.get_mut().1
}
pub fn insert(&mut self, value: V) -> V {
mem::replace(self.get_mut(), value)
}
pub fn into_ref(self) -> RefMut<'a, K, V> {
let (k, v) = self.entry.into_mut();
RefMut::new(self.shard, k, v)
}
pub fn into_key(self) -> K {
self.key
}
pub fn key(&self) -> &K {
&self.entry.get().0
}
pub fn remove(self) -> V {
let ((_k, v), _) = self.entry.remove();
v
}
pub fn remove_entry(self) -> (K, V) {
let ((k, v), _) = self.entry.remove();
(k, v)
}
pub fn replace_entry(self, value: V) -> (K, V) {
let (k, v) = mem::replace(self.entry.into_mut(), (self.key, value));
(k, v)
}
}
#[cfg(test)]
mod tests {
use crate::DashMap;
use super::*;
#[test]
fn test_insert_into_vacant() {
let map: DashMap<u32, u32> = DashMap::new();
let entry = map.entry(1);
assert!(matches!(entry, Entry::Vacant(_)));
let val = entry.insert(2);
assert_eq!(*val, 2);
drop(val);
assert_eq!(*map.get(&1).unwrap(), 2);
}
#[test]
fn test_insert_into_occupied() {
let map: DashMap<u32, u32> = DashMap::new();
map.insert(1, 1000);
let entry = map.entry(1);
assert!(matches!(&entry, Entry::Occupied(entry) if *entry.get() == 1000));
let val = entry.insert(2);
assert_eq!(*val, 2);
drop(val);
assert_eq!(*map.get(&1).unwrap(), 2);
}
#[test]
fn test_insert_entry_into_vacant() {
let map: DashMap<u32, u32> = DashMap::new();
let entry = map.entry(1);
assert!(matches!(entry, Entry::Vacant(_)));
let entry = entry.insert_entry(2);
assert_eq!(*entry.get(), 2);
drop(entry);
assert_eq!(*map.get(&1).unwrap(), 2);
}
#[test]
fn test_insert_entry_into_occupied() {
let map: DashMap<u32, u32> = DashMap::new();
map.insert(1, 1000);
let entry = map.entry(1);
assert!(matches!(&entry, Entry::Occupied(entry) if *entry.get() == 1000));
let entry = entry.insert_entry(2);
assert_eq!(*entry.get(), 2);
drop(entry);
assert_eq!(*map.get(&1).unwrap(), 2);
}
}
-319
View File
@@ -1,319 +0,0 @@
use hashbrown::hash_table;
use super::one::RefMut;
use crate::lock::RwLockWriteGuardDetached;
use core::hash::Hash;
use std::mem;
/// Entry with a borrowed key.
pub enum EntryRef<'a, 'q, K, Q, V> {
Occupied(OccupiedEntryRef<'a, 'q, K, Q, V>),
Vacant(VacantEntryRef<'a, 'q, K, Q, V>),
}
impl<'a, 'q, K: Eq + Hash, Q, V> EntryRef<'a, 'q, K, Q, V> {
/// Apply a function to the stored value if it exists.
pub fn and_modify(self, f: impl FnOnce(&mut V)) -> Self {
match self {
EntryRef::Occupied(mut entry) => {
f(entry.get_mut());
EntryRef::Occupied(entry)
}
EntryRef::Vacant(entry) => EntryRef::Vacant(entry),
}
}
}
impl<'a, 'q, K: Eq + Hash + From<&'q Q>, Q, V> EntryRef<'a, 'q, K, Q, V> {
/// Get the key of the entry.
pub fn key(&self) -> &Q {
match *self {
EntryRef::Occupied(ref entry) => entry.key(),
EntryRef::Vacant(ref entry) => entry.key(),
}
}
/// Into the key of the entry.
pub fn into_key(self) -> K {
match self {
EntryRef::Occupied(entry) => entry.into_key(),
EntryRef::Vacant(entry) => entry.into_key(),
}
}
/// Return a mutable reference to the element if it exists,
/// otherwise insert the default and return a mutable reference to that.
pub fn or_default(self) -> RefMut<'a, K, V>
where
V: Default,
{
match self {
EntryRef::Occupied(entry) => entry.into_ref(),
EntryRef::Vacant(entry) => entry.insert(V::default()),
}
}
/// Return a mutable reference to the element if it exists,
/// otherwise a provided value and return a mutable reference to that.
pub fn or_insert(self, value: V) -> RefMut<'a, K, V> {
match self {
EntryRef::Occupied(entry) => entry.into_ref(),
EntryRef::Vacant(entry) => entry.insert(value),
}
}
/// Return a mutable reference to the element if it exists,
/// otherwise insert the result of a provided function and return a mutable reference to that.
pub fn or_insert_with(self, value: impl FnOnce() -> V) -> RefMut<'a, K, V> {
match self {
EntryRef::Occupied(entry) => entry.into_ref(),
EntryRef::Vacant(entry) => entry.insert(value()),
}
}
pub fn or_try_insert_with<E>(
self,
value: impl FnOnce() -> Result<V, E>,
) -> Result<RefMut<'a, K, V>, E> {
match self {
EntryRef::Occupied(entry) => Ok(entry.into_ref()),
EntryRef::Vacant(entry) => Ok(entry.insert(value()?)),
}
}
/// Sets the value of the entry, and returns a reference to the inserted value.
pub fn insert(self, value: V) -> RefMut<'a, K, V> {
match self {
EntryRef::Occupied(mut entry) => {
entry.insert(value);
entry.into_ref()
}
EntryRef::Vacant(entry) => entry.insert(value),
}
}
/// Sets the value of the entry, and returns an OccupiedEntryRef.
///
/// If you are not interested in the occupied entry,
/// consider [`insert`] as it doesn't need to clone the key.
///
/// [`insert`]: EntryRef::insert
pub fn insert_entry(self, value: V) -> OccupiedEntryRef<'a, 'q, K, Q, V>
where
K: Clone,
{
match self {
EntryRef::Occupied(mut entry) => {
entry.insert(value);
entry
}
EntryRef::Vacant(entry) => entry.insert_entry(value),
}
}
}
pub struct VacantEntryRef<'a, 'q, K, Q, V> {
shard: RwLockWriteGuardDetached<'a>,
entry: hash_table::VacantEntry<'a, (K, V)>,
key: &'q Q,
}
impl<'a, 'q, K: Eq + Hash, Q, V> VacantEntryRef<'a, 'q, K, Q, V> {
    // Invariant: `entry` belongs to the table protected by `shard`.
    pub(crate) fn new(
        shard: RwLockWriteGuardDetached<'a>,
        key: &'q Q,
        entry: hash_table::VacantEntry<'a, (K, V)>,
    ) -> Self {
        Self { shard, entry, key }
    }
    /// Insert `value`, building the owned key from the borrowed query key,
    /// and return a mutable reference that keeps the shard locked.
    pub fn insert(self, value: V) -> RefMut<'a, K, V>
    where
        K: From<&'q Q>,
    {
        let k = K::from(self.key);
        let occupied = self.entry.insert((k, value));
        let (k, v) = occupied.into_mut();
        RefMut::new(self.shard, k, v)
    }
    /// Sets the value of the entry with the VacantEntryRefs key, and returns an OccupiedEntry.
    pub fn insert_entry(self, value: V) -> OccupiedEntryRef<'a, 'q, K, Q, V>
    where
        K: From<&'q Q>,
    {
        let k = K::from(self.key);
        let entry = self.entry.insert((k, value));
        OccupiedEntryRef::new(self.shard, self.key, entry)
    }
    /// Build and return the owned key without inserting anything.
    pub fn into_key(self) -> K
    where
        K: From<&'q Q>,
    {
        K::from(self.key)
    }
    /// The borrowed key this entry was looked up with.
    pub fn key(&self) -> &'q Q {
        self.key
    }
}
/// An occupied slot located via a borrowed key `&'q Q`; holds the shard
/// write guard for its whole lifetime.
pub struct OccupiedEntryRef<'a, 'q, K, Q, V> {
    shard: RwLockWriteGuardDetached<'a>,
    entry: hash_table::OccupiedEntry<'a, (K, V)>,
    key: &'q Q,
}
impl<'a, 'q, K: Eq + Hash, Q, V> OccupiedEntryRef<'a, 'q, K, Q, V> {
    // Invariant: `entry` belongs to the table protected by `shard`.
    pub(crate) fn new(
        shard: RwLockWriteGuardDetached<'a>,
        key: &'q Q,
        entry: hash_table::OccupiedEntry<'a, (K, V)>,
    ) -> Self {
        Self { shard, entry, key }
    }
    /// Shared borrow of the stored value.
    pub fn get(&self) -> &V {
        &self.entry.get().1
    }
    /// Exclusive borrow of the stored value.
    pub fn get_mut(&mut self) -> &mut V {
        &mut self.entry.get_mut().1
    }
    /// Replace the stored value, returning the previous one.
    pub fn insert(&mut self, value: V) -> V {
        mem::replace(self.get_mut(), value)
    }
    /// Convert into a `RefMut`, transferring the shard guard.
    pub fn into_ref(self) -> RefMut<'a, K, V> {
        let (k, v) = self.entry.into_mut();
        RefMut::new(self.shard, k, v)
    }
    /// Build an owned key from the borrowed query key (the stored key is
    /// left in the map untouched).
    pub fn into_key(self) -> K
    where
        K: From<&'q Q>,
    {
        K::from(self.key)
    }
    /// The borrowed key this entry was looked up with.
    pub fn key(&self) -> &'q Q {
        self.key
    }
    /// Remove the entry, returning its value.
    pub fn remove(self) -> V {
        let ((_k, v), _) = self.entry.remove();
        v
    }
    /// Remove the entry, returning the stored key and value.
    pub fn remove_entry(self) -> (K, V) {
        let ((k, v), _) = self.entry.remove();
        (k, v)
    }
    /// Replace both the stored key (rebuilt from the query key) and the
    /// value, returning the previous pair.
    pub fn replace_entry(self, value: V) -> (K, V)
    where
        K: From<&'q Q>,
    {
        let (k, v) = mem::replace(self.entry.into_mut(), (K::from(self.key), value));
        (k, v)
    }
}
#[cfg(test)]
mod tests {
    use equivalent::Equivalent;
    use crate::DashMap;
    use super::*;
    // Borrowed-key type: hashes like the wrapped `u32` (derived newtype Hash)
    // and compares as equivalent to it, so it can drive `entry_ref` lookups
    // on a `DashMap<u32, _>`.
    #[derive(Hash, PartialEq, Eq, Debug)]
    struct K(u32);
    // Lets a vacant entry build the owned `u32` key from `&K`.
    impl From<&K> for u32 {
        fn from(value: &K) -> Self {
            value.0
        }
    }
    impl Equivalent<u32> for K {
        fn equivalent(&self, key: &u32) -> bool {
            self.0 == *key
        }
    }
    #[test]
    fn test_insert_into_vacant() {
        let map: DashMap<u32, u32> = DashMap::new();
        let entry = map.entry_ref(&K(1));
        assert!(matches!(entry, EntryRef::Vacant(_)));
        let val = entry.insert(2);
        assert_eq!(*val, 2);
        // Drop the RefMut before reading: it holds the shard write lock.
        drop(val);
        assert_eq!(*map.get(&1).unwrap(), 2);
    }
    #[test]
    fn test_insert_into_occupied() {
        let map: DashMap<u32, u32> = DashMap::new();
        map.insert(1, 1000);
        let entry = map.entry_ref(&K(1));
        assert!(matches!(&entry, EntryRef::Occupied(entry) if *entry.get() == 1000));
        let val = entry.insert(2);
        assert_eq!(*val, 2);
        drop(val);
        assert_eq!(*map.get(&1).unwrap(), 2);
    }
    #[test]
    fn test_insert_entry_into_vacant() {
        let map: DashMap<u32, u32> = DashMap::new();
        let entry = map.entry_ref(&K(1));
        assert!(matches!(entry, EntryRef::Vacant(_)));
        let entry = entry.insert_entry(2);
        assert_eq!(*entry.get(), 2);
        drop(entry);
        assert_eq!(*map.get(&1).unwrap(), 2);
    }
    #[test]
    fn test_insert_entry_into_occupied() {
        let map: DashMap<u32, u32> = DashMap::new();
        map.insert(1, 1000);
        let entry = map.entry_ref(&K(1));
        assert!(matches!(&entry, EntryRef::Occupied(entry) if *entry.get() == 1000));
        let entry = entry.insert_entry(2);
        assert_eq!(*entry.get(), 2);
        drop(entry);
        assert_eq!(*map.get(&1).unwrap(), 2);
    }
}
-5
View File
@@ -1,5 +0,0 @@
// Map-reference flavours, grouped by how the reference was obtained.
// Entry API keyed by an owned `K`.
pub mod entry;
// Entry API keyed by a borrowed `&Q` (`EntryRef`).
pub mod entry_ref;
// Shared/exclusive refs produced by iterators (guard shared via `Arc`).
pub mod multiple;
// Single-entry refs (`Ref`, `RefMut`) and their mapped projections.
pub mod one;
// hashbrown-style raw entry API.
pub mod raw_entry;
-90
View File
@@ -1,90 +0,0 @@
use crate::lock::{RwLockReadGuardDetached, RwLockWriteGuardDetached};
use core::hash::Hash;
use core::ops::{Deref, DerefMut};
use std::sync::Arc;
/// A shared reference to one entry, produced by iteration; the shard read
/// guard is shared between all refs from the same shard via `Arc`.
pub struct RefMulti<'a, K, V> {
    _guard: Arc<RwLockReadGuardDetached<'a>>,
    k: &'a K,
    v: &'a V,
}
impl<'a, K, V> RefMulti<'a, K, V> {
    /// Bundles a key/value borrow with the shared shard read guard that
    /// keeps both alive.
    pub(crate) fn new(guard: Arc<RwLockReadGuardDetached<'a>>, k: &'a K, v: &'a V) -> Self {
        Self { _guard: guard, k, v }
    }

    /// Borrow the key.
    pub fn key(&self) -> &K {
        self.k
    }

    /// Borrow the value.
    pub fn value(&self) -> &V {
        self.v
    }

    /// Borrow key and value together.
    pub fn pair(&self) -> (&K, &V) {
        (self.k, self.v)
    }
}
// Deref to the value, so `*r` reads the stored `V`.
impl<'a, K: Eq + Hash, V> Deref for RefMulti<'a, K, V> {
    type Target = V;
    fn deref(&self) -> &V {
        self.value()
    }
}
/// An exclusive reference to one entry, produced by mutable iteration; the
/// shard write guard is shared between refs from the same shard via `Arc`.
pub struct RefMutMulti<'a, K, V> {
    _guard: Arc<RwLockWriteGuardDetached<'a>>,
    k: &'a K,
    v: &'a mut V,
}
impl<'a, K, V> RefMutMulti<'a, K, V> {
    /// Bundles a key borrow and an exclusive value borrow with the shared
    /// shard write guard that keeps both alive.
    pub(crate) fn new(guard: Arc<RwLockWriteGuardDetached<'a>>, k: &'a K, v: &'a mut V) -> Self {
        Self { _guard: guard, k, v }
    }

    /// Borrow the key.
    pub fn key(&self) -> &K {
        self.k
    }

    /// Borrow the value immutably.
    pub fn value(&self) -> &V {
        self.v
    }

    /// Borrow the value mutably.
    pub fn value_mut(&mut self) -> &mut V {
        self.v
    }

    /// Borrow key and value together.
    pub fn pair(&self) -> (&K, &V) {
        (self.k, self.v)
    }

    /// Borrow the key together with a mutable borrow of the value.
    pub fn pair_mut(&mut self) -> (&K, &mut V) {
        (self.k, self.v)
    }
}
// Deref to the value, so `*r` reads the stored `V`.
impl<'a, K: Eq + Hash, V> Deref for RefMutMulti<'a, K, V> {
    type Target = V;
    fn deref(&self) -> &V {
        self.value()
    }
}
// DerefMut so `*r = x` writes the stored `V` in place.
impl<'a, K: Eq + Hash, V> DerefMut for RefMutMulti<'a, K, V> {
    fn deref_mut(&mut self) -> &mut V {
        self.value_mut()
    }
}
-386
View File
@@ -1,386 +0,0 @@
use crate::lock::{RwLockReadGuardDetached, RwLockWriteGuardDetached};
use core::ops::{Deref, DerefMut};
use std::fmt::{Debug, Formatter};
/// A shared reference to one map entry; the shard read lock is held until
/// this value is dropped.
pub struct Ref<'a, K: ?Sized, V: ?Sized> {
    _guard: RwLockReadGuardDetached<'a>,
    k: &'a K,
    v: &'a V,
}
impl<'a, K: ?Sized, V: ?Sized> Ref<'a, K, V> {
    // Invariant: `k`/`v` point into the shard protected by `guard`.
    pub(crate) fn new(guard: RwLockReadGuardDetached<'a>, k: &'a K, v: &'a V) -> Self {
        Self {
            _guard: guard,
            k,
            v,
        }
    }
    /// Borrow the key.
    pub fn key(&self) -> &K {
        self.pair().0
    }
    /// Borrow the value.
    pub fn value(&self) -> &V {
        self.pair().1
    }
    /// Borrow key and value together.
    pub fn pair(&self) -> (&K, &V) {
        (self.k, self.v)
    }
    /// Project the guarded value to a component of itself; the shard lock
    /// moves into the returned `MappedRef`.
    pub fn map<F, T: ?Sized>(self, f: F) -> MappedRef<'a, K, T>
    where
        F: FnOnCE(&V) -> &T,
    {
        MappedRef {
            _guard: self._guard,
            k: self.k,
            v: f(self.v),
        }
    }
    /// Fallible projection; on `None` the original `Ref` is handed back
    /// unchanged (the lock is never released in between).
    pub fn try_map<F, T: ?Sized>(self, f: F) -> Result<MappedRef<'a, K, T>, Self>
    where
        F: FnOnce(&V) -> Option<&T>,
    {
        if let Some(v) = f(self.v) {
            Ok(MappedRef {
                _guard: self._guard,
                k: self.k,
                v,
            })
        } else {
            Err(self)
        }
    }
}
// Debug shows both halves of the entry, not just the value.
impl<'a, K: Debug + ?Sized, V: Debug + ?Sized> Debug for Ref<'a, K, V> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Ref")
            .field("k", &self.k)
            .field("v", &self.v)
            .finish()
    }
}
// Deref to the value, so `*r` reads the stored `V`.
impl<'a, K: ?Sized, V: ?Sized> Deref for Ref<'a, K, V> {
    type Target = V;
    fn deref(&self) -> &V {
        self.value()
    }
}
/// An exclusive reference to one map entry; the shard write lock is held
/// until this value is dropped (or downgraded).
pub struct RefMut<'a, K: ?Sized, V: ?Sized> {
    guard: RwLockWriteGuardDetached<'a>,
    k: &'a K,
    v: &'a mut V,
}
impl<'a, K: ?Sized, V: ?Sized> RefMut<'a, K, V> {
    // Invariant: `k`/`v` point into the shard protected by `guard`.
    pub(crate) fn new(guard: RwLockWriteGuardDetached<'a>, k: &'a K, v: &'a mut V) -> Self {
        Self { guard, k, v }
    }
    /// Borrow the key.
    pub fn key(&self) -> &K {
        self.pair().0
    }
    /// Borrow the value immutably.
    pub fn value(&self) -> &V {
        self.pair().1
    }
    /// Borrow the value mutably.
    pub fn value_mut(&mut self) -> &mut V {
        self.pair_mut().1
    }
    /// Borrow key and value together.
    pub fn pair(&self) -> (&K, &V) {
        (self.k, self.v)
    }
    /// Borrow the key together with a mutable borrow of the value.
    pub fn pair_mut(&mut self) -> (&K, &mut V) {
        (self.k, self.v)
    }
    /// Atomically turn this write ref into a read ref (the write lock is
    /// downgraded, never released, so no other writer can slip in).
    pub fn downgrade(self) -> Ref<'a, K, V> {
        Ref::new(
            unsafe { RwLockWriteGuardDetached::downgrade(self.guard) },
            self.k,
            self.v,
        )
    }
    /// Project the guarded value to a component of itself; the shard lock
    /// moves into the returned `MappedRefMut`.
    pub fn map<F, T: ?Sized>(self, f: F) -> MappedRefMut<'a, K, T>
    where
        F: FnOnce(&mut V) -> &mut T,
    {
        MappedRefMut {
            _guard: self.guard,
            k: self.k,
            v: f(&mut *self.v),
        }
    }
    /// Fallible projection; on `None` the original `RefMut` is handed back.
    pub fn try_map<F, T: ?Sized>(self, f: F) -> Result<MappedRefMut<'a, K, T>, Self>
    where
        F: FnOnce(&mut V) -> Option<&mut T>,
    {
        // SAFETY: the raw-pointer round-trip detaches the closure's borrow
        // from `self` so that `self` can still be returned whole in the
        // `None` branch. Exactly one of `v` / `self` survives each branch,
        // so no aliasing mutable borrows ever coexist.
        let v = match f(unsafe { &mut *(self.v as *mut _) }) {
            Some(v) => v,
            None => return Err(self),
        };
        let guard = self.guard;
        let k = self.k;
        Ok(MappedRefMut {
            _guard: guard,
            k,
            v,
        })
    }
}
/// Debug formatting mirrors [`Ref`], showing both the key and the value.
impl<'a, K: Debug + ?Sized, V: Debug + ?Sized> Debug for RefMut<'a, K, V> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("RefMut").field("k", &self.k).field("v", &self.v).finish()
    }
}
// Deref to the value, so `*r` reads the stored `V`.
impl<'a, K: ?Sized, V: ?Sized> Deref for RefMut<'a, K, V> {
    type Target = V;
    fn deref(&self) -> &V {
        self.value()
    }
}
// DerefMut so `*r = x` writes the stored `V` in place.
impl<'a, K: ?Sized, V: ?Sized> DerefMut for RefMut<'a, K, V> {
    fn deref_mut(&mut self) -> &mut V {
        self.value_mut()
    }
}
/// A `Ref` whose value borrow has been projected to some component `T`;
/// still holds the shard read lock.
pub struct MappedRef<'a, K: ?Sized, T: ?Sized> {
    _guard: RwLockReadGuardDetached<'a>,
    k: &'a K,
    v: &'a T,
}
impl<'a, K: ?Sized, T: ?Sized> MappedRef<'a, K, T> {
    /// Borrow the key of the underlying entry.
    pub fn key(&self) -> &K {
        self.pair().0
    }
    /// Borrow the projected value.
    pub fn value(&self) -> &T {
        self.pair().1
    }
    /// Borrow key and projected value together.
    pub fn pair(&self) -> (&K, &T) {
        (self.k, self.v)
    }
    /// Project the guarded value further; the shard lock moves along.
    ///
    /// `T2: ?Sized` matches `try_map` below (and `Ref::map`), so chained
    /// projections can target unsized types such as `str` or slices.
    pub fn map<F, T2: ?Sized>(self, f: F) -> MappedRef<'a, K, T2>
    where
        F: FnOnce(&T) -> &T2,
    {
        MappedRef {
            _guard: self._guard,
            k: self.k,
            v: f(self.v),
        }
    }
    /// Fallible further projection; on `None` the original mapped ref is
    /// handed back unchanged (the lock is never released in between).
    pub fn try_map<F, T2: ?Sized>(self, f: F) -> Result<MappedRef<'a, K, T2>, Self>
    where
        F: FnOnce(&T) -> Option<&T2>,
    {
        let v = match f(self.v) {
            Some(v) => v,
            None => return Err(self),
        };
        let guard = self._guard;
        Ok(MappedRef {
            _guard: guard,
            k: self.k,
            v,
        })
    }
}
// Debug shows the key plus the projected value.
impl<'a, K: Debug + ?Sized, T: Debug + ?Sized> Debug for MappedRef<'a, K, T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MappedRef")
            .field("k", &self.k)
            .field("v", &self.v)
            .finish()
    }
}
// Deref to the projected value.
impl<'a, K: ?Sized, T: ?Sized> Deref for MappedRef<'a, K, T> {
    type Target = T;
    fn deref(&self) -> &T {
        self.value()
    }
}
// Display forwards to the projected value's Display.
impl<'a, K: ?Sized, T: std::fmt::Display + ?Sized> std::fmt::Display for MappedRef<'a, K, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self.value(), f)
    }
}
// AsRef forwards through the projected value's own AsRef impls.
impl<'a, K: ?Sized, T: ?Sized + AsRef<TDeref>, TDeref: ?Sized> AsRef<TDeref>
    for MappedRef<'a, K, T>
{
    fn as_ref(&self) -> &TDeref {
        self.value().as_ref()
    }
}
/// A `RefMut` whose value borrow has been projected to some component `T`;
/// still holds the shard write lock.
pub struct MappedRefMut<'a, K: ?Sized, T: ?Sized> {
    _guard: RwLockWriteGuardDetached<'a>,
    k: &'a K,
    v: &'a mut T,
}
impl<'a, K: ?Sized, T: ?Sized> MappedRefMut<'a, K, T> {
    /// Borrow the key of the underlying entry.
    pub fn key(&self) -> &K {
        self.pair().0
    }
    /// Borrow the projected value immutably.
    pub fn value(&self) -> &T {
        self.pair().1
    }
    /// Borrow the projected value mutably.
    pub fn value_mut(&mut self) -> &mut T {
        self.pair_mut().1
    }
    /// Borrow key and projected value together.
    pub fn pair(&self) -> (&K, &T) {
        (self.k, self.v)
    }
    /// Borrow the key together with a mutable borrow of the projected value.
    pub fn pair_mut(&mut self) -> (&K, &mut T) {
        (self.k, self.v)
    }
    /// Project the guarded value further; the shard lock moves along.
    pub fn map<F, T2: ?Sized>(self, f: F) -> MappedRefMut<'a, K, T2>
    where
        F: FnOnce(&mut T) -> &mut T2,
    {
        MappedRefMut {
            _guard: self._guard,
            k: self.k,
            v: f(self.v),
        }
    }
    /// Fallible further projection; on `None` the original mapped ref is
    /// handed back unchanged.
    pub fn try_map<F, T2: ?Sized>(self, f: F) -> Result<MappedRefMut<'a, K, T2>, Self>
    where
        F: FnOnce(&mut T) -> Option<&mut T2>,
    {
        // SAFETY: same trick as `RefMut::try_map` — the raw-pointer cast
        // detaches the closure's borrow so `self` can be returned whole in
        // the `None` branch; only one of `v` / `self` survives each branch.
        let v = match f(unsafe { &mut *(self.v as *mut _) }) {
            Some(v) => v,
            None => return Err(self),
        };
        let guard = self._guard;
        let k = self.k;
        Ok(MappedRefMut {
            _guard: guard,
            k,
            v,
        })
    }
}
// Debug shows the key plus the projected value.
impl<'a, K: Debug + ?Sized, T: Debug + ?Sized> Debug for MappedRefMut<'a, K, T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MappedRefMut")
            .field("k", &self.k)
            .field("v", &self.v)
            .finish()
    }
}
// Deref to the projected value.
impl<'a, K: ?Sized, T: ?Sized> Deref for MappedRefMut<'a, K, T> {
    type Target = T;
    fn deref(&self) -> &T {
        self.value()
    }
}
// DerefMut so the projected value can be written through `*r`.
impl<'a, K: ?Sized, T: ?Sized> DerefMut for MappedRefMut<'a, K, T> {
    fn deref_mut(&mut self) -> &mut T {
        self.value_mut()
    }
}
#[cfg(test)]
mod tests {
    use crate::DashMap;
    #[test]
    fn downgrade() {
        let data = DashMap::new();
        data.insert("test", "test");
        // Write, then downgrade to a read ref without releasing the lock.
        if let Some(mut w_ref) = data.get_mut("test") {
            *w_ref.value_mut() = "test2";
            let r_ref = w_ref.downgrade();
            assert_eq!(*r_ref.value(), "test2");
        };
    }
    #[test]
    fn mapped_mut() {
        let data = DashMap::new();
        data.insert("test", *b"test");
        // Project the byte array to &mut str and mutate through the projection.
        if let Some(b_ref) = data.get_mut("test") {
            let mut s_ref = b_ref.try_map(|b| std::str::from_utf8_mut(b).ok()).unwrap();
            s_ref.value_mut().make_ascii_uppercase();
        }
        assert_eq!(data.get("test").unwrap().value(), b"TEST");
    }
    #[test]
    fn mapped_mut_again() {
        let data = DashMap::new();
        data.insert("test", *b"hello world");
        // Chained mutable projections: bytes -> str -> first five chars.
        if let Some(b_ref) = data.get_mut("test") {
            let s_ref = b_ref.try_map(|b| std::str::from_utf8_mut(b).ok()).unwrap();
            let mut hello_ref = s_ref.try_map(|s| s.get_mut(..5)).unwrap();
            hello_ref.value_mut().make_ascii_uppercase();
        }
        assert_eq!(data.get("test").unwrap().value(), b"HELLO world");
    }
    #[test]
    fn mapped_ref() {
        let data = DashMap::new();
        data.insert("test", *b"test");
        if let Some(b_ref) = data.get("test") {
            let s_ref = b_ref.try_map(|b| std::str::from_utf8(b).ok()).unwrap();
            assert_eq!(s_ref.value(), "test");
        };
    }
    #[test]
    fn mapped_ref_again() {
        let data = DashMap::new();
        data.insert("test", *b"hello world");
        // Chained shared projections: bytes -> str -> prefix slice.
        if let Some(b_ref) = data.get("test") {
            let s_ref = b_ref.try_map(|b| std::str::from_utf8(b).ok()).unwrap();
            let hello_ref = s_ref.try_map(|s| s.get(..5)).unwrap();
            assert_eq!(hello_ref.value(), "hello");
        };
    }
}
-373
View File
@@ -1,373 +0,0 @@
use crate::{
DashMap, Equivalent,
lock::{RwLockReadGuardDetached, RwLockWriteGuardDetached},
mapref::one::Ref,
};
use core::{
hash::{BuildHasher, Hash},
mem,
};
use hashbrown::hash_table;
// Hash a single borrowed value with the map's hasher.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hash<Q, S>(hash_builder: &S, val: &Q) -> u64
where
    Q: Hash + ?Sized,
    S: BuildHasher,
{
    hash_builder.hash_one(val)
}
// Build the rehash closure hashbrown needs when a table resizes: hashes the
// key half of each stored `(Q, V)` pair.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hasher<Q, V, S>(hash_builder: &S) -> impl Fn(&(Q, V)) -> u64 + '_
where
    Q: Hash,
    S: BuildHasher,
{
    move |val| make_hash::<Q, S>(hash_builder, &val.0)
}
// Entry points into the raw-entry API (mutable and read-only builders).
impl<K, V, S> DashMap<K, V, S> {
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry_mut(&self) -> RawEntryBuilderMut<'_, K, V, S> {
        RawEntryBuilderMut { map: self }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> { RawEntryBuilder { map: self } }
}
/// Raw-entry variant of `RefMut`: exposes the key mutably as well, and keeps
/// the shard write lock alive while it exists.
pub struct RefMut<'a, K: ?Sized, V: ?Sized> {
    guard: RwLockWriteGuardDetached<'a>,
    pub key: &'a mut K,
    pub value: &'a mut V,
}
impl<'a, K: ?Sized, V: ?Sized> RefMut<'a, K, V> {
    // Invariant: `key`/`value` point into the shard protected by `guard`.
    pub(crate) fn new(
        guard: RwLockWriteGuardDetached<'a>,
        key: &'a mut K,
        value: &'a mut V,
    ) -> Self {
        Self { guard, key, value }
    }
    /// Atomically downgrade the write lock and return a shared `Ref`.
    pub fn downgrade(self) -> Ref<'a, K, V> {
        Ref::new(unsafe { RwLockWriteGuardDetached::downgrade(self.guard) }, self.key, self.value)
    }
}
// Debug shows both the key and the value.
impl<'a, K: core::fmt::Debug + ?Sized, V: core::fmt::Debug + ?Sized> core::fmt::Debug
    for RefMut<'a, K, V>
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("RefMut").field("key", &self.key).field("value", &self.value).finish()
    }
}
/// A builder for creating a raw entry in a `DashMap`.
pub struct RawEntryBuilderMut<'a, K, V, S> {
    map: &'a DashMap<K, V, S>,
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
    /// Access an entry by key.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn from_key<Q>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
    where
        S: BuildHasher,
        K: Hash,
        Q: Hash + Equivalent<K> + ?Sized,
    {
        let hash = self.map.hash_u64(k);
        self.from_key_hashed_nocheck(hash, k)
    }
    /// Access an entry by a pre-computed hash and a key.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn from_key_hashed_nocheck<Q>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
    where
        S: BuildHasher,
        K: Hash,
        Q: Equivalent<K> + ?Sized,
    {
        self.from_hash(hash, |q| k.equivalent(q))
    }
    /// Access an entry by a pre-computed hash and a matching function.
    ///
    /// Takes the write lock on the shard the hash maps to; the lock travels
    /// inside the returned `RawEntryMut`.
    pub fn from_hash<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        S: BuildHasher,
        K: Hash,
        F: FnMut(&K) -> bool,
    {
        let idx = self.map.determine_shard(hash as usize);
        let shard_lock = self.map.shards[idx].write();
        // SAFETY: The guard is stored in the returned RawEntryMut, ensuring the lock
        // is held as long as the entry exists.
        let (shard_guard, table) = unsafe { RwLockWriteGuardDetached::detach_from(shard_lock) };
        match table.find_entry(
            hash,
            |(k, _)| is_match(k),
        ) {
            Ok(entry) => RawEntryMut::Occupied(RawOccupiedEntryMut {
                shard: shard_guard,
                entry,
            }),
            Err(entry) => RawEntryMut::Vacant(RawVacantEntryMut {
                shard: shard_guard,
                table: entry.into_table(),
                hash_builder: &self.map.hasher,
            }),
        }
    }
}
/// A raw entry in the map.
pub enum RawEntryMut<'a, K, V, S> {
    Occupied(RawOccupiedEntryMut<'a, K, V>),
    Vacant(RawVacantEntryMut<'a, K, V, S>),
}
impl<'a, K, V, S> RawEntryMut<'a, K, V, S> {
    /// Sets the value of the entry, and returns an OccupiedEntry.
    ///
    /// For an occupied entry, `key` is dropped and the stored key kept.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(mut entry) => {
                entry.insert(value);
                entry
            }
            RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
        }
    }
    /// Return the existing key/value refs, or insert the defaults.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default_key: K, default_val: V) -> RefMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
            RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
        }
    }
    /// Like `or_insert`, but the default pair is built lazily (only when vacant).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F>(self, default: F) -> RefMut<'a, K, V>
    where
        F: FnOnce() -> (K, V),
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
            RawEntryMut::Vacant(entry) => {
                let (k, v) = default();
                entry.insert(k, v)
            }
        }
    }
    /// Run `f` on the key/value if occupied, then return the entry unchanged.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(mut self, f: F) -> Self
    where F: FnOnce(&mut K, &mut V) {
        if let RawEntryMut::Occupied(entry) = &mut self {
            let (k, v) = entry.get_key_value_mut();
            f(k, v);
        }
        self
    }
}
/// An occupied raw entry; holds the shard write guard for its lifetime.
pub struct RawOccupiedEntryMut<'a, K, V> {
    shard: RwLockWriteGuardDetached<'a>,
    entry: hash_table::OccupiedEntry<'a, (K, V)>,
    // hash_builder: &'a S,
}
impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
    /// Shared borrow of the stored key.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K { &self.entry.get().0 }
    /// Exclusive borrow of the stored key. NOTE(review): mutating the key in
    /// a way that changes its hash/equality would corrupt the table's
    /// bucketing — callers must preserve both.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key_mut(&mut self) -> &mut K { &mut self.entry.get_mut().0 }
    /// Consume the entry, keeping only a mutable borrow of the key.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> &'a mut K { &mut self.entry.into_mut().0 }
    /// Shared borrow of the stored value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V { &self.entry.get().1 }
    /// Consume the entry, keeping only a mutable borrow of the value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V { &mut self.entry.into_mut().1 }
    /// Exclusive borrow of the stored value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V { &mut self.entry.get_mut().1 }
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value(&self) -> (&K, &V) {
        let (k, v) = self.entry.get();
        (k, v)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
        let (k, v) = self.entry.get_mut();
        (k, v)
    }
    /// Convert into a `RefMut`, transferring the shard guard.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key_value(self) -> RefMut<'a, K, V> {
        let (k, v) = self.entry.into_mut();
        RefMut::new(self.shard, k, v)
    }
    /// Replace the value, returning the previous one.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, value: V) -> V { mem::replace(self.get_mut(), value) }
    /// Replace the key, returning the previous one (see `key_mut` caveat).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_key(&mut self, key: K) -> K { mem::replace(self.key_mut(), key) }
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V { self.remove_entry().1 }
    // `hash_table::OccupiedEntry::remove` yields ((K, V), VacantEntry); we
    // only want the pair.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) { self.entry.remove().0 }
    // #[cfg_attr(feature = "inline-more", inline)]
    // pub fn replace_entry_with<F>(self, f: F) -> RawEntryMut<'a, K, V, S>
    // where F: FnOnce(&K, V) -> Option<V> {
    //     let proxy: hashbrown::hash_map::RawOccupiedEntryMut<'a, K, V, S> = unsafe {
    //         let (bucket, table): (
    //             core::ptr::NonNull<(K, V)>,
    //             &'a mut hash_table::HashTable<(K, V)>,
    //         ) = core::mem::transmute(self.entry);
    //         core::mem::transmute((bucket, table, self.hash_builder))
    //     };
    //     let result = proxy.replace_entry_with(f);
    //     match result {
    //         hashbrown::hash_map::RawEntryMut::Occupied(entry) => {
    //             let (bucket, table, hash_builder): (
    //                 core::ptr::NonNull<(K, V)>,
    //                 &'a mut hash_table::HashTable<(K, V)>,
    //                 &'a S,
    //             ) = unsafe { core::mem::transmute(entry) };
    //             RawEntryMut::Occupied(RawOccupiedEntryMut {
    //                 shard: self.shard,
    //                 entry: unsafe { core::mem::transmute((bucket, table)) },
    //                 hash_builder,
    //             })
    //         }
    //         hashbrown::hash_map::RawEntryMut::Vacant(entry) => {
    //             let (table, hash_builder) = unsafe { core::mem::transmute(entry) };
    //             RawEntryMut::Vacant(RawVacantEntryMut { shard: self.shard, table, hash_builder })
    //         }
    //     }
    // }
}
/// A vacant raw entry; holds the shard write guard and a mutable borrow of
/// the whole shard table so it can insert at any bucket.
pub struct RawVacantEntryMut<'a, K, V, S> {
    shard: RwLockWriteGuardDetached<'a>,
    table: &'a mut hash_table::HashTable<(K, V)>,
    hash_builder: &'a S,
}
impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
    /// Insert `(key, value)`, hashing the key with the map's hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> RefMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash = make_hash::<K, S>(self.hash_builder, &key);
        self.insert_hashed_nocheck(hash, key, value)
    }
    /// Insert with a caller-supplied hash; the hash is trusted, not checked.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::shadow_unrelated)]
    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> RefMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        let &mut (ref mut k, ref mut v) = self
            .table
            .insert_unique(hash, (key, value), make_hasher::<_, V, S>(self.hash_builder))
            .into_mut();
        RefMut::new(self.shard, k, v)
    }
    /// Insert with both a caller-supplied hash and rehash function. Returns
    /// plain borrows; the shard guard (and thus the lock) is dropped here,
    /// matching hashbrown's raw API shape.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_with_hasher<H>(
        self,
        hash: u64,
        key: K,
        value: V,
        hasher: H,
    ) -> (&'a mut K, &'a mut V)
    where
        H: Fn(&K) -> u64,
    {
        let &mut (ref mut k, ref mut v) =
            self.table.insert_unique(hash, (key, value), |x| hasher(&x.0)).into_mut();
        (k, v)
    }
    /// Helper to match hashbrown behavior for API compatibility.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash = make_hash::<K, S>(self.hash_builder, &key);
        let entry =
            self.table.insert_unique(hash, (key, value), make_hasher::<_, V, S>(self.hash_builder));
        RawOccupiedEntryMut { shard: self.shard, entry }
    }
}
/// Read-only counterpart of `RawEntryBuilderMut`: looks entries up under a
/// shard read lock.
pub struct RawEntryBuilder<'a, K, V, S> {
    map: &'a DashMap<K, V, S>,
}
impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> {
    /// Look up an entry by key, hashing it with the map's hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn from_key<Q>(self, k: &Q) -> Option<Ref<'a, K, V>>
    where
        S: BuildHasher,
        K: Hash,
        Q: Hash + Equivalent<K> + ?Sized,
    {
        let hash = self.map.hash_u64(k);
        self.from_key_hashed_nocheck(hash, k)
    }

    /// Look up an entry by a pre-computed hash and a key.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn from_key_hashed_nocheck<Q>(self, hash: u64, k: &Q) -> Option<Ref<'a, K, V>>
    where
        Q: Equivalent<K> + ?Sized,
    {
        self.from_hash(hash, |q| k.equivalent(q))
    }

    /// Look up an entry by a pre-computed hash and a matching function; on a
    /// hit the shard read lock travels inside the returned `Ref`.
    pub fn from_hash<F>(self, hash: u64, mut is_match: F) -> Option<Ref<'a, K, V>>
    where
        F: FnMut(&K) -> bool,
    {
        let idx = self.map.determine_shard(hash as usize);
        let shard_lock = self.map.shards[idx].read();
        // SAFETY: Detach guard to return Ref which holds the lock.
        let (shard_guard, table) = unsafe { RwLockReadGuardDetached::detach_from(shard_lock) };
        table
            .find(hash, |(k, _)| is_match(k))
            .map(|(k, v)| Ref::new(shard_guard, k, v))
    }
}
-212
View File
@@ -1,212 +0,0 @@
use crate::lock::{RwLock, RwLockReadGuardDetached, RwLockWriteGuardDetached};
use crate::mapref::multiple::{RefMulti, RefMutMulti};
use crate::{DashMap, HashMap};
use core::hash::{BuildHasher, Hash};
use crossbeam_utils::CachePadded;
use rayon::iter::plumbing::UnindexedConsumer;
use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator};
use std::sync::Arc;
// `&mut` form simply delegates to the `&DashMap` impl below — the map's
// interior sharded locking makes `&self` insertion possible.
impl<K, V, S> ParallelExtend<(K, V)> for DashMap<K, V, S>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
    S: Send + Sync + Clone + BuildHasher,
{
    fn par_extend<I>(&mut self, par_iter: I)
    where
        I: IntoParallelIterator<Item = (K, V)>,
    {
        (&*self).par_extend(par_iter);
    }
}
// Since we don't actually need mutability, we can implement this on a
// reference, similar to `io::Write for &File`.
impl<K, V, S> ParallelExtend<(K, V)> for &'_ DashMap<K, V, S>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
    S: Send + Sync + Clone + BuildHasher,
{
    fn par_extend<I>(&mut self, par_iter: I)
    where
        I: IntoParallelIterator<Item = (K, V)>,
    {
        // Copy the shared reference out so the closure can be `move` + `Sync`.
        let &mut map = self;
        par_iter.into_par_iter().for_each(move |(key, value)| {
            map.insert(key, value);
        });
    }
}
// Build a map from a parallel iterator by extending an empty default map.
impl<K, V, S> FromParallelIterator<(K, V)> for DashMap<K, V, S>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
    S: Send + Sync + Clone + Default + BuildHasher,
{
    fn from_par_iter<I>(par_iter: I) -> Self
    where
        I: IntoParallelIterator<Item = (K, V)>,
    {
        let map = Self::default();
        (&map).par_extend(par_iter);
        map
    }
}
// Implementation note: while the shards will iterate in parallel, we flatten
// sequentially within each shard (`flat_map_iter`), because the standard
// `HashMap` only implements `ParallelIterator` by collecting to a `Vec` first.
// There is real parallel support in the `hashbrown/rayon` feature, but we don't
// always use that map.
impl<K, V, S> IntoParallelIterator for DashMap<K, V, S>
where
    K: Send + Eq + Hash,
    V: Send,
    S: Send + Clone + BuildHasher,
{
    type Iter = OwningIter<K, V>;
    type Item = (K, V);
    fn into_par_iter(self) -> Self::Iter {
        OwningIter {
            shards: self.shards,
        }
    }
}
/// Owning parallel iterator over a consumed map's shards.
pub struct OwningIter<K, V> {
    pub(super) shards: Box<[CachePadded<RwLock<HashMap<K, V>>>]>,
}
impl<K, V> ParallelIterator for OwningIter<K, V>
where
    K: Send + Eq + Hash,
    V: Send,
{
    type Item = (K, V);
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        // We own every shard exclusively, so the locks and cache padding can
        // be peeled off (`into_inner` twice) before iterating.
        Vec::from(self.shards)
            .into_par_iter()
            .flat_map_iter(|shard| shard.into_inner().into_inner().into_iter())
            .drive_unindexed(consumer)
    }
}
// This impl also enables `IntoParallelRefIterator::par_iter`
impl<'a, K, V, S> IntoParallelIterator for &'a DashMap<K, V, S>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
    S: Send + Sync + Clone + BuildHasher,
{
    type Iter = Iter<'a, K, V>;
    type Item = RefMulti<'a, K, V>;
    fn into_par_iter(self) -> Self::Iter {
        Iter {
            shards: &self.shards,
        }
    }
}
/// Borrowing parallel iterator over a map's shards.
pub struct Iter<'a, K, V> {
    pub(super) shards: &'a [CachePadded<RwLock<HashMap<K, V>>>],
}
impl<'a, K, V> ParallelIterator for Iter<'a, K, V>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
{
    type Item = RefMulti<'a, K, V>;
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.shards
            .into_par_iter()
            .flat_map_iter(|shard| {
                // SAFETY: we keep the guard alive with the shard iterator,
                // and with any refs produced by the iterator
                let (guard, shard) = unsafe { RwLockReadGuardDetached::detach_from(shard.read()) };
                // One Arc-shared guard per shard; each ref clones it.
                let guard = Arc::new(guard);
                shard.iter().map(move |(k, v)| {
                    let guard = Arc::clone(&guard);
                    RefMulti::new(guard, k, v)
                })
            })
            .drive_unindexed(consumer)
    }
}
// This impl also enables `IntoParallelRefMutIterator::par_iter_mut`
impl<'a, K, V> IntoParallelIterator for &'a mut DashMap<K, V>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
{
    type Iter = IterMut<'a, K, V>;
    type Item = RefMutMulti<'a, K, V>;
    fn into_par_iter(self) -> Self::Iter {
        IterMut {
            shards: &self.shards,
        }
    }
}
impl<K, V, S> DashMap<K, V, S>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
{
    // Unlike `IntoParallelRefMutIterator::par_iter_mut`, we only _need_ `&self`.
    /// Parallel mutable iteration; exclusivity comes from the per-shard
    /// write locks, not from `&mut self`.
    pub fn par_iter_mut(&self) -> IterMut<'_, K, V> {
        IterMut {
            shards: &self.shards,
        }
    }
}
/// Borrowing parallel iterator that yields exclusive refs per entry.
pub struct IterMut<'a, K, V> {
    shards: &'a [CachePadded<RwLock<HashMap<K, V>>>],
}
impl<'a, K, V> ParallelIterator for IterMut<'a, K, V>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
{
    type Item = RefMutMulti<'a, K, V>;
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.shards
            .into_par_iter()
            .flat_map_iter(|shard| {
                // SAFETY: we keep the guard alive with the shard iterator,
                // and with any refs produced by the iterator
                let (guard, shard) =
                    unsafe { RwLockWriteGuardDetached::detach_from(shard.write()) };
                // One Arc-shared write guard per shard; each ref clones it.
                let guard = Arc::new(guard);
                shard.iter_mut().map(move |(k, v)| {
                    let guard = Arc::clone(&guard);
                    RefMutMulti::new(guard, k, v)
                })
            })
            .drive_unindexed(consumer)
    }
}
-96
View File
@@ -1,96 +0,0 @@
use crate::mapref::multiple::RefMulti;
use crate::rayon::map::Iter;
use crate::ReadOnlyView;
use core::hash::{BuildHasher, Hash};
use rayon::iter::IntoParallelIterator;
// Consuming a read-only view reuses the map's owning shard iterator.
impl<K, V, S> IntoParallelIterator for ReadOnlyView<K, V, S>
where
    K: Send + Eq + Hash,
    V: Send,
    S: Send + Clone + BuildHasher,
{
    type Iter = super::map::OwningIter<K, V>;
    type Item = (K, V);
    fn into_par_iter(self) -> Self::Iter {
        super::map::OwningIter {
            shards: self.map.shards,
        }
    }
}
// This impl also enables `IntoParallelRefIterator::par_iter`
impl<'a, K, V, S> IntoParallelIterator for &'a ReadOnlyView<K, V, S>
where
    K: Send + Sync + Eq + Hash,
    V: Send + Sync,
    S: Send + Sync + Clone + BuildHasher,
{
    type Iter = Iter<'a, K, V>;
    type Item = RefMulti<'a, K, V>;
    fn into_par_iter(self) -> Self::Iter {
        Iter {
            shards: &self.map.shards,
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::DashMap;
    use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator};
    // Small fixture shared by both tests.
    fn construct_sample_map() -> DashMap<i32, String> {
        let map = DashMap::new();
        map.insert(1, "one".to_string());
        map.insert(10, "ten".to_string());
        map.insert(27, "twenty seven".to_string());
        map.insert(45, "forty five".to_string());
        map
    }
    #[test]
    fn test_par_iter() {
        let map = construct_sample_map();
        let view = map.clone().into_read_only();
        // Every entry seen in parallel must match the live map's contents.
        view.par_iter().for_each(|entry| {
            let key = *entry.key();
            assert!(view.contains_key(&key));
            let map_entry = map.get(&key).unwrap();
            assert_eq!(view.get(&key).unwrap(), map_entry.value());
            let key_value: (&i32, &String) = view.get_key_value(&key).unwrap();
            assert_eq!(key_value.0, map_entry.key());
            assert_eq!(key_value.1, map_entry.value());
        });
    }
    #[test]
    fn test_into_par_iter() {
        let map = construct_sample_map();
        let view = map.clone().into_read_only();
        view.into_par_iter().for_each(|(key, value)| {
            let map_entry = map.get(&key).unwrap();
            assert_eq!(&key, map_entry.key());
            assert_eq!(&value, map_entry.value());
        });
    }
}
-118
View File
@@ -1,118 +0,0 @@
use crate::setref::multiple::RefMulti;
use crate::DashSet;
use core::hash::{BuildHasher, Hash};
use rayon::iter::plumbing::UnindexedConsumer;
use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator};
impl<K, S> ParallelExtend<K> for DashSet<K, S>
where
K: Send + Sync + Eq + Hash,
S: Send + Sync + Clone + BuildHasher,
{
fn par_extend<I>(&mut self, par_iter: I)
where
I: IntoParallelIterator<Item = K>,
{
(&*self).par_extend(par_iter);
}
}
// Since we don't actually need mutability, we can implement this on a
// reference, similar to `io::Write for &File`.
impl<K, S> ParallelExtend<K> for &'_ DashSet<K, S>
where
K: Send + Sync + Eq + Hash,
S: Send + Sync + Clone + BuildHasher,
{
fn par_extend<I>(&mut self, par_iter: I)
where
I: IntoParallelIterator<Item = K>,
{
let &mut set = self;
par_iter.into_par_iter().for_each(move |key| {
set.insert(key);
});
}
}
impl<K, S> FromParallelIterator<K> for DashSet<K, S>
where
K: Send + Sync + Eq + Hash,
S: Send + Sync + Clone + Default + BuildHasher,
{
fn from_par_iter<I>(par_iter: I) -> Self
where
I: IntoParallelIterator<Item = K>,
{
let set = Self::default();
(&set).par_extend(par_iter);
set
}
}
impl<K, S> IntoParallelIterator for DashSet<K, S>
where
K: Send + Eq + Hash,
S: Send + Clone + BuildHasher,
{
type Iter = OwningIter<K>;
type Item = K;
fn into_par_iter(self) -> Self::Iter {
OwningIter {
inner: self.inner.into_par_iter(),
}
}
}
pub struct OwningIter<K> {
inner: super::map::OwningIter<K, ()>,
}
impl<K> ParallelIterator for OwningIter<K>
where
K: Send + Eq + Hash,
{
type Item = K;
fn drive_unindexed<C>(self, consumer: C) -> C::Result
where
C: UnindexedConsumer<Self::Item>,
{
self.inner.map(|(k, _)| k).drive_unindexed(consumer)
}
}
// This impl also enables `IntoParallelRefIterator::par_iter`
impl<'a, K, S> IntoParallelIterator for &'a DashSet<K, S>
where
K: Send + Sync + Eq + Hash,
S: Send + Sync + Clone + BuildHasher,
{
type Iter = Iter<'a, K>;
type Item = RefMulti<'a, K>;
fn into_par_iter(self) -> Self::Iter {
Iter {
inner: (&self.inner).into_par_iter(),
}
}
}
pub struct Iter<'a, K> {
inner: super::map::Iter<'a, K, ()>,
}
impl<'a, K> ParallelIterator for Iter<'a, K>
where
K: Send + Sync + Eq + Hash,
{
type Item = RefMulti<'a, K>;
fn drive_unindexed<C>(self, consumer: C) -> C::Result
where
C: UnindexedConsumer<Self::Item>,
{
self.inner.map(RefMulti::new).drive_unindexed(consumer)
}
}
-255
View File
@@ -1,255 +0,0 @@
use crate::lock::RwLock;
use crate::{DashMap, HashMap};
use cfg_if::cfg_if;
use core::fmt;
use core::hash::{BuildHasher, Hash};
use crossbeam_utils::CachePadded;
use equivalent::Equivalent;
use std::collections::hash_map::RandomState;
/// A read-only view into a `DashMap`. Allows to obtain raw references to the stored values.
///
/// Because the view owns the map and exposes no mutating API, shard locks
/// can be bypassed entirely (see the `data_ptr` accesses below).
pub struct ReadOnlyView<K, V, S = RandomState> {
    pub(crate) map: DashMap<K, V, S>,
}
// Deep-clones the underlying map.
impl<K: Eq + Hash + Clone, V: Clone, S: Clone> Clone for ReadOnlyView<K, V, S> {
    fn clone(&self) -> Self {
        Self {
            map: self.map.clone(),
        }
    }
}
// Debug delegates to the wrapped map's formatting.
impl<K: Eq + Hash + fmt::Debug, V: fmt::Debug, S: BuildHasher + Clone> fmt::Debug
    for ReadOnlyView<K, V, S>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.map.fmt(f)
    }
}
impl<K, V, S> ReadOnlyView<K, V, S> {
    // Takes ownership of the map; from this point no other handle can
    // mutate it, which is what makes the lock-free reads below sound.
    pub(crate) fn new(map: DashMap<K, V, S>) -> Self {
        Self { map }
    }
    /// Consumes this `ReadOnlyView`, returning the underlying `DashMap`.
    pub fn into_inner(self) -> DashMap<K, V, S> {
        self.map
    }
}
impl<'a, K: 'a + Eq + Hash, V: 'a, S: BuildHasher + Clone> ReadOnlyView<K, V, S> {
    /// Returns the number of elements in the map.
    pub fn len(&self) -> usize {
        self.map.len()
    }
    /// Returns `true` if the map contains no elements.
    pub fn is_empty(&self) -> bool {
        self.map.is_empty()
    }
    /// Returns the number of elements the map can hold without reallocating.
    pub fn capacity(&self) -> usize {
        self.map.capacity()
    }
    /// Returns `true` if the map contains a value for the specified key.
    pub fn contains_key<Q>(&'a self, key: &Q) -> bool
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        self.get(key).is_some()
    }
    /// Returns a reference to the value corresponding to the key.
    pub fn get<Q>(&'a self, key: &Q) -> Option<&'a V>
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        self.get_key_value(key).map(|(_k, v)| v)
    }
    /// Returns the key-value pair corresponding to the supplied key.
    pub fn get_key_value<Q>(&'a self, key: &Q) -> Option<(&'a K, &'a V)>
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        // NOTE(review): `&key` hashes `&Q`, whose `Hash` impl forwards to `Q`,
        // so the result equals `hash_u64(key)`; the extra borrow is harmless.
        let hash = self.map.hash_u64(&key);
        // Pick the shard the same way the writable map does.
        let idx = self.map.determine_shard(hash as usize);
        let shard = &self.map.shards[idx];
        // SAFETY: the view owns the map and exposes no mutating API, so no
        // writer can exist; reading the shard without locking cannot race.
        let shard = unsafe { &*shard.data_ptr() };
        shard
            .find(hash, |(k, _v)| key.equivalent(k))
            .map(|(k, v)| (k, v))
    }
    /// An iterator visiting all key-value pairs in arbitrary order. The iterator element type is `(&'a K, &'a V)`.
    pub fn iter(&'a self) -> impl Iterator<Item = (&'a K, &'a V)> + 'a {
        // SAFETY: as above — the view is immutable for its whole lifetime, so
        // the shard contents cannot change while the returned borrows are live.
        self.map
            .shards
            .iter()
            .map(|shard| unsafe { &*shard.data_ptr() })
            .flat_map(|shard| shard.iter())
            .map(|(k, v)| (k, v))
    }
    /// An iterator visiting all keys in arbitrary order. The iterator element type is `&'a K`.
    pub fn keys(&'a self) -> impl Iterator<Item = &'a K> + 'a {
        self.iter().map(|(k, _v)| k)
    }
    /// An iterator visiting all values in arbitrary order. The iterator element type is `&'a V`.
    pub fn values(&'a self) -> impl Iterator<Item = &'a V> + 'a {
        self.iter().map(|(_k, v)| v)
    }
    cfg_if! {
        if #[cfg(feature = "raw-api")] {
            /// Allows you to peek at the inner shards that store your data.
            /// You should probably not use this unless you know what you are doing.
            ///
            /// Requires the `raw-api` feature to be enabled.
            ///
            /// # Examples
            ///
            /// ```
            /// use dashmap::DashMap;
            ///
            /// let map = DashMap::<(), ()>::new().into_read_only();
            /// println!("Amount of shards: {}", map.shards().len());
            /// ```
            pub fn shards(&self) -> &[CachePadded<RwLock<HashMap<K, V>>>] {
                &self.map.shards
            }
        } else {
            // Crate-private fallback so internal code can reach the shards even
            // when the public raw API is compiled out.
            #[allow(dead_code)]
            pub(crate) fn shards(&self) -> &[CachePadded<RwLock<HashMap<K, V>>>] {
                &self.map.shards
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::DashMap;
    // Shared fixture: a map with four fixed, disjoint entries.
    fn construct_sample_map() -> DashMap<i32, String> {
        let map = DashMap::new();
        map.insert(1, "one".to_string());
        map.insert(10, "ten".to_string());
        map.insert(27, "twenty seven".to_string());
        map.insert(45, "forty five".to_string());
        map
    }
    #[test]
    fn test_properties() {
        // The view must mirror len/is_empty/capacity of the map it wraps,
        // and `into_inner` must round-trip without losing entries.
        let map = construct_sample_map();
        let view = map.clone().into_read_only();
        assert_eq!(view.is_empty(), map.is_empty());
        assert_eq!(view.len(), map.len());
        assert_eq!(view.capacity(), map.capacity());
        let new_map = view.into_inner();
        assert_eq!(new_map.is_empty(), map.is_empty());
        assert_eq!(new_map.len(), map.len());
        assert_eq!(new_map.capacity(), map.capacity());
    }
    #[test]
    fn test_get() {
        // Every key visible through the map must be retrievable through the
        // view via contains_key / get / get_key_value with identical results.
        let map = construct_sample_map();
        let view = map.clone().into_read_only();
        for key in map.iter().map(|entry| *entry.key()) {
            assert!(view.contains_key(&key));
            let map_entry = map.get(&key).unwrap();
            assert_eq!(view.get(&key).unwrap(), map_entry.value());
            let key_value: (&i32, &String) = view.get_key_value(&key).unwrap();
            assert_eq!(key_value.0, map_entry.key());
            assert_eq!(key_value.1, map_entry.value());
        }
    }
    #[test]
    fn test_iters() {
        // iter/keys/values must each visit exactly the map's entries.
        let map = construct_sample_map();
        let view = map.clone().into_read_only();
        let mut visited_items = Vec::new();
        for (key, value) in view.iter() {
            map.contains_key(key);
            let map_entry = map.get(key).unwrap();
            assert_eq!(key, map_entry.key());
            assert_eq!(value, map_entry.value());
            visited_items.push((key, value));
        }
        let mut visited_keys = Vec::new();
        for key in view.keys() {
            map.contains_key(key);
            let map_entry = map.get(key).unwrap();
            assert_eq!(key, map_entry.key());
            assert_eq!(view.get(key).unwrap(), map_entry.value());
            visited_keys.push(key);
        }
        let mut visited_values = Vec::new();
        for value in view.values() {
            visited_values.push(value);
        }
        for entry in map.iter() {
            let key = entry.key();
            let value = entry.value();
            assert!(visited_keys.contains(&key));
            assert!(visited_values.contains(&value));
            assert!(visited_items.contains(&(key, value)));
        }
    }
}
-203
View File
@@ -1,203 +0,0 @@
use crate::{mapref, setref, DashMap, DashSet};
use core::fmt;
use core::hash::{BuildHasher, Hash};
use core::marker::PhantomData;
use serde::de::{Deserialize, MapAccess, SeqAccess, Visitor};
use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer};
use serde::Deserializer;
/// Serde visitor that builds a [`DashMap`] from a serialized map.
pub struct DashMapVisitor<K, V, S> {
    // PhantomData of a fn pointer: ties the type parameters to the visitor
    // without claiming ownership of any K/V/S values.
    marker: PhantomData<fn() -> DashMap<K, V, S>>,
}
impl<K, V, S> DashMapVisitor<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher + Clone,
{
    /// Constructs a fresh visitor; it carries no state beyond its type parameters.
    fn new() -> Self {
        Self { marker: PhantomData }
    }
}
impl<'de, K, V, S> Visitor<'de> for DashMapVisitor<K, V, S>
where
    K: Deserialize<'de> + Eq + Hash,
    V: Deserialize<'de>,
    S: BuildHasher + Clone + Default,
{
    type Value = DashMap<K, V, S>;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a DashMap")
    }
    // Pre-sizes from serde's size hint (0 when unknown), then drains entries.
    // NOTE(review): the hint is deserializer-provided and therefore untrusted;
    // consider capping it to bound preallocation from hostile input.
    fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>
    where
        M: MapAccess<'de>,
    {
        let map =
            DashMap::with_capacity_and_hasher(access.size_hint().unwrap_or(0), Default::default());
        while let Some((key, value)) = access.next_entry()? {
            map.insert(key, value);
        }
        Ok(map)
    }
}
impl<'de, K, V, S> Deserialize<'de> for DashMap<K, V, S>
where
    K: Deserialize<'de> + Eq + Hash,
    V: Deserialize<'de>,
    S: BuildHasher + Clone + Default,
{
    // Deserializes from any serde map representation via `DashMapVisitor`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_map(DashMapVisitor::<K, V, S>::new())
    }
}
impl<K, V, H> Serialize for DashMap<K, V, H>
where
    K: Serialize + Eq + Hash,
    V: Serialize,
    H: BuildHasher + Clone,
{
    // Serializes as a standard map; `len()` is snapshotted up front as serde's
    // size hint, which may drift if the map is mutated concurrently.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut map = serializer.serialize_map(Some(self.len()))?;
        for ref_multi in self.iter() {
            map.serialize_entry(ref_multi.key(), ref_multi.value())?;
        }
        map.end()
    }
}
/// Serde visitor that builds a [`DashSet`] from a serialized sequence.
pub struct DashSetVisitor<K, S> {
    // PhantomData of a fn pointer: ties K/S to the visitor without owning values.
    marker: PhantomData<fn() -> DashSet<K, S>>,
}
impl<K, S> DashSetVisitor<K, S>
where
    K: Eq + Hash,
    S: BuildHasher + Clone,
{
    /// Constructs a fresh visitor; it carries no state beyond its type parameters.
    fn new() -> Self {
        Self { marker: PhantomData }
    }
}
impl<'de, K, S> Visitor<'de> for DashSetVisitor<K, S>
where
    K: Deserialize<'de> + Eq + Hash,
    S: BuildHasher + Clone + Default,
{
    type Value = DashSet<K, S>;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a DashSet")
    }
    /// Builds the set by draining the serde sequence, pre-sizing from its size hint.
    fn visit_seq<M>(self, mut access: M) -> Result<Self::Value, M::Error>
    where
        M: SeqAccess<'de>,
    {
        let capacity = access.size_hint().unwrap_or(0);
        let set = DashSet::with_capacity_and_hasher(capacity, Default::default());
        while let Some(key) = access.next_element()? {
            set.insert(key);
        }
        Ok(set)
    }
}
impl<'de, K, S> Deserialize<'de> for DashSet<K, S>
where
    K: Deserialize<'de> + Eq + Hash,
    S: BuildHasher + Clone + Default,
{
    // Deserializes from any serde sequence representation via `DashSetVisitor`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_seq(DashSetVisitor::<K, S>::new())
    }
}
impl<K, H> Serialize for DashSet<K, H>
where
    K: Serialize + Eq + Hash,
    H: BuildHasher + Clone,
{
    // Serializes as a sequence of keys; `len()` is snapshotted as the size hint.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut seq = serializer.serialize_seq(Some(self.len()))?;
        for ref_multi in self.iter() {
            seq.serialize_element(ref_multi.key())?;
        }
        seq.end()
    }
}
// Forwards serialization of a guard type to the value it dereferences to, so
// `Ref`-like wrappers serialize exactly as the plain borrowed value would.
macro_rules! serialize_impl {
    () => {
        fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
        where
            Ser: serde::Serializer,
        {
            std::ops::Deref::deref(self).serialize(serializer)
        }
    };
}
// Map guard types: each serializes as its dereference target (the value `V`,
// or the mapped target `T` for MappedRef/MappedRefMut).
impl<'a, K: Eq + Hash, V: Serialize> Serialize for mapref::multiple::RefMulti<'a, K, V> {
    serialize_impl! {}
}
impl<'a, K: Eq + Hash, V: Serialize> Serialize for mapref::multiple::RefMutMulti<'a, K, V> {
    serialize_impl! {}
}
impl<'a, K: Eq + Hash, V: Serialize> Serialize for mapref::one::Ref<'a, K, V> {
    serialize_impl! {}
}
impl<'a, K: Eq + Hash, V: Serialize> Serialize for mapref::one::RefMut<'a, K, V> {
    serialize_impl! {}
}
impl<'a, K: Eq + Hash, T: Serialize> Serialize for mapref::one::MappedRef<'a, K, T> {
    serialize_impl! {}
}
impl<'a, K: Eq + Hash, T: Serialize> Serialize for mapref::one::MappedRefMut<'a, K, T> {
    serialize_impl! {}
}
// Set guard types: each serializes as the borrowed key.
impl<'a, V: Hash + Eq + Serialize> Serialize for setref::multiple::RefMulti<'a, V> {
    serialize_impl! {}
}
impl<'a, V: Hash + Eq + Serialize> Serialize for setref::one::Ref<'a, V> {
    serialize_impl! {}
}
-500
View File
@@ -1,500 +0,0 @@
use crate::iter_set::{Iter, OwningIter};
#[cfg(feature = "raw-api")]
use crate::lock::RwLock;
use crate::setref::one::Ref;
use crate::DashMap;
#[cfg(feature = "raw-api")]
use crate::HashMap;
use cfg_if::cfg_if;
use core::fmt;
use core::hash::{BuildHasher, Hash};
use core::iter::FromIterator;
#[cfg(feature = "raw-api")]
use crossbeam_utils::CachePadded;
use equivalent::Equivalent;
use std::collections::hash_map::RandomState;
/// DashSet is a thin wrapper around [`DashMap`] using `()` as the value type. It uses
/// methods and types which are more convenient to work with on a set.
///
/// [`DashMap`]: struct.DashMap.html
pub struct DashSet<K, S = RandomState> {
    // Every set operation delegates to this map with `()` values.
    pub(crate) inner: DashMap<K, (), S>,
}
impl<K: Eq + Hash + fmt::Debug, S: BuildHasher + Clone> fmt::Debug for DashSet<K, S> {
    // Formats like the inner map (keys paired with `()` values).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.inner, f)
    }
}
impl<K: Eq + Hash + Clone, S: Clone> Clone for DashSet<K, S> {
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
    // Forward clone_from so the inner map can reuse existing allocations.
    fn clone_from(&mut self, source: &Self) {
        self.inner.clone_from(&source.inner)
    }
}
impl<K, S> Default for DashSet<K, S>
where
    K: Eq + Hash,
    S: Default + BuildHasher + Clone,
{
    // An empty set with the hasher's default state and zero capacity.
    fn default() -> Self {
        Self::with_hasher(Default::default())
    }
}
impl<'a, K: 'a + Eq + Hash> DashSet<K, RandomState> {
    /// Creates a new DashSet with a capacity of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let games = DashSet::new();
    /// games.insert("Veloren");
    /// ```
    pub fn new() -> Self {
        Self::with_hasher(RandomState::default())
    }
    /// Creates a new DashSet with a specified starting capacity.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let numbers = DashSet::with_capacity(2);
    /// numbers.insert(2);
    /// numbers.insert(8);
    /// ```
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_and_hasher(capacity, RandomState::default())
    }
}
impl<'a, K: 'a + Eq + Hash, S: BuildHasher + Clone> DashSet<K, S> {
    /// Creates a new DashSet with a capacity of 0 and the provided hasher.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    /// use std::collections::hash_map::RandomState;
    ///
    /// let s = RandomState::new();
    /// let games = DashSet::with_hasher(s);
    /// games.insert("Veloren");
    /// ```
    pub fn with_hasher(hasher: S) -> Self {
        Self::with_capacity_and_hasher(0, hasher)
    }
    /// Creates a new DashSet with a specified starting capacity and hasher.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    /// use std::collections::hash_map::RandomState;
    ///
    /// let s = RandomState::new();
    /// let numbers = DashSet::with_capacity_and_hasher(2, s);
    /// numbers.insert(2);
    /// numbers.insert(8);
    /// ```
    pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self {
        Self {
            inner: DashMap::with_capacity_and_hasher(capacity, hasher),
        }
    }
    /// Hash a given item to produce a usize.
    /// Uses the provided or default HashBuilder.
    pub fn hash_usize<T: Hash + ?Sized>(&self, item: &T) -> usize {
        self.inner.hash_usize(item)
    }
    cfg_if! {
        if #[cfg(feature = "raw-api")] {
            /// Allows you to peek at the inner shards that store your data.
            /// You should probably not use this unless you know what you are doing.
            ///
            /// Requires the `raw-api` feature to be enabled.
            ///
            /// # Examples
            ///
            /// ```
            /// use dashmap::DashSet;
            ///
            /// let set = DashSet::<()>::new();
            /// println!("Amount of shards: {}", set.shards().len());
            /// ```
            pub fn shards(&self) -> &[CachePadded<RwLock<HashMap<K, ()>>>] {
                self.inner.shards()
            }
        }
    }
    cfg_if! {
        if #[cfg(feature = "raw-api")] {
            /// Finds which shard a certain key is stored in.
            /// You should probably not use this unless you know what you are doing.
            /// Note that shard selection is dependent on the default or provided HashBuilder.
            ///
            /// Requires the `raw-api` feature to be enabled.
            ///
            /// # Examples
            ///
            /// ```
            /// use dashmap::DashSet;
            ///
            /// let set = DashSet::new();
            /// set.insert("coca-cola");
            /// println!("coca-cola is stored in shard: {}", set.determine_map("coca-cola"));
            /// ```
            pub fn determine_map<Q>(&self, key: &Q) -> usize
            where
                Q: Hash + Equivalent<K> + ?Sized,
            {
                self.inner.determine_map(key)
            }
        }
    }
    cfg_if! {
        if #[cfg(feature = "raw-api")] {
            /// Finds which shard a certain hash is stored in.
            ///
            /// Requires the `raw-api` feature to be enabled.
            ///
            /// # Examples
            ///
            /// ```
            /// use dashmap::DashSet;
            ///
            /// let set: DashSet<i32> = DashSet::new();
            /// let key = "key";
            /// let hash = set.hash_usize(&key);
            /// println!("hash is stored in shard: {}", set.determine_shard(hash));
            /// ```
            pub fn determine_shard(&self, hash: usize) -> usize {
                self.inner.determine_shard(hash)
            }
        }
    }
    /// Inserts a key into the set. Returns true if the key was not already in the set.
    /// Does not update the key if it was already present.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let set = DashSet::new();
    /// set.insert("I am the key!");
    /// ```
    pub fn insert(&self, key: K) -> bool {
        self.inner.insert(key, ()).is_none()
    }
    /// Removes an entry from the set, returning the key if it existed in the set.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let soccer_team = DashSet::new();
    /// soccer_team.insert("Jack");
    /// assert_eq!(soccer_team.remove("Jack").unwrap(), "Jack");
    /// ```
    pub fn remove<Q>(&self, key: &Q) -> Option<K>
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        self.inner.remove(key).map(|(k, _)| k)
    }
    /// Removes an entry from the set, returning the key
    /// if the entry existed and the provided conditional function returned true.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let soccer_team = DashSet::new();
    /// soccer_team.insert("Sam");
    /// soccer_team.remove_if("Sam", |player| player.starts_with("Ja"));
    /// assert!(soccer_team.contains("Sam"));
    /// ```
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let soccer_team = DashSet::new();
    /// soccer_team.insert("Sam");
    /// soccer_team.remove_if("Jacob", |player| player.starts_with("Ja"));
    /// assert!(!soccer_team.contains("Jacob"));
    /// ```
    pub fn remove_if<Q>(&self, key: &Q, f: impl FnOnce(&K) -> bool) -> Option<K>
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        // TODO: Don't create another closure around f
        self.inner.remove_if(key, |k, _| f(k)).map(|(k, _)| k)
    }
    /// Creates an iterator over a DashSet yielding immutable references.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let words = DashSet::new();
    /// words.insert("hello");
    /// assert_eq!(words.iter().count(), 1);
    /// ```
    pub fn iter(&'a self) -> Iter<'a, K> {
        let iter = self.inner.iter();
        Iter::new(iter)
    }
    /// Get a reference to an entry in the set
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let youtubers = DashSet::new();
    /// youtubers.insert("Bosnian Bill");
    /// assert_eq!(*youtubers.get("Bosnian Bill").unwrap(), "Bosnian Bill");
    /// ```
    pub fn get<Q>(&'a self, key: &Q) -> Option<Ref<'a, K>>
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        self.inner.get(key).map(Ref::new)
    }
    /// Remove excess capacity to reduce memory usage.
    pub fn shrink_to_fit(&self) {
        self.inner.shrink_to_fit()
    }
    /// Retains the elements whose predicate returns true
    /// and discards the elements whose predicate returns false.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let people = DashSet::new();
    /// people.insert("Albin");
    /// people.insert("Jones");
    /// people.insert("Charlie");
    /// people.retain(|name| name.contains('i'));
    /// assert_eq!(people.len(), 2);
    /// ```
    pub fn retain(&self, mut f: impl FnMut(&K) -> bool) {
        self.inner.retain(|k, _| f(k))
    }
    /// Fetches the total number of keys stored in the set.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let people = DashSet::new();
    /// people.insert("Albin");
    /// people.insert("Jones");
    /// people.insert("Charlie");
    /// assert_eq!(people.len(), 3);
    /// ```
    pub fn len(&self) -> usize {
        self.inner.len()
    }
    /// Checks if the set is empty or not.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let map = DashSet::<()>::new();
    /// assert!(map.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
    /// Removes all keys in the set.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let people = DashSet::new();
    /// people.insert("Albin");
    /// assert!(!people.is_empty());
    /// people.clear();
    /// assert!(people.is_empty());
    /// ```
    pub fn clear(&self) {
        self.inner.clear()
    }
    /// Returns how many keys the set can store without reallocating.
    pub fn capacity(&self) -> usize {
        self.inner.capacity()
    }
    /// Checks if the set contains a specific key.
    ///
    /// # Examples
    ///
    /// ```
    /// use dashmap::DashSet;
    ///
    /// let people = DashSet::new();
    /// people.insert("Dakota Cherries");
    /// assert!(people.contains("Dakota Cherries"));
    /// ```
    pub fn contains<Q>(&self, key: &Q) -> bool
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        self.inner.contains_key(key)
    }
}
impl<K: Eq + Hash, S: BuildHasher + Clone> PartialEq for DashSet<K, S> {
    /// Two sets are equal when they have the same length and every key of
    /// `self` is present in `other` (equal size + subset implies equality).
    fn eq(&self, other: &Self) -> bool {
        if self.len() != other.len() {
            return false;
        }
        self.iter().all(|entry| other.contains(entry.key()))
    }
}
impl<K: Eq + Hash, S: BuildHasher + Clone> Eq for DashSet<K, S> {}
impl<K: Eq + Hash, S: BuildHasher + Clone> IntoIterator for DashSet<K, S> {
    type Item = K;
    type IntoIter = OwningIter<K>;
    // Consumes the set, yielding owned keys in arbitrary order.
    fn into_iter(self) -> Self::IntoIter {
        OwningIter::new(self.inner.into_iter())
    }
}
impl<K: Eq + Hash, S: BuildHasher + Clone> Extend<K> for DashSet<K, S> {
    // Pairs each key with `()` and forwards to the inner map's `Extend`.
    fn extend<T: IntoIterator<Item = K>>(&mut self, iter: T) {
        let iter = iter.into_iter().map(|k| (k, ()));
        self.inner.extend(iter)
    }
}
impl<K: Eq + Hash, S: BuildHasher + Clone + Default> FromIterator<K> for DashSet<K, S> {
    // Builds a default-hasher set and extends it; duplicates collapse as usual.
    fn from_iter<I: IntoIterator<Item = K>>(iter: I) -> Self {
        let mut set = DashSet::default();
        set.extend(iter);
        set
    }
}
#[cfg(feature = "typesize")]
impl<K, S> typesize::TypeSize for DashSet<K, S>
where
    K: typesize::TypeSize + Eq + Hash,
    S: typesize::TypeSize + Clone + BuildHasher,
{
    // Heap usage is entirely owned by the inner map.
    fn extra_size(&self) -> usize {
        self.inner.extra_size()
    }
    typesize::if_typesize_details! {
        fn get_collection_item_count(&self) -> Option<usize> {
            Some(self.len())
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::DashSet;
    #[test]
    fn test_basic() {
        // insert + get round-trip on a fresh set.
        let set = DashSet::new();
        set.insert(0);
        assert_eq!(set.get(&0).as_deref(), Some(&0));
    }
    #[test]
    fn test_default() {
        // `Default` must behave like `new`.
        let set: DashSet<u32> = DashSet::default();
        set.insert(0);
        assert_eq!(set.get(&0).as_deref(), Some(&0));
    }
    #[test]
    fn test_equal() {
        // Equality is order-independent and requires identical key sets.
        let set1 = DashSet::new();
        let set2 = DashSet::new();
        assert_eq!(set1, set2);
        set1.insert("Hello, world!");
        assert_ne!(set1, set2);
        set1.insert("Goodbye, world!");
        assert_ne!(set1, set2);
        set2.insert("Hello, world!");
        assert_ne!(set1, set2);
        set2.insert("Goodbye, world!");
        assert_eq!(set1, set2);
    }
    #[test]
    fn test_multiple_hashes() {
        // insert returns false on duplicates; remove returns the key exactly once.
        let set = DashSet::<u32>::default();
        for i in 0..100 {
            assert!(set.insert(i));
        }
        for i in 0..100 {
            assert!(!set.insert(i));
        }
        for i in 0..100 {
            assert_eq!(Some(i), set.remove(&i));
        }
        for i in 0..100 {
            assert_eq!(None, set.remove(&i));
        }
    }
}
-2
View File
@@ -1,2 +0,0 @@
/// Guard references yielded while iterating a `DashSet`.
pub mod multiple;
/// Guard references returned by single-key `DashSet` lookups.
pub mod one;
-25
View File
@@ -1,25 +0,0 @@
use crate::mapref;
use core::hash::Hash;
use core::ops::Deref;
/// Immutable guard to a set element, yielded during iteration.
/// Wraps the map guard for a `DashMap<K, ()>` entry and exposes only the key.
pub struct RefMulti<'a, K> {
    inner: mapref::multiple::RefMulti<'a, K, ()>,
}
impl<'a, K: Eq + Hash> RefMulti<'a, K> {
    // Crate-internal: built from a map iteration guard whose value is `()`.
    pub(crate) fn new(inner: mapref::multiple::RefMulti<'a, K, ()>) -> Self {
        Self { inner }
    }
    /// Borrows the element this guard points at.
    pub fn key(&self) -> &K {
        self.inner.key()
    }
}
impl<'a, K: Eq + Hash> Deref for RefMulti<'a, K> {
    type Target = K;
    // Dereferences to the element, so `*guard` reads as the key itself.
    fn deref(&self) -> &K {
        self.key()
    }
}

Some files were not shown because too many files have changed in this diff Show More