From 96104e99bd7fafbb6b3d5065959ffe080f92c71d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anthony=20Rompr=C3=A9?= <77812208+arompr@users.noreply.github.com> Date: Sun, 21 Dec 2025 14:27:56 -0500 Subject: [PATCH 1/5] Refactor + CAS + jmh tests (#1) * small refactor and added performance tests with jmh * revert tests to inital values --- .factorypath | 6 + .mvn/wrapper/maven-wrapper.properties | 3 + Readme.md | 2 +- mvnw | 295 ++++++++++++++++++ mvnw.cmd | 189 +++++++++++ pom.xml | 42 ++- .../snowflake/benchmark/BenchmarkRunner.java | 7 + .../benchmark/SnowflakeBenchmark.java | 40 +++ .../com/callicoder/snowflake/Snowflake.java | 133 -------- .../snowflake/SnowflakeIdGenerator.java | 171 ++++++++++ .../snowflake/SnowflakePerformanceTest.java | 17 +- .../arompr}/snowflake/SnowflakeTest.java | 33 +- 12 files changed, 777 insertions(+), 161 deletions(-) create mode 100644 .factorypath create mode 100644 .mvn/wrapper/maven-wrapper.properties create mode 100755 mvnw create mode 100644 mvnw.cmd create mode 100644 src/jmh/java/io/github/arompr/snowflake/benchmark/BenchmarkRunner.java create mode 100644 src/jmh/java/io/github/arompr/snowflake/benchmark/SnowflakeBenchmark.java delete mode 100644 src/main/java/com/callicoder/snowflake/Snowflake.java create mode 100644 src/main/java/io/github/arompr/snowflake/SnowflakeIdGenerator.java rename src/test/java/{com/callicoder => io/github/arompr}/snowflake/SnowflakePerformanceTest.java (79%) rename src/test/java/{com/callicoder => io/github/arompr}/snowflake/SnowflakeTest.java (67%) diff --git a/.factorypath b/.factorypath new file mode 100644 index 0000000..a2b0889 --- /dev/null +++ b/.factorypath @@ -0,0 +1,6 @@ + + + + + + diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 0000000..c0bcafe --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,3 @@ +wrapperVersion=3.3.4 +distributionType=only-script +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.11/apache-maven-3.9.11-bin.zip diff --git a/Readme.md b/Readme.md index 0aae2c0..cbeed3d 100644 --- a/Readme.md +++ b/Readme.md @@ -4,7 +4,7 @@ You can read about Twitter snowflake [here](https://blog.twitter.com/engineering The IDs are 64-bits in size and are generated with the combination of the following: -+ **Epoch timestamp in milliseconds precision** - **41 bits**. The maximum timestamp that can be represented using 41 bits is `2^41 - 1`, or `2199023255551`, which comes out to be `Wednesday, September 7, 2039 3:47:35.551 PM`. That gives us 69 years with respect to a custom epoch.. ++ **Epoch timestamp in milliseconds precision** - **41 bits**. The maximum timestamp that can be represented using 41 bits is `2^41 - 1`, or `2199023255551`, That gives us 69 years with respect to a custom epoch.. + **Node ID** - **10 bits**. This gives us 1024 nodes/machines. + **Local counter per machine** - **12 bits**. The counter’s max value would be 4095. diff --git a/mvnw b/mvnw new file mode 100755 index 0000000..bd8896b --- /dev/null +++ b/mvnw @@ -0,0 +1,295 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Apache Maven Wrapper startup batch script, version 3.3.4 +# +# Optional ENV vars +# ----------------- +# JAVA_HOME - location of a JDK home dir, required when download maven via java source +# MVNW_REPOURL - repo url base for downloading maven distribution +# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output +# ---------------------------------------------------------------------------- + +set -euf +[ "${MVNW_VERBOSE-}" != debug ] || set -x + +# OS specific support. +native_path() { printf %s\\n "$1"; } +case "$(uname)" in +CYGWIN* | MINGW*) + [ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")" + native_path() { cygpath --path --windows "$1"; } + ;; +esac + +# set JAVACMD and JAVACCMD +set_java_home() { + # For Cygwin and MinGW, ensure paths are in Unix format before anything is touched + if [ -n "${JAVA_HOME-}" ]; then + if [ -x "$JAVA_HOME/jre/sh/java" ]; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACCMD="$JAVA_HOME/jre/sh/javac" + else + JAVACMD="$JAVA_HOME/bin/java" + JAVACCMD="$JAVA_HOME/bin/javac" + + if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then + echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2 + echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2 + return 1 + fi + fi + else + JAVACMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v java + )" || : + JAVACCMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v javac + )" || : + + if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then + echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2 + return 1 + fi + fi +} + +# hash string like Java String::hashCode +hash_string() { + str="${1:-}" h=0 + while [ -n "$str" ]; do + char="${str%"${str#?}"}" + h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296)) + str="${str#?}" + done + printf %x\\n $h +} + +verbose() { :; } +[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; } + +die() { + printf %s\\n "$1" >&2 + exit 1 +} + +trim() { + # MWRAPPER-139: + # Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds. + # Needed for removing poorly interpreted newline sequences when running in more + # exotic environments such as mingw bash on Windows. 
+ printf "%s" "${1}" | tr -d '[:space:]' +} + +scriptDir="$(dirname "$0")" +scriptName="$(basename "$0")" + +# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties +while IFS="=" read -r key value; do + case "${key-}" in + distributionUrl) distributionUrl=$(trim "${value-}") ;; + distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;; + esac +done <"$scriptDir/.mvn/wrapper/maven-wrapper.properties" +[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" + +case "${distributionUrl##*/}" in +maven-mvnd-*bin.*) + MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ + case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in + *AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;; + :Darwin*x86_64) distributionPlatform=darwin-amd64 ;; + :Darwin*arm64) distributionPlatform=darwin-aarch64 ;; + :Linux*x86_64*) distributionPlatform=linux-amd64 ;; + *) + echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2 + distributionPlatform=linux-amd64 + ;; + esac + distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip" + ;; +maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;; +*) MVN_CMD="mvn${scriptName#mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;; +esac + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}" +distributionUrlName="${distributionUrl##*/}" +distributionUrlNameMain="${distributionUrlName%.*}" +distributionUrlNameMain="${distributionUrlNameMain%-bin}" +MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}" +MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")" + +exec_maven() { + unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || : + exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD" +} + +if [ -d "$MAVEN_HOME" ]; then + verbose "found existing MAVEN_HOME at $MAVEN_HOME" + exec_maven "$@" +fi + +case "${distributionUrl-}" in +*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;; +*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;; +esac + +# prepare tmp dir +if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then + clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; } + trap clean HUP INT TERM EXIT +else + die "cannot create temp dir" +fi + +mkdir -p -- "${MAVEN_HOME%/*}" + +# Download and Install Apache Maven +verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +verbose "Downloading from: $distributionUrl" +verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +# select .zip or .tar.gz +if ! 
command -v unzip >/dev/null; then + distributionUrl="${distributionUrl%.zip}.tar.gz" + distributionUrlName="${distributionUrl##*/}" +fi + +# verbose opt +__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR='' +[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v + +# normalize http auth +case "${MVNW_PASSWORD:+has-password}" in +'') MVNW_USERNAME='' MVNW_PASSWORD='' ;; +has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;; +esac + +if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then + verbose "Found wget ... using wget" + wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl" +elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then + verbose "Found curl ... using curl" + curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl" +elif set_java_home; then + verbose "Falling back to use Java to download" + javaSource="$TMP_DOWNLOAD_DIR/Downloader.java" + targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName" + cat >"$javaSource" <<-END + public class Downloader extends java.net.Authenticator + { + protected java.net.PasswordAuthentication getPasswordAuthentication() + { + return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() ); + } + public static void main( String[] args ) throws Exception + { + setDefault( new Downloader() ); + java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() ); + } + } + END + # For Cygwin/MinGW, switch paths to Windows format before running javac and java + verbose " - Compiling Downloader.java ..." + "$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java" + verbose " - Running Downloader.java ..." + "$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")" +fi + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +if [ -n "${distributionSha256Sum-}" ]; then + distributionSha256Result=false + if [ "$MVN_CMD" = mvnd.sh ]; then + echo "Checksum validation is not supported for maven-mvnd." >&2 + echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + elif command -v sha256sum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c - >/dev/null 2>&1; then + distributionSha256Result=true + fi + elif command -v shasum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then + distributionSha256Result=true + fi + else + echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2 + echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + fi + if [ $distributionSha256Result = false ]; then + echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2 + echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." 
>&2 + exit 1 + fi +fi + +# unzip and move +if command -v unzip >/dev/null; then + unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip" +else + tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar" +fi + +# Find the actual extracted directory name (handles snapshots where filename != directory name) +actualDistributionDir="" + +# First try the expected directory name (for regular distributions) +if [ -d "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" ]; then + if [ -f "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/bin/$MVN_CMD" ]; then + actualDistributionDir="$distributionUrlNameMain" + fi +fi + +# If not found, search for any directory with the Maven executable (for snapshots) +if [ -z "$actualDistributionDir" ]; then + # enable globbing to iterate over items + set +f + for dir in "$TMP_DOWNLOAD_DIR"/*; do + if [ -d "$dir" ]; then + if [ -f "$dir/bin/$MVN_CMD" ]; then + actualDistributionDir="$(basename "$dir")" + break + fi + fi + done + set -f +fi + +if [ -z "$actualDistributionDir" ]; then + verbose "Contents of $TMP_DOWNLOAD_DIR:" + verbose "$(ls -la "$TMP_DOWNLOAD_DIR")" + die "Could not find Maven distribution directory in extracted archive" +fi + +verbose "Found extracted Maven distribution directory: $actualDistributionDir" +printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$actualDistributionDir/mvnw.url" +mv -- "$TMP_DOWNLOAD_DIR/$actualDistributionDir" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME" + +clean || : +exec_maven "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100644 index 0000000..5761d94 --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,189 @@ +<# : batch portion +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. 
+@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Apache Maven Wrapper startup batch script, version 3.3.4 +@REM +@REM Optional ENV vars +@REM MVNW_REPOURL - repo url base for downloading maven distribution +@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output +@REM ---------------------------------------------------------------------------- + +@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0) +@SET __MVNW_CMD__= +@SET __MVNW_ERROR__= +@SET __MVNW_PSMODULEP_SAVE=%PSModulePath% +@SET PSModulePath= +@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @( + IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B) +) +@SET PSModulePath=%__MVNW_PSMODULEP_SAVE% +@SET __MVNW_PSMODULEP_SAVE= +@SET __MVNW_ARG0_NAME__= +@SET MVNW_USERNAME= +@SET MVNW_PASSWORD= +@IF NOT "%__MVNW_CMD__%"=="" ("%__MVNW_CMD__%" %*) +@echo Cannot start maven from wrapper >&2 && exit /b 1 +@GOTO :EOF +: end batch / begin powershell #> + +$ErrorActionPreference = "Stop" +if ($env:MVNW_VERBOSE -eq "true") { + $VerbosePreference = "Continue" +} + +# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties +$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl +if (!$distributionUrl) { + Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" +} + +switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) { + "maven-mvnd-*" { + $USE_MVND = $true + $distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip" + $MVN_CMD = "mvnd.cmd" + break + } + default { + $USE_MVND = $false + $MVN_CMD = $script -replace '^mvnw','mvn' + break + } +} + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +if ($env:MVNW_REPOURL) { + $MVNW_REPO_PATTERN = if ($USE_MVND -eq $False) { "/org/apache/maven/" } else { "/maven/mvnd/" } + $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace "^.*$MVNW_REPO_PATTERN",'')" +} +$distributionUrlName = $distributionUrl -replace '^.*/','' +$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$','' + +$MAVEN_M2_PATH = "$HOME/.m2" +if ($env:MAVEN_USER_HOME) { + $MAVEN_M2_PATH = "$env:MAVEN_USER_HOME" +} + +if (-not (Test-Path -Path $MAVEN_M2_PATH)) { + New-Item -Path $MAVEN_M2_PATH -ItemType Directory | Out-Null +} + +$MAVEN_WRAPPER_DISTS = $null +if ((Get-Item $MAVEN_M2_PATH).Target[0] -eq $null) { + $MAVEN_WRAPPER_DISTS = "$MAVEN_M2_PATH/wrapper/dists" +} else { + $MAVEN_WRAPPER_DISTS = (Get-Item $MAVEN_M2_PATH).Target[0] + "/wrapper/dists" +} + +$MAVEN_HOME_PARENT = "$MAVEN_WRAPPER_DISTS/$distributionUrlNameMain" +$MAVEN_HOME_NAME = ([System.Security.Cryptography.SHA256]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join '' +$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME" + +if (Test-Path -Path "$MAVEN_HOME" -PathType Container) { + Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME" + Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" + 
exit $? +} + +if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) { + Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl" +} + +# prepare tmp dir +$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile +$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir" +$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null +trap { + if ($TMP_DOWNLOAD_DIR.Exists) { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } + } +} + +New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null + +# Download and Install Apache Maven +Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +Write-Verbose "Downloading from: $distributionUrl" +Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +$webclient = New-Object System.Net.WebClient +if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) { + $webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD) +} +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 +$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum +if ($distributionSha256Sum) { + if ($USE_MVND) { + Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." + } + Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash + if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) { + Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property." 
+ } +} + +# unzip and move +Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null + +# Find the actual extracted directory name (handles snapshots where filename != directory name) +$actualDistributionDir = "" + +# First try the expected directory name (for regular distributions) +$expectedPath = Join-Path "$TMP_DOWNLOAD_DIR" "$distributionUrlNameMain" +$expectedMvnPath = Join-Path "$expectedPath" "bin/$MVN_CMD" +if ((Test-Path -Path $expectedPath -PathType Container) -and (Test-Path -Path $expectedMvnPath -PathType Leaf)) { + $actualDistributionDir = $distributionUrlNameMain +} + +# If not found, search for any directory with the Maven executable (for snapshots) +if (!$actualDistributionDir) { + Get-ChildItem -Path "$TMP_DOWNLOAD_DIR" -Directory | ForEach-Object { + $testPath = Join-Path $_.FullName "bin/$MVN_CMD" + if (Test-Path -Path $testPath -PathType Leaf) { + $actualDistributionDir = $_.Name + } + } +} + +if (!$actualDistributionDir) { + Write-Error "Could not find Maven distribution directory in extracted archive" +} + +Write-Verbose "Found extracted Maven distribution directory: $actualDistributionDir" +Rename-Item -Path "$TMP_DOWNLOAD_DIR/$actualDistributionDir" -NewName $MAVEN_HOME_NAME | Out-Null +try { + Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null +} catch { + if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) { + Write-Error "fail to move MAVEN_HOME" + } +} finally { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } +} + +Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" diff --git a/pom.xml b/pom.xml index cd741e7..a381382 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 - com.callicoder + io.github.arompr snowflake 1.0.0 @@ -12,6 +12,7 @@ UTF-8 1.8 5.5.2 + 1.37 @@ -21,6 +22,17 @@ ${junit.jupiter.version} test + + org.openjdk.jmh + jmh-core + ${jmh.version} + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh.version} + provided + @@ -32,6 +44,13 @@ ${java.version} ${java.version} + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh.version} + + @@ -44,7 +63,26 @@ + + org.codehaus.mojo + build-helper-maven-plugin + 3.5.0 + + + add-jmh-source + generate-sources + + add-source + + + + src/jmh/java + + + + + - \ No newline at end of file + diff --git a/src/jmh/java/io/github/arompr/snowflake/benchmark/BenchmarkRunner.java b/src/jmh/java/io/github/arompr/snowflake/benchmark/BenchmarkRunner.java new file mode 100644 index 0000000..093b9ee --- /dev/null +++ b/src/jmh/java/io/github/arompr/snowflake/benchmark/BenchmarkRunner.java @@ -0,0 +1,7 @@ +package io.github.arompr.snowflake.benchmark; + +public class BenchmarkRunner { + public static void main(String[] args) throws Exception { + org.openjdk.jmh.Main.main(args); + } +} diff --git a/src/jmh/java/io/github/arompr/snowflake/benchmark/SnowflakeBenchmark.java b/src/jmh/java/io/github/arompr/snowflake/benchmark/SnowflakeBenchmark.java new file mode 100644 index 0000000..ba01208 --- /dev/null +++ b/src/jmh/java/io/github/arompr/snowflake/benchmark/SnowflakeBenchmark.java @@ -0,0 +1,40 @@ +package io.github.arompr.snowflake.benchmark; + +import io.github.arompr.snowflake.SnowflakeIdGenerator; +import org.openjdk.jmh.annotations.*; + +import java.util.concurrent.TimeUnit; + +@BenchmarkMode(Mode.Throughput) 
+@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 5, time = 1) +@Measurement(iterations = 5, time = 1) +@Fork(2) +@State(Scope.Benchmark) +public class SnowflakeBenchmark { + + private SnowflakeIdGenerator generator; + + @Setup + public void setup() { + generator = new SnowflakeIdGenerator(1); + } + + @Benchmark + @Threads(1) + public long singleThread() { + return generator.nextId(); + } + + @Benchmark + @Threads(8) + public long eightThreads() { + return generator.nextId(); + } + + @Benchmark + @Threads(32) + public long thirtyTwoThreads() { + return generator.nextId(); + } +} diff --git a/src/main/java/com/callicoder/snowflake/Snowflake.java b/src/main/java/com/callicoder/snowflake/Snowflake.java deleted file mode 100644 index 5d050c1..0000000 --- a/src/main/java/com/callicoder/snowflake/Snowflake.java +++ /dev/null @@ -1,133 +0,0 @@ -package com.callicoder.snowflake; - -import java.net.NetworkInterface; -import java.security.SecureRandom; -import java.time.Instant; -import java.util.Enumeration; - -/** - * Distributed Sequence Generator. - * Inspired by Twitter snowflake: https://github.com/twitter/snowflake/tree/snowflake-2010 - * - * This class should be used as a Singleton. - * Make sure that you create and reuse a Single instance of Snowflake per node in your distributed system cluster. - */ -public class Snowflake { - private static final int UNUSED_BITS = 1; // Sign bit, Unused (always set to 0) - private static final int EPOCH_BITS = 41; - private static final int NODE_ID_BITS = 10; - private static final int SEQUENCE_BITS = 12; - - private static final long maxNodeId = (1L << NODE_ID_BITS) - 1; - private static final long maxSequence = (1L << SEQUENCE_BITS) - 1; - - // Custom Epoch (January 1, 2015 Midnight UTC = 2015-01-01T00:00:00Z) - private static final long DEFAULT_CUSTOM_EPOCH = 1420070400000L; - - private final long nodeId; - private final long customEpoch; - - private volatile long lastTimestamp = -1L; - private volatile long sequence = 0L; - - // Create Snowflake with a nodeId and custom epoch - public Snowflake(long nodeId, long customEpoch) { - if(nodeId < 0 || nodeId > maxNodeId) { - throw new IllegalArgumentException(String.format("NodeId must be between %d and %d", 0, maxNodeId)); - } - this.nodeId = nodeId; - this.customEpoch = customEpoch; - } - - // Create Snowflake with a nodeId - public Snowflake(long nodeId) { - this(nodeId, DEFAULT_CUSTOM_EPOCH); - } - - // Let Snowflake generate a nodeId - public Snowflake() { - this.nodeId = createNodeId(); - this.customEpoch = DEFAULT_CUSTOM_EPOCH; - } - - public synchronized long nextId() { - long currentTimestamp = timestamp(); - - if(currentTimestamp < lastTimestamp) { - throw new IllegalStateException("Invalid System Clock!"); - } - - if (currentTimestamp == lastTimestamp) { - sequence = (sequence + 1) & maxSequence; - if(sequence == 0) { - // Sequence Exhausted, wait till next millisecond. - currentTimestamp = waitNextMillis(currentTimestamp); - } - } else { - // reset sequence to start with zero for the next millisecond - sequence = 0; - } - - lastTimestamp = currentTimestamp; - - long id = currentTimestamp << (NODE_ID_BITS + SEQUENCE_BITS) - | (nodeId << SEQUENCE_BITS) - | sequence; - - return id; - } - - - // Get current timestamp in milliseconds, adjust for the custom epoch. 
- private long timestamp() { - return Instant.now().toEpochMilli() - customEpoch; - } - - // Block and wait till next millisecond - private long waitNextMillis(long currentTimestamp) { - while (currentTimestamp == lastTimestamp) { - currentTimestamp = timestamp(); - } - return currentTimestamp; - } - - private long createNodeId() { - long nodeId; - try { - StringBuilder sb = new StringBuilder(); - Enumeration networkInterfaces = NetworkInterface.getNetworkInterfaces(); - while (networkInterfaces.hasMoreElements()) { - NetworkInterface networkInterface = networkInterfaces.nextElement(); - byte[] mac = networkInterface.getHardwareAddress(); - if (mac != null) { - for(byte macPort: mac) { - sb.append(String.format("%02X", macPort)); - } - } - } - nodeId = sb.toString().hashCode(); - } catch (Exception ex) { - nodeId = (new SecureRandom().nextInt()); - } - nodeId = nodeId & maxNodeId; - return nodeId; - } - - public long[] parse(long id) { - long maskNodeId = ((1L << NODE_ID_BITS) - 1) << SEQUENCE_BITS; - long maskSequence = (1L << SEQUENCE_BITS) - 1; - - long timestamp = (id >> (NODE_ID_BITS + SEQUENCE_BITS)) + customEpoch; - long nodeId = (id & maskNodeId) >> SEQUENCE_BITS; - long sequence = id & maskSequence; - - return new long[]{timestamp, nodeId, sequence}; - } - - @Override - public String toString() { - return "Snowflake Settings [EPOCH_BITS=" + EPOCH_BITS + ", NODE_ID_BITS=" + NODE_ID_BITS - + ", SEQUENCE_BITS=" + SEQUENCE_BITS + ", CUSTOM_EPOCH=" + customEpoch - + ", NodeId=" + nodeId + "]"; - } -} \ No newline at end of file diff --git a/src/main/java/io/github/arompr/snowflake/SnowflakeIdGenerator.java b/src/main/java/io/github/arompr/snowflake/SnowflakeIdGenerator.java new file mode 100644 index 0000000..3f25433 --- /dev/null +++ b/src/main/java/io/github/arompr/snowflake/SnowflakeIdGenerator.java @@ -0,0 +1,171 @@ +package io.github.arompr.snowflake; + +import java.time.Instant; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Distributed Sequence Generator. + * Inspired by Twitter snowflake: + * https://github.com/twitter/snowflake/tree/snowflake-2010 + * + * This class should be used as a Singleton. + * Make sure that you create and reuse a Single instance of Snowflake per node + * in your distributed system cluster. + */ +public class SnowflakeIdGenerator { + + private static final int UNUSED_BITS = 1; // Sign bit, unused + private static final int EPOCH_BITS = 41; + private static final int NODE_ID_BITS = 10; + private static final int SEQUENCE_BITS = 12; + + private static final long maxNodeId = (1L << NODE_ID_BITS) - 1; + private static final long maxSequence = (1L << SEQUENCE_BITS) - 1; + + private static final long DEFAULT_CUSTOM_EPOCH = 1420070400000L; + + private final long nodeId; + private final long epoch; + + private final AtomicLong lastTimestamp = new AtomicLong(-1L); + private final AtomicLong sequence = new AtomicLong(0L); + + /** + * Creates a new Snowflake ID generator instance with a specified node ID and + * custom epoch. + * + *
+ * The node ID uniquely identifies this generator in a distributed system. It
+ * must be between 0 and {@code maxNodeId} (inclusive), otherwise an
+ * {@link IllegalArgumentException} is thrown. The custom epoch is used as the
+ * reference timestamp for generating IDs.
+ *
+ * @param nodeId the unique identifier for this node (0..maxNodeId)
+ * @param customEpoch the custom epoch (in milliseconds) to use as the reference
+ * timestamp
+ * @throws IllegalArgumentException if {@code nodeId} is out of range
+ */
+ public SnowflakeIdGenerator(long nodeId, long customEpoch) {
+ if (nodeId < 0 || nodeId > maxNodeId) {
+ throw new IllegalArgumentException(String.format("NodeId must be between %d and %d", 0, maxNodeId));
+ }
+ this.nodeId = nodeId;
+ this.epoch = customEpoch;
+ }
+
+ /**
+ * Creates a new Snowflake ID generator instance with a specified node ID.
+ *
+ * The generator will use the default custom epoch {@code DEFAULT_CUSTOM_EPOCH}
+ * as the reference timestamp. The node ID uniquely identifies this generator in
+ * a distributed system and must be between 0 and {@code maxNodeId} (inclusive).
+ *
+ * @param nodeId the unique identifier for this node (0..maxNodeId)
+ * @throws IllegalArgumentException if {@code nodeId} is out of range
+ */
+ public SnowflakeIdGenerator(long nodeId) {
+ this(nodeId, DEFAULT_CUSTOM_EPOCH);
+ }
+
+ /**
+ * Generates a new unique 64-bit Snowflake ID.
+ *
+ * This method is thread-safe and can be called concurrently by multiple threads
+ * from the same instance. It handles the following cases:
+ * <ul>
+ * <li>Ensures IDs are monotonically increasing even if the system clock moves
+ * backward.</li>
+ * <li>Resets the sequence number at the start of a new millisecond.</li>
+ * <li>Waits for the next millisecond if the sequence overflows within the same
+ * millisecond.</li>
+ * </ul>
+ * + * @return a unique 64-bit ID composed of timestamp, node ID, and sequence + * number. + */ + public long nextId() { + for (;;) { + long currentTimestamp = timestamp(); + long previousTimestamp = lastTimestamp.get(); + + currentTimestamp = handleClockRollBack(currentTimestamp, previousTimestamp); + + if (currentTimestamp == previousTimestamp) { + long sequenceForCurrentMillisecond = incrementSequence(); + + // Sequence overflow, wait for next millisecond + if (sequenceForCurrentMillisecond == 0) { + currentTimestamp = waitUntilNextMillis(previousTimestamp); + } + + if (lastTimestamp.compareAndSet(previousTimestamp, currentTimestamp)) { + return generateId(currentTimestamp, sequenceForCurrentMillisecond); + } + } else { + // New millisecond, reset sequence + if (isNewMillisecond(currentTimestamp, previousTimestamp)) { + return generateId(currentTimestamp, 0L); + } + } + } + } + + private boolean isNewMillisecond(long currentTimestamp, long previousTimestamp) { + return sequence.compareAndSet(sequence.get(), 0L) && + lastTimestamp.compareAndSet(previousTimestamp, currentTimestamp); + } + + private long incrementSequence() { + return (sequence.incrementAndGet()) & maxSequence; + } + + private long handleClockRollBack(long currentTimestamp, long previousTimestamp) { + if (currentTimestamp < previousTimestamp) { + currentTimestamp = waitUntilNextMillis(previousTimestamp); + } + + return currentTimestamp; + } + + private long generateId(long timestamp, long seq) { + return (timestamp << (NODE_ID_BITS + SEQUENCE_BITS)) + | (nodeId << SEQUENCE_BITS) + | seq; + } + + private long timestamp() { + return Instant.now().toEpochMilli() - epoch; + } + + private long waitUntilNextMillis(long lastTs) { + long ts = timestamp(); + while (ts <= lastTs) { + ts = timestamp(); + } + return ts; + } + + public long[] parse(long id) { + long maskNodeId = ((1L << NODE_ID_BITS) - 1) << SEQUENCE_BITS; + long maskSequence = (1L << SEQUENCE_BITS) - 1; + + long timestamp = (id >> (NODE_ID_BITS + SEQUENCE_BITS)) + epoch; + long nodeId = (id & maskNodeId) >> SEQUENCE_BITS; + long sequence = id & maskSequence; + + return new long[] { timestamp, nodeId, sequence }; + } + + @Override + public String toString() { + return "Snowflake Settings [EPOCH_BITS=" + EPOCH_BITS + + ", NODE_ID_BITS=" + NODE_ID_BITS + + ", SEQUENCE_BITS=" + SEQUENCE_BITS + + ", CUSTOM_EPOCH=" + epoch + + ", NodeId=" + nodeId + "]"; + } +} diff --git a/src/test/java/com/callicoder/snowflake/SnowflakePerformanceTest.java b/src/test/java/io/github/arompr/snowflake/SnowflakePerformanceTest.java similarity index 79% rename from src/test/java/com/callicoder/snowflake/SnowflakePerformanceTest.java rename to src/test/java/io/github/arompr/snowflake/SnowflakePerformanceTest.java index 36f4642..d155c93 100644 --- a/src/test/java/com/callicoder/snowflake/SnowflakePerformanceTest.java +++ b/src/test/java/io/github/arompr/snowflake/SnowflakePerformanceTest.java @@ -1,19 +1,16 @@ -package com.callicoder.snowflake; +package io.github.arompr.snowflake; import org.junit.jupiter.api.Test; -import java.time.Instant; import java.util.concurrent.*; -import static org.junit.jupiter.api.Assertions.*; - public class SnowflakePerformanceTest { @Test public void nextId_withSingleThread() { int iterations = 1000000; // 1 million - Snowflake snowflake = new Snowflake(897); + SnowflakeIdGenerator snowflake = new SnowflakeIdGenerator(897); long beginTimestamp = System.currentTimeMillis(); for (int i = 0; i < iterations; i++) { snowflake.nextId(); @@ -21,7 +18,7 @@ public void 
nextId_withSingleThread() { long endTimestamp = System.currentTimeMillis(); long cost = (endTimestamp - beginTimestamp); - long costMs = iterations/cost; + long costMs = iterations / cost; System.out.println("Single Thread:: IDs per ms: " + costMs); } @@ -33,10 +30,10 @@ public void nextId_withMultipleThreads() throws InterruptedException { ExecutorService executorService = Executors.newFixedThreadPool(numThreads); CountDownLatch latch = new CountDownLatch(numThreads); - Snowflake snowflake = new Snowflake(897); + SnowflakeIdGenerator snowflake = new SnowflakeIdGenerator(897); long beginTimestamp = System.currentTimeMillis(); - for(int i = 0; i < iterations; i++) { + for (int i = 0; i < iterations; i++) { executorService.submit(() -> { snowflake.nextId(); latch.countDown(); @@ -46,7 +43,7 @@ public void nextId_withMultipleThreads() throws InterruptedException { latch.await(); long endTimestamp = System.currentTimeMillis(); long cost = (endTimestamp - beginTimestamp); - long costMs = iterations/cost; + long costMs = iterations / cost; System.out.println(numThreads + " Threads:: IDs per ms: " + costMs); } -} \ No newline at end of file +} diff --git a/src/test/java/com/callicoder/snowflake/SnowflakeTest.java b/src/test/java/io/github/arompr/snowflake/SnowflakeTest.java similarity index 67% rename from src/test/java/com/callicoder/snowflake/SnowflakeTest.java rename to src/test/java/io/github/arompr/snowflake/SnowflakeTest.java index 9478c8d..6721693 100644 --- a/src/test/java/com/callicoder/snowflake/SnowflakeTest.java +++ b/src/test/java/io/github/arompr/snowflake/SnowflakeTest.java @@ -1,4 +1,4 @@ -package com.callicoder.snowflake; +package io.github.arompr.snowflake; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -13,7 +13,7 @@ public class SnowflakeTest { @Test public void nextId_shouldGenerateIdWithCorrectBitsFilled() { - Snowflake snowflake = new Snowflake(784); + SnowflakeIdGenerator snowflake = new SnowflakeIdGenerator(784); long beforeTimestamp = Instant.now().toEpochMilli(); @@ -28,46 +28,49 @@ public void nextId_shouldGenerateIdWithCorrectBitsFilled() { @Test public void nextId_shouldGenerateUniqueId() { - Snowflake snowflake = new Snowflake(234); + SnowflakeIdGenerator snowflake = new SnowflakeIdGenerator(234); int iterations = 5000; // Validate that the IDs are not same even if they are generated in the same ms long[] ids = new long[iterations]; - for(int i = 0; i < iterations; i++) { + for (int i = 0; i < iterations; i++) { ids[i] = snowflake.nextId(); } - for(int i = 0; i < ids.length; i++) { - for(int j = i+1; j < ids.length; j++) { + for (int i = 0; i < ids.length; i++) { + for (int j = i + 1; j < ids.length; j++) { assertFalse(ids[i] == ids[j]); } } } @Test - public void nextId_shouldGenerateUniqueIdIfCalledFromMultipleThreads() throws InterruptedException, ExecutionException { + public void nextId_shouldGenerateUniqueIdIfCalledFromMultipleThreads() + throws InterruptedException, ExecutionException { int numThreads = 50; ExecutorService executorService = Executors.newFixedThreadPool(numThreads); CountDownLatch latch = new CountDownLatch(numThreads); - Snowflake snowflake = new Snowflake(234); + SnowflakeIdGenerator snowflake = new SnowflakeIdGenerator(234); int iterations = 10000; - // Validate that the IDs are not same even if they are generated in the same ms in different threads + // Validate that the IDs are not same even if they are generated in the same ms + // in different threads Future[] 
futures = new Future[iterations]; - for(int i = 0; i < iterations; i++) { - futures[i] = executorService.submit(() -> { + for (int i = 0; i < iterations; i++) { + futures[i] = executorService.submit(() -> { long id = snowflake.nextId(); - latch.countDown();; + latch.countDown(); + ; return id; }); } latch.await(); - for(int i = 0; i < futures.length; i++) { - for(int j = i+1; j < futures.length; j++) { + for (int i = 0; i < futures.length; i++) { + for (int j = i + 1; j < futures.length; j++) { assertFalse(futures[i].get() == futures[j].get()); } } } -} \ No newline at end of file +} From e872c506d2e7d4d6dfe1fdfd5173daf2d5d8e796 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anthony=20Rompr=C3=A9?= <77812208+arompr@users.noreply.github.com> Date: Sun, 21 Dec 2025 14:29:08 -0500 Subject: [PATCH 2/5] Update Readme.md --- Readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Readme.md b/Readme.md index cbeed3d..eec8e82 100644 --- a/Readme.md +++ b/Readme.md @@ -13,7 +13,7 @@ The IDs are 64-bits in size and are generated with the combination of the follow The `Snowflake` class should be used as a singleton in your application. ```java -Snowflake s = new Snowflake(275) +SnowflakeIdGenerator s = new SnowflakeIdGenerator(275) s.nextId() ``` From 8d755067951f1119ac7afaa5bc2d28caa0cb7df2 Mon Sep 17 00:00:00 2001 From: Anthony Rompre Date: Sun, 21 Dec 2025 19:56:44 -0500 Subject: [PATCH 3/5] added github package deployment to pom.xml --- pom.xml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pom.xml b/pom.xml index a381382..3f7db43 100644 --- a/pom.xml +++ b/pom.xml @@ -15,6 +15,14 @@ 1.37 + + + github + GitHub Packages + https://maven.pkg.github.com/arompr/java-snowflake + + + org.junit.jupiter From bdeb9656e22ba468ed491eb50f444c17f77b2ed5 Mon Sep 17 00:00:00 2001 From: Anthony Rompre Date: Sun, 21 Dec 2025 20:37:55 -0500 Subject: [PATCH 4/5] moved benchmarks tests to tests folder --- pom.xml | 19 ------------------- .../arompr/snowflake}/BenchmarkRunner.java | 2 +- .../arompr/snowflake}/SnowflakeBenchmark.java | 3 +-- 3 files changed, 2 insertions(+), 22 deletions(-) rename src/{jmh/java/io/github/arompr/snowflake/benchmark => test/java/io/github/arompr/snowflake}/BenchmarkRunner.java (75%) rename src/{jmh/java/io/github/arompr/snowflake/benchmark => test/java/io/github/arompr/snowflake}/SnowflakeBenchmark.java (88%) diff --git a/pom.xml b/pom.xml index 3f7db43..b333374 100644 --- a/pom.xml +++ b/pom.xml @@ -71,25 +71,6 @@ - - org.codehaus.mojo - build-helper-maven-plugin - 3.5.0 - - - add-jmh-source - generate-sources - - add-source - - - - src/jmh/java - - - - - diff --git a/src/jmh/java/io/github/arompr/snowflake/benchmark/BenchmarkRunner.java b/src/test/java/io/github/arompr/snowflake/BenchmarkRunner.java similarity index 75% rename from src/jmh/java/io/github/arompr/snowflake/benchmark/BenchmarkRunner.java rename to src/test/java/io/github/arompr/snowflake/BenchmarkRunner.java index 093b9ee..efc2126 100644 --- a/src/jmh/java/io/github/arompr/snowflake/benchmark/BenchmarkRunner.java +++ b/src/test/java/io/github/arompr/snowflake/BenchmarkRunner.java @@ -1,4 +1,4 @@ -package io.github.arompr.snowflake.benchmark; +package io.github.arompr.snowflake; public class BenchmarkRunner { public static void main(String[] args) throws Exception { diff --git a/src/jmh/java/io/github/arompr/snowflake/benchmark/SnowflakeBenchmark.java b/src/test/java/io/github/arompr/snowflake/SnowflakeBenchmark.java similarity index 88% rename from 
src/jmh/java/io/github/arompr/snowflake/benchmark/SnowflakeBenchmark.java rename to src/test/java/io/github/arompr/snowflake/SnowflakeBenchmark.java index ba01208..cd7a85a 100644 --- a/src/jmh/java/io/github/arompr/snowflake/benchmark/SnowflakeBenchmark.java +++ b/src/test/java/io/github/arompr/snowflake/SnowflakeBenchmark.java @@ -1,6 +1,5 @@ -package io.github.arompr.snowflake.benchmark; +package io.github.arompr.snowflake; -import io.github.arompr.snowflake.SnowflakeIdGenerator; import org.openjdk.jmh.annotations.*; import java.util.concurrent.TimeUnit; From a3911aedddd6454c8f9b6c5f51bc61b068f7c1a7 Mon Sep 17 00:00:00 2001 From: Anthony Rompre Date: Sun, 21 Dec 2025 20:41:10 -0500 Subject: [PATCH 5/5] updated version to 1.0.1 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index b333374..b29d3c0 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ io.github.arompr snowflake - 1.0.0 + 1.0.1 UTF-8
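For reviewers trying the series out locally, here is a minimal usage sketch of the refactored generator. It is illustrative only: the `Example` class is hypothetical, and it assumes the `io.github.arompr.snowflake.SnowflakeIdGenerator` API added in PATCH 1/5 (single-argument node-ID constructor, `nextId()`, and `parse()` returning `[timestamp, nodeId, sequence]`).

```java
import io.github.arompr.snowflake.SnowflakeIdGenerator;

public class Example {
    public static void main(String[] args) {
        // Reuse a single generator per node; node IDs occupy 10 bits, so 0..1023.
        SnowflakeIdGenerator generator = new SnowflakeIdGenerator(275);

        long id = generator.nextId();

        // parse() decomposes an ID into [timestamp, nodeId, sequence].
        long[] parts = generator.parse(id);
        System.out.println("id=" + id
                + " timestamp=" + parts[0]
                + " nodeId=" + parts[1]
                + " sequence=" + parts[2]);
    }
}
```

The JMH benchmarks moved under `src/test` in PATCH 4/5 can be launched through `BenchmarkRunner`, whose `main` method simply delegates to `org.openjdk.jmh.Main`.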