mirror of
https://github.com/tcsenpai/Cascade.git
synced 2025-06-02 16:00:04 +00:00
Initial commit
This commit is contained in:
commit
1d4bf6058e
6
.eslintignore
Normal file
6
.eslintignore
Normal file
@ -0,0 +1,6 @@
|
||||
node_modules
|
||||
diagrams
|
||||
data
|
||||
dist
|
||||
.github
|
||||
.vscode
|
29
.eslintrc.js
Normal file
29
.eslintrc.js
Normal file
@ -0,0 +1,29 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
commonjs: true,
|
||||
es6: true,
|
||||
node: true,
|
||||
},
|
||||
globals: {
|
||||
NodeJS: "readonly",
|
||||
},
|
||||
extends: "eslint:recommended",
|
||||
parserOptions: {
|
||||
ecmaVersion: 2020,
|
||||
sourceType: "module",
|
||||
},
|
||||
parser: "@typescript-eslint/parser",
|
||||
plugins: ["@typescript-eslint"],
|
||||
rules: {
|
||||
// indent: ["error", 4, { SwitchCase: 1 }],
|
||||
"linebreak-style": ["error", "unix"],
|
||||
quotes: ["error", "double"],
|
||||
semi: ["error", "never"],
|
||||
// "no-console": "warn",
|
||||
"no-unused-vars": ["warn"],
|
||||
"switch-colon-spacing": ["error", { after: true, before: false }],
|
||||
"no-extra-semi": "error",
|
||||
"comma-dangle": ["error", "always-multiline"],
|
||||
"no-restricted-imports": ["warn"],
|
||||
},
|
||||
}
|
169
.gitignore
vendored
Normal file
169
.gitignore
vendored
Normal file
@ -0,0 +1,169 @@
|
||||
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
|
||||
|
||||
# Logs
|
||||
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
|
||||
.cache/
|
||||
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
|
||||
.temp
|
||||
.cache
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.*
|
13
.prettierrc
Normal file
13
.prettierrc
Normal file
@ -0,0 +1,13 @@
|
||||
{
|
||||
"$schema": "http://json.schemastore.org/prettierrc",
|
||||
"arrowParens": "avoid",
|
||||
"bracketSpacing": true,
|
||||
"endOfLine": "lf",
|
||||
"printWidth": 80,
|
||||
"singleQuote": false,
|
||||
"tabWidth": 4,
|
||||
"semi": false,
|
||||
"trailingComma": "all",
|
||||
"useTabs": false
|
||||
}
|
BIN
Cascade Hashing Algorithm.pdf
Normal file
BIN
Cascade Hashing Algorithm.pdf
Normal file
Binary file not shown.
86
README.md
Normal file
86
README.md
Normal file
@ -0,0 +1,86 @@
|
||||
# Cascade Hashing Algorithm
|
||||
|
||||
## Overview
|
||||
|
||||
Cascade is an innovative hashing algorithm that offers a unique blend of security and efficiency. It's designed for enhanced protection against common cryptographic attacks and features a flexible approach with iterative transformations, dynamic salting, and optional seed integration. Perfect for applications requiring robust hashing capabilities.
|
||||
|
||||
## Features
|
||||
|
||||
- **Dynamic Salting**: Enhances security against precomputed hash attacks.
|
||||
- **Optional Seed Integration**: Adds an extra layer of customization and security.
|
||||
- **Iterative Process**: Ensures a high degree of input sensitivity and diffusion.
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Ensure you have Node.js installed on your system. Cascade is implemented in TypeScript, so make sure you have TypeScript set up in your development environment.
|
||||
|
||||
### Installation
|
||||
|
||||
Clone the repository to your local machine:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/thecookingsenpai/Cascade
|
||||
cd Cascade
|
||||
```
|
||||
|
||||
Install the necessary dependencies (there should be none anyway):
|
||||
|
||||
```bash
|
||||
bun install
|
||||
```
|
||||
|
||||
Or
|
||||
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
Or
|
||||
|
||||
```bash
|
||||
yarn install
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
You can test Cascade by doing:
|
||||
|
||||
```bash
|
||||
bun src/index.ts
|
||||
```
|
||||
|
||||
Or
|
||||
|
||||
```bash
|
||||
tsx src/index.ts
|
||||
```
|
||||
|
||||
Or
|
||||
|
||||
```bash
|
||||
yarn src/index.ts
|
||||
```
|
||||
|
||||
To use Cascade in your project, import the `Cascade` class from the `cascade.ts` file and create an instance:
|
||||
|
||||
```typescript
|
||||
import Cascade from './cascade';
|
||||
|
||||
const hasher = new Cascade();
|
||||
const result = hasher.hash("YourInputString");
|
||||
console.log("Hash:", result.hash);
|
||||
console.log("Salt:", result.salt);
|
||||
```
|
||||
|
||||
You can also specify custom parameters like prime number, salt size, number of iterations, and seed:
|
||||
|
||||
```typescript
|
||||
const hasher = new Cascade(37, 20, 5, "optional_seed");
|
||||
const result = hasher.hash("YourInputString", "optional_salt");
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the MIT License.
|
14
package.json
Normal file
14
package.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "cascade",
|
||||
"module": "cascade.ts",
|
||||
"type": "module",
|
||||
"devDependencies": {
|
||||
"bun-types": "latest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
191
src/cascade.html
Normal file
191
src/cascade.html
Normal file
@ -0,0 +1,191 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<title>Cascade Hashing Algorithm</title>
|
||||
<style>
|
||||
body {
|
||||
font-family: Arial, sans-serif;
|
||||
}
|
||||
|
||||
h1,
|
||||
h2 {
|
||||
color: #333;
|
||||
}
|
||||
|
||||
p {
|
||||
color: #666;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h1>Cascade Hashing Algorithm</h1>
|
||||
|
||||
<!-- Abstract -->
|
||||
<h2>Abstract</h2>
|
||||
<p>This paper introduces "Cascade," a novel hashing algorithm designed to provide a unique blend of security,
|
||||
efficiency, and versatility. Distinguished by its iterative processing and integration of dynamic elements like
|
||||
salts and seeds, Cascade stands out in its approach to transforming input data into fixed-size, non-reversible
|
||||
hash values. This document outlines the algorithm's structure, its distinctive features, and potential
|
||||
applications.</p>
|
||||
|
||||
<h2>Introduction</h2>
|
||||
<h3>Background</h3>
|
||||
<p>In the realm of digital security, hashing algorithms play a pivotal role. These algorithms are fundamental to a
|
||||
variety of applications, ranging from secure password storage to ensuring the integrity of data transmission. By
|
||||
transforming data into a fixed-size string of characters, hashing algorithms create unique 'fingerprints' for
|
||||
data, which are crucial for verification and comparison purposes.</p>
|
||||
|
||||
<p>Hashing is distinct from encryption in several key ways. While encryption is a two-way process allowing for the
|
||||
original data to be recovered, hashing is a one-way process designed to prevent the recovery of the original
|
||||
data from its hash value. This one-way nature makes hashing algorithms particularly valuable for situations
|
||||
where data privacy and integrity are paramount.</p>
|
||||
|
||||
<p>Over the years, the evolution of hashing algorithms has been driven by the dual needs of robust security and
|
||||
computational efficiency. Early hashing functions, while pioneering, were soon outpaced by the increasing
|
||||
capabilities of computational hardware and advancements in cryptographic analysis. This led to the development
|
||||
of more sophisticated algorithms, such as the MD5 and SHA families. However, vulnerabilities discovered in some
|
||||
of these algorithms, such as MD5 and SHA-1, highlighted the need for continual innovation in hashing technology.
|
||||
</p>
|
||||
|
||||
<p>The primary goal of a hashing algorithm is to achieve a balance between several key attributes:
|
||||
- <strong>Collision Resistance</strong>: The ability to minimize the probability that two different inputs will
|
||||
produce the same hash output.
|
||||
- <strong>Speed and Efficiency</strong>: The capability to process data quickly and efficiently, which is
|
||||
especially important for applications that handle large volumes of data.
|
||||
- <strong>Avalanche Effect</strong>: Ensuring that a small change in the input results in a significant and
|
||||
unpredictable change in the output hash.</p>
|
||||
|
||||
<p>The introduction of "Cascade" represents the next step in the evolution of hashing algorithms. Designed with
|
||||
modern security challenges in mind, Cascade aims to address the limitations of previous hashing functions while
|
||||
offering enhanced security and efficiency. The development of Cascade is a response to the ever-growing need for
|
||||
reliable, robust, and secure ways of handling digital data in an increasingly interconnected world.</p>
|
||||
|
||||
<h3>Purpose</h3>
|
||||
<p>The purpose of the "Cascade" hashing algorithm is to provide an advanced solution that meets the modern demands
|
||||
of data security. In the digital age, where data breaches and cyber threats are increasingly prevalent, there is
|
||||
a pressing need for more secure, efficient, and reliable methods of protecting data. Cascade is developed with
|
||||
the intent to offer improved security features, particularly in terms of collision resistance and sensitivity to
|
||||
input changes, while maintaining high computational efficiency. It aims to serve as a versatile tool in various
|
||||
applications, from secure data storage to verifying the integrity of digital transactions.</p>
|
||||
|
||||
<h3>Scope</h3>
|
||||
<p>This paper will detail the design and functionality of the Cascade hashing algorithm. It will cover the
|
||||
algorithm's unique approach to processing input data, including its use of dynamic elements such as salts and
|
||||
seeds, and its iterative hashing process. The paper will also provide a comprehensive analysis of Cascade's
|
||||
security features, examining its resistance to common cryptographic attacks and its efficiency in various
|
||||
operational contexts. Finally, potential applications and advantages of Cascade over existing hashing methods
|
||||
will be explored, highlighting its suitability for diverse security needs in the digital landscape.</p>
|
||||
|
||||
<h2>Algorithm Overview</h2>
|
||||
<p>The core principle of Cascade involves a combination of iterative processing and dynamic elements integration, distinguishing it from traditional hashing functions. The algorithm is structured to ensure that even minor variations in the input lead to significantly different and unpredictable hash outputs, a property known as the avalanche effect.</p>
|
||||
|
||||
<h3>Fundamental Components</h3>
|
||||
<ul>
|
||||
<li><strong>Dynamic Salting</strong>: Enhances security against precomputed hash attacks.</li>
|
||||
<li><strong>Optional Seed Integration</strong>: Adds an extra layer of customization and security.</li>
|
||||
<li><strong>Iterative Process</strong>: Ensures a high degree of input sensitivity and diffusion.</li>
|
||||
</ul>
|
||||
|
||||
<h3>The Hashing Process</h3>
|
||||
<p>The hashing process in Cascade can be summarized in several key steps:</p>
|
||||
<ol>
|
||||
<li><strong>Input Preparation</strong>: The input data is prepared for hashing, involving normalization and initial processing steps. This preparation ensures consistent handling of different types of input data.</li>
|
||||
<li><strong>Salt Generation and Application</strong>: A salt is either generated or provided, and then applied to the prepared input. This step is critical for enhancing the uniqueness and security of the hash.</li>
|
||||
<li><strong>Seed-Based Transformation</strong> (Optional): If a seed value is provided, it is integrated during this phase. This integration is done through a complex mixing function that ensures the seed significantly influences the hashing process, adding an extra layer of customization and security.</li>
|
||||
<li><strong>Iterative Hashing and Transformation</strong>: The core of Cascade's functionality lies in its iterative processing. Each iteration applies a set of cryptographic operations to the input, thoroughly mixing and transforming the data.</li>
|
||||
<li><strong>Final Hash Computation</strong>: After the iterative process, the transformed data undergoes a final set of operations to produce the fixed-size hash output. This output retains the essential characteristics of the input data in a non-reversible form.</li>
|
||||
</ol>
|
||||
|
||||
<p>"Cascade" is designed to be versatile and adaptable, capable of handling various types of data while providing robust security features. The algorithm's iterative nature and dynamic component integration set it apart in terms of security, efficiency, and applicability in today's digital landscape.</p>
|
||||
<h2>Detailed Algorithm Design</h2>
|
||||
<h3>Input Processing</h3>
|
||||
<p>The first step in the Cascade hashing process involves preparing the input data to ensure consistency and optimal handling. This preparation includes two critical phases: standardization and normalization.</p>
|
||||
<ul>
|
||||
<li><strong>Standardization</strong>: Regardless of the input type (text, numerical data, binary content), it is first standardized into a uniform format. This standardization typically involves converting the input into a byte array. The goal is to create a consistent starting point for the hashing process, ensuring that inputs with the same content always yield identical byte representations.</li>
|
||||
<li><strong>Normalization</strong>: The next step is normalization, which is particularly important for textual data. Normalization includes converting characters to a standard form (like UTF-8 encoding) and handling case sensitivity. This step is crucial for maintaining the integrity of the hashing process, especially when dealing with inputs that might have multiple valid representations (like text with accented characters).</li>
|
||||
</ul>
|
||||
|
||||
<p>Once the input is standardized and normalized, it undergoes a pre-hash processing phase, which includes the application of salts and optional seeds.</p>
|
||||
<ul>
|
||||
<li><strong>Salting</strong>: A critical aspect of Cascade is its use of dynamic salts. If a salt is not provided externally, the algorithm generates a random salt of a specified length. This salt is then concatenated with the input data. The inclusion of a salt ensures that even identical inputs produce distinct hash values, significantly enhancing security against pre-computed hash attacks (like rainbow tables).</li>
|
||||
<li><strong>Seed Integration</strong> (Optional): If a seed value is provided, it is integrated during this phase. The seed acts as an additional modifier, altering the hash computation in a unique way based on the seed's value. This integration is done through a complex mixing function that ensures the seed significantly influences the hashing process, adding an extra layer of customization and security.</li>
|
||||
</ul>
|
||||
|
||||
<p>The final step in input processing is preparing the data for the iterative hashing stages of Cascade. This preparation involves:</p>
|
||||
<ul>
|
||||
<li><strong>Padding</strong>: To ensure that the input data aligns with the fixed-size requirements of the hashing process, padding is applied. The padding scheme in Cascade is designed to be deterministic yet less predictable, influenced by a basic hash of the input itself. This approach ensures that the padding varies with the input but remains consistent for the same input, preserving the reproducibility of the hash.</li>
|
||||
<li><strong>Initial Hash Value Setup</strong>: Before entering the iterative stages, an initial hash value is set up. This value is derived from the prepared input and acts as the starting point for the iterative transformations.</li>
|
||||
</ul>
|
||||
|
||||
<h3>Salt Generation and Usage</h3>
|
||||
<p>In the Cascade algorithm, salts are generated dynamically for each input unless a salt is provided externally. This dynamic generation ensures that each hash computation uses a unique salt, significantly increasing the difficulty of attacks that rely on precomputed hash databases, like rainbow table attacks.</p>
|
||||
<ul>
|
||||
<li><strong>Length and Randomness</strong>: The length of the salt is configurable, allowing for flexibility based on specific security requirements. Cascade generates salts using a cryptographically secure random number generator to ensure high entropy and unpredictability.</li>
|
||||
<li><strong>Concatenation with Input</strong>: Once generated, the salt is concatenated with the input data. This combination alters the initial state of the input, ensuring that even identical inputs will produce different hash outputs when different salts are used.</li>
|
||||
<li><strong>Consistent Use in Repetitive Hashing</strong>: For scenarios where reproducibility of the hash is required (like password verification), the same salt must be used across hashing instances. This necessitates storing the salt alongside the hash output, though it does not need to be kept secret like a cryptographic key.</li>
|
||||
</ul>
|
||||
|
||||
<h3>Seed Integration</h3>
|
||||
<p>The inclusion of a seed parameter in the Cascade hashing algorithm serves as an additional layer of customization and security. The seed acts as an extra input modifier in the hashing process. When provided, it is combined with the input (and salt, if used) in a way that significantly alters the course of the hash computation. This ensures that the same input, when hashed with different seeds, will produce distinct hash outputs.</p>
|
||||
<ul>
|
||||
<li><strong>Customization and Security</strong>: By allowing users to specify a seed, Cascade offers a way to customize the hashing process for different applications or datasets. This can be particularly useful in environments where an additional level of security is required, as it adds complexity that attackers would need to overcome.</li>
|
||||
<li><strong>Mixing with Input Data</strong>: In Cascade, the seed is integrated through a complex mixing function. This function combines the seed with the input (and salt) in a non-linear and intricate manner, ensuring that the seed significantly influences the resulting hash.</li>
|
||||
<li><strong>Non-Linearity and Complexity</strong>: The mixing process involves non-linear operations, such as bitwise rotations and variable shifts, which are influenced by the content of the seed. This approach ensures that the seed's impact on the hash is substantial and cryptographically sound, making the hash more resistant to various forms of cryptanalysis.</li>
|
||||
<li><strong>Ensuring Reproducibility</strong>: When reproducing a hash (for instance, during password verification), the same seed must be used to ensure that the hash output matches. This means that, similar to the salt, the seed (if used) should be stored or known for future hash computations.</li>
|
||||
</ul>
|
||||
<h3>Iterative Steps, Transformations, and Calculations</h3>
|
||||
<p>Each iteration of the Cascade algorithm plays a vital role in the transformation of the input data, contributing to the final hash output. The key components of each iteration include:</p>
|
||||
|
||||
<ol>
|
||||
<li><strong>Iteration Initialization</strong>: At the start of each iteration, the current state of the data is taken as the input. This state evolves with each iteration, reflecting the cumulative effect of the transformations applied.</li>
|
||||
<li><strong>Transformation Steps</strong>: Within each iteration, the data undergoes a series of transformations:
|
||||
<ul>
|
||||
<li>Bitwise Operations: Essential cryptographic operations like AND, OR, XOR, and NOT are applied to each byte, contributing to data dispersion and security.</li>
|
||||
<li>Shifting and Rotating: Bit shifting and rotating are used to redistribute the bits of each byte, crucial for achieving the avalanche effect where minor input changes lead to significant output differences.</li>
|
||||
<li>Modular Arithmetic: The use of modular arithmetic, often involving large prime numbers, helps maintain the data within a specific size range while adding mathematical complexity.</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li><strong>Combination with Seed</strong>: If a seed value is used, it is intricately combined with the data at each iteration. This process ensures that the seed significantly influences the hash output.</li>
|
||||
<li><strong>Accumulation of Changes</strong>: The changes made in each iteration accumulate, ensuring that the final hash value is a complex combination of all the transformations applied throughout the process.</li>
|
||||
</ol>
|
||||
|
||||
<h3>Final Hash Computation</h3>
|
||||
<p>The final hash computation involves consolidating the outcomes of all iterations into a single hash value. This process includes:</p>
|
||||
|
||||
<ol>
|
||||
<li><strong>Aggregation of Iterative Results</strong>: The transformed data from each iteration is combined to form a comprehensive result, capturing the impact of the entire iterative process.</li>
|
||||
<li><strong>Final Transformation</strong>: Additional cryptographic operations are applied to this aggregated result to ensure a high level of security and to prepare it for the final output.</li>
|
||||
<li><strong>Output Formatting</strong>: The final hash is then formatted to meet the predetermined size requirements, typically represented as a hexadecimal string for consistency and ease of use.</li>
|
||||
</ol>
|
||||
|
||||
<p>This comprehensive approach to hashing, characterized by iterative processing and complex transformations, positions Cascade as a robust and secure hashing solution, suitable for a wide range of applications in the digital domain.</p>
|
||||
|
||||
<h2>Security Analysis</h2>
|
||||
|
||||
<h3>Hash Strength: Resistance to Common Attacks</h3>
|
||||
<p>The "Cascade" hashing algorithm is designed to provide robust resistance against common cryptographic attacks, particularly pre-image and collision attacks. Its complex iterative process and the integration of dynamic elements like salt and seeds make reverse-engineering or predicting the input from the hash output computationally infeasible, thereby bolstering its defense against pre-image attacks. Moreover, the algorithm's sensitivity to input changes and seed variability significantly reduces the likelihood of second pre-image attacks, where a different input produces the same hash output as a given input.</p>
|
||||
|
||||
<h3>Collision Resistance: Mitigating the Risk of Hash Collisions</h3>
|
||||
<p>Cascade's design incorporates several features that contribute to its strong collision resistance. The dynamic salting mechanism ensures that even identical inputs produce distinct hash values, reducing the probability of different inputs yielding the same hash output. The iterative nature of the algorithm and the use of modular arithmetic in the final hash computation contribute to an even distribution of hash values, minimizing collision occurrences.</p>
|
||||
|
||||
<h3>Avalanche Effect: Sensitivity to Input Changes</h3>
|
||||
<p>One of the critical properties of a robust hashing algorithm is the avalanche effect, where minor changes in the input result in substantial and unpredictable changes in the output. Cascade exhibits a strong avalanche effect due to its complex bitwise operations, bit shifting, and non-linear transformations in each iteration. The impact of the seed and dynamic salt further amplifies this effect, ensuring that any alteration in the input leads to a drastically different hash output, thus enhancing the algorithm's overall security.</p>
|
||||
|
||||
<h2>Conclusion</h2>
|
||||
|
||||
<h3>Summary of Cascade's Key Features</h3>
|
||||
<p>"Cascade" represents a novel approach in the field of hashing algorithms. Its design is characterized by its unique combination of iterative transformations, dynamic salting, and optional seed integration. The algorithm stands out for its design choices aimed at enhancing security and efficiency. Key features of Cascade include robust security measures against common cryptographic attacks, a versatile and flexible approach to handling various types of data, and the potential to significantly impact the evolution of hashing algorithms, especially in areas requiring enhanced security and customized hashing.</p>
|
||||
|
||||
<h3>Future Work and Research Directions</h3>
|
||||
<p>The development of Cascade opens several avenues for future work and research. Key areas include empirical testing and validation, performance optimization, and adaptation and integration into existing systems and emerging technologies. The ongoing scrutiny and testing in practical applications will be essential to validate and potentially refine Cascade's security properties, ensuring its reliability and effectiveness as a hashing solution in the digital era.</p>
|
||||
|
||||
|
||||
</body>
|
||||
|
||||
</html>
|
148
src/cascade.ts
Normal file
148
src/cascade.ts
Normal file
@ -0,0 +1,148 @@
|
||||
/* HUGE DISCLAIMER: NOT TESTED | MADE FOR FUN | IT IS AN EXPERIMENT | still...it works surprisingly well! */
|
||||
|
||||
import { randomBytes } from "crypto"
|
||||
|
||||
export default class Cascade {
|
||||
// Class properties with type annotations
|
||||
P: number
|
||||
hashSize: number = 32
|
||||
saltSize: number
|
||||
modulo: bigint = BigInt(
|
||||
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
|
||||
)
|
||||
iterations: number
|
||||
seed: string | null
|
||||
|
||||
constructor(
|
||||
prime: number = 31,
|
||||
saltSize: number = 16,
|
||||
iterations: number = 1,
|
||||
seed: string | null = null,
|
||||
) {
|
||||
this.P = prime
|
||||
this.saltSize = saltSize
|
||||
this.iterations = iterations
|
||||
this.seed = seed
|
||||
}
|
||||
|
||||
generateSalt(): Buffer {
|
||||
return randomBytes(this.saltSize)
|
||||
}
|
||||
generatePadding(input: string): string {
|
||||
let padding = ""
|
||||
let inputHash = this.simpleHash(input)
|
||||
|
||||
for (let i = 0; i < this.hashSize - this.saltSize; i++) {
|
||||
let paddingChar = (inputHash + i) % 256
|
||||
padding += String.fromCharCode(paddingChar)
|
||||
}
|
||||
|
||||
return padding
|
||||
}
|
||||
|
||||
private simpleHash(input: string): number {
|
||||
let hash = 0
|
||||
for (const char of input) {
|
||||
hash = (hash + char.charCodeAt(0)) % 256
|
||||
}
|
||||
return hash
|
||||
}
|
||||
|
||||
interleaveWithSalt(inputBuffer: Buffer, saltBuffer: Buffer): Buffer {
|
||||
let interleavedBuffer = Buffer.alloc(
|
||||
inputBuffer.length + saltBuffer.length,
|
||||
)
|
||||
let inputIndex = 0,
|
||||
saltIndex = 0
|
||||
|
||||
for (let i = 0; i < interleavedBuffer.length; i++) {
|
||||
if (i % 2 === 0 && inputIndex < inputBuffer.length) {
|
||||
interleavedBuffer[i] = inputBuffer[inputIndex++]
|
||||
} else if (saltIndex < saltBuffer.length) {
|
||||
interleavedBuffer[i] = saltBuffer[saltIndex++]
|
||||
}
|
||||
}
|
||||
return interleavedBuffer
|
||||
}
|
||||
mixSeed(combinedBuffer: Buffer): Buffer {
|
||||
if (this.seed === null) {
|
||||
return combinedBuffer
|
||||
}
|
||||
|
||||
const seedBuffer = Buffer.from(this.seed)
|
||||
let extendedSeedBuffer = seedBuffer
|
||||
while (extendedSeedBuffer.length < combinedBuffer.length) {
|
||||
extendedSeedBuffer = Buffer.concat([extendedSeedBuffer, seedBuffer])
|
||||
}
|
||||
|
||||
for (let i = 0; i < combinedBuffer.length; i++) {
|
||||
// Enhanced mixing: Combine XOR with bitwise rotations
|
||||
let mix = combinedBuffer[i] ^ extendedSeedBuffer[i]
|
||||
mix = (mix << i % 8) | (mix >>> (8 - (i % 8))) // Bitwise rotation
|
||||
combinedBuffer[i] = mix
|
||||
}
|
||||
|
||||
return combinedBuffer
|
||||
}
|
||||
|
||||
hash(input: string, providedSalt?: string): { hash: string; salt: string } {
    // Hash `input` after interleaving it with a salt (and mixing in the
    // optional seed). Returns the fixed-width hex digest together with
    // the hex-encoded salt that was actually used.
    if (typeof input !== "string") {
        throw new Error("Invalid input type")
    }

    // Resolve the salt: a caller-provided UTF-8 string, or a fresh one.
    let salt: Buffer
    if (providedSalt === undefined) {
        salt = this.generateSalt()
    } else {
        if (typeof providedSalt !== "string") {
            throw new Error(
                "Invalid salt format: Salt must be a UTF-8 string",
            )
        }
        salt = Buffer.from(providedSalt, "utf8")
    }

    let workBuffer = this.interleaveWithSalt(Buffer.from(input), salt)
    if (this.seed !== null) {
        workBuffer = this.mixSeed(workBuffer)
    }

    // djb2-style accumulation over BigInt — hash*33 + byte + index,
    // reduced modulo this.modulo — repeated for the configured rounds.
    let hashValue = BigInt(0)
    for (let round = 0; round < this.iterations; round++) {
        for (const [index, byte] of workBuffer.entries()) {
            hashValue =
                ((hashValue << BigInt(5)) +
                    hashValue +
                    BigInt(byte) +
                    BigInt(index)) %
                this.modulo
        }

        // Feed the intermediate digest back in as the next round's input.
        // NOTE(review): if hashValue.toString(16) has odd length,
        // Buffer.from(..., "hex") silently drops the trailing nibble —
        // confirm this lossy feedback is intended before changing it
        // (fixing it would alter all produced hashes).
        if (round + 1 < this.iterations) {
            workBuffer = Buffer.from(hashValue.toString(16), "hex")
        }
    }

    // Final whitening XOR, then force the output to exactly hashSize bytes
    // of hex (left-pad short values, truncate long ones).
    const finalHash = hashValue ^ BigInt("0x123456789ABCDEF123456789ABCDEF12")
    const hashHexString = finalHash
        .toString(16)
        .padStart(this.hashSize * 2, "0")
        .substring(0, this.hashSize * 2)

    return { hash: hashHexString, salt: salt.toString("hex") }
}
|
||||
}
|
||||
|
||||
/* NOTE Usage --------------------------------
|
||||
|
||||
const hasher = new Cascade(37, 20, 5, "optional_seed")
|
||||
const result = hasher.hash("Hello, world!", "optional_salt")
|
||||
console.log("Salt:", result.salt)
|
||||
console.log("Hash:", result.hash)
|
||||
|
||||
*/
|
10
src/index.ts
Normal file
10
src/index.ts
Normal file
@ -0,0 +1,10 @@
|
||||
import Cascade from "./cascade"
|
||||
|
||||
async function go() {
|
||||
const hasher = new Cascade(37, 20, 5, "optional_seed")
|
||||
const result = hasher.hash("Hello, world!", "opional_salt")
|
||||
console.log("Salt:", result.salt)
|
||||
console.log("Hash:", result.hash)
|
||||
}
|
||||
|
||||
go()
|
110
src/style.css
Normal file
110
src/style.css
Normal file
@ -0,0 +1,110 @@
|
||||
/* Base page styles: typography, neutral background, reset margins. */
body {
    font-family: 'Arial', sans-serif;
    margin: 0;
    padding: 0;
    background: #f4f4f4;
    color: #333;
    line-height: 1.6;
}

/* Centered content wrapper; overflow:hidden clears the floated columns. */
.container {
    width: 80%;
    margin: auto;
    overflow: hidden;
}

/* Site header: dark bar with float-based horizontal layout. */
header {
    background: #333;
    color: #fff;
    padding-top: 30px;
    min-height: 70px;
    border-bottom: #bbb 1px solid;
}

header a {
    color: #fff;
    text-decoration: none;
    text-transform: uppercase;
    font-size: 16px;
}

/* Horizontal nav list; overflow:hidden contains the floated items. */
header ul {
    padding: 0;
    margin: 0;
    list-style: none;
    overflow: hidden;
}

header li {
    float: left;
    display: inline;
    padding: 0 20px 0 20px;
}

/* Branding (site title) pinned left, nav pinned right. */
header #branding {
    float: left;
}

header #branding h1 {
    margin: 0;
}

header nav {
    float: right;
    margin-top: 10px;
}

/* Accent color for highlighted text and the active nav item. */
header .highlight, header .current a {
    color: #e8491d;
    font-weight: bold;
}

header a:hover {
    color: #ffffff;
    font-weight: bold;
}

/* Two-column body: 70% main article floated left, 30% sidebar right. */
article#main-col {
    float: left;
    width: 70%;
}

aside#sidebar {
    float: right;
    width: 30%;
    margin-top: 10px;
}

article#main-col h1 {
    color: #333;
}

/* Headings */
h1, h2, h3 {
    margin-bottom: 20px;
    color: #333;
}

/* Links */
a {
    color: #333;
    text-decoration: none;
}

/* Global list/paragraph resets. */
ul {
    list-style: none;
    padding: 0;
}

p {
    margin-bottom: 10px;
}

/* Footer */
footer {
    padding: 20px;
    margin-top: 20px;
    color: #ffffff;
    background-color: #333;
    text-align: center;
}
|
24
tsconfig.json
Normal file
24
tsconfig.json
Normal file
@ -0,0 +1,24 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"declaration": true,
|
||||
"removeComments": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"experimentalDecorators": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"target": "ESNext",
|
||||
"sourceMap": true,
|
||||
"sourceRoot": "/",
|
||||
"inlineSources": true,
|
||||
"outDir": "./dist",
|
||||
"baseUrl": "./",
|
||||
"types": ["node"],
|
||||
"incremental": true,
|
||||
"skipLibCheck": true,
|
||||
"strictNullChecks": false,
|
||||
"noImplicitAny": false,
|
||||
"strictBindCallApply": false,
|
||||
"forceConsistentCasingInFileNames": false,
|
||||
"noFallthroughCasesInSwitch": false
|
||||
}
|
||||
}
|
Loading…
x
Reference in New Issue
Block a user