Custom HashMap Implementation in Java
I implemented a CustomHashMap in Java, and would like to know if my approach is good or not. Below I am sharing the complete code of my CustomHashMap:
CustomHashMap.java
import java.util.Iterator;
import java.util.Objects;
import com.custom.utils.*;

public class CustomHashMap<K, T> implements Iterable<Entity> {

    private static final int capacity_init = 10;
    static final float DEFAULT_LOAD_FACTOR = 0.75f;

    private int indexCount = 0;
    Entity[] bucket = null;
    Entity[] bucketItr = null;

    public CustomHashMap() {
        init(capacity_init);
    }

    public CustomHashMap(int capacity) {
        init(capacity);
    }

    private void init(int intcapacity) {
        bucket = new Entity[intcapacity];
    }

    private int hash(Object key) {
        return key.hashCode() >> 1;
    }

    public void add(K key, T value) {
        int hashcode = hash(key);
        if (indexCount == bucket.length) {
            Entity[] obj = bucket;
            int k = (int) (obj.length * 2);
            bucket = new Entity[k];
            System.arraycopy(obj, 0, bucket, 0, indexCount - 1);
        }
        if (bucket[hashcode] == null) {
            indexCount++;
            Entity head = new Entity(key, value, null);
            bucket[hashcode] = head;
        } else {
            Entity temp = bucket[hashcode];
            while (temp.next != null) {
                temp = temp.next;
            }
            Entity neEntity = new Entity(key, value, null);
            neEntity.next = temp;
            bucket[hashcode] = neEntity;
        }
        bucketItr = new Entity[bucket.length];
        System.arraycopy(bucket, 0, bucketItr, 0, bucket.length);
    }

    public T get(K key) {
        int hashcode = hash(key);
        if (bucket[hashcode] == null) {
            return null;
        } else {
            Entity temp = bucket[hashcode];
            while (temp != null) {
                if (temp.key.equals(key)) {
                    return (T) temp.value;
                }
                temp = temp.next;
            }
        }
        return null;
    }

    public void delete(K key) {
        int hashcode = hash(key);
        Entity deleteNode = bucket[hashcode];
        deleteNode = null;
        bucket[hashcode] = deleteNode;
    }

    @Override
    public Iterator<Entity> iterator() {
        bucketItr = new Entity[indexCount];
        int entryCount = 0;
        for (Entity e : bucket) {
            if (Objects.nonNull(e)) {
                bucketItr[entryCount] = e;
                entryCount++;
            }
        }
        return new MapItr();
    }

    class MapItr implements Iterator<Entity> {

        int count = 0;
        Entity node = bucketItr[0];

        @Override
        public boolean hasNext() {
            return count < bucketItr.length;
        }

        @Override
        public Entity next() {
            Entity result = null;
            if (hasNext()) {
                result = node;
                if (Objects.nonNull(node) && node.next != null) {
                    node = node.next;
                } else {
                    count++;
                    if (count < bucketItr.length) {
                        node = bucketItr[count];
                    }
                }
                return result;
            } else {
                return null;
            }
        }
    }
}
Entity.java
public class Entity<K, T> {

    public K key;
    public T value;
    public Entity next;

    public Entity(K key, T value, Entity next) {
        this.key = key;
        this.value = value;
        this.next = next;
    }
}
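For reference, here is a minimal sketch of how the class is exercised (the keys are arbitrary examples, and the Demo class is just a scratch harness; note that with the current hash() the keys must be small enough for hashCode() >> 1 to land inside the bucket array):

public class Demo {
    public static void main(String[] args) {
        CustomHashMap<Integer, String> map = new CustomHashMap<>(16);
        map.add(1, "one");                  // hash(1) = 1 >> 1 = 0
        map.add(2, "two");                  // hash(2) = 2 >> 1 = 1
        System.out.println(map.get(2));     // prints "two"
        for (Entity e : map) {              // raw Entity, as declared by Iterable<Entity>
            System.out.println(e.key + " -> " + e.value);
        }
        map.delete(1);                      // nulls out the entire bucket for key 1
    }
}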
Please suggest improvements and anything else you feel I need to add.
java hash-map
asked Jan 24 at 13:18 by Simmant (edited Jan 24 at 13:31 by Graipher)
2 Answers
Here are my comments, in decreasing order of severity:
1) BUGS
- Hash function: your hash function does not refer in any way to the (changing) bucket capacity. The main problem is that it may produce values outside the index bounds of the bucket array (the current function can even produce negative values), and then your add() method would throw an exception. But even if you guard against such cases, the function needs to take the current capacity into account in order to spread the values as evenly as possible across the buckets, as sketched after the note below.

- Load factor: the code never uses DEFAULT_LOAD_FACTOR, nor does it let the user override the value, and, more importantly, it is not considered when deciding whether the bucket array needs to be enlarged. In fact, the whole logic of that decision seems wrong: you enlarge the array only when every bucket slot is occupied, so in theory it is the 11th add() call that would trigger the growth. This logic needs to be redesigned around the total number of Entity instances across all the buckets and, of course, the load factor.

Note: depending on the hash function implementation, you may need to rehash the entire hash table. If the hash function depends on the capacity, it may produce different results for existing keys once the capacity changes, so the simple System.arraycopy() will not suffice. This is in fact what the JDK implementation does.
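For illustration, a minimal sketch of a bounds-safe hash plus a load-factor-driven resize. The names buckets, size, put() and resize() are illustrative, not from the posted code, and the field is assumed to be a parameterized Entity<K, T>[]:

    // Mask off the sign bit so a negative hashCode() cannot yield a negative index,
    // then reduce modulo the current capacity so the index is always in bounds.
    private int hash(Object key) {
        return (key.hashCode() & 0x7fffffff) % buckets.length;
    }

    public void put(K key, T value) {
        if ((float) size / buckets.length > DEFAULT_LOAD_FACTOR) {
            resize();   // grow before the chains get long, not when every slot is full
        }
        // ... insert into buckets[hash(key)] and increment size ...
    }

    // Because hash() depends on buckets.length, every existing entry must be
    // rehashed into its new slot; System.arraycopy() alone would misplace them.
    @SuppressWarnings("unchecked")
    private void resize() {
        Entity<K, T>[] old = buckets;
        buckets = (Entity<K, T>[]) new Entity[old.length * 2];
        for (Entity<K, T> head : old) {
            for (Entity<K, T> e = head; e != null; e = e.next) {
                int i = hash(e.key);
                buckets[i] = new Entity<>(e.key, e.value, buckets[i]); // prepend to new chain
            }
        }
    }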
2) Performance
- Adding a new Entity to the linked list involves walking the entire list, which seems inefficient. Perhaps you could keep the last node of each list in a separate array of the same length as bucket?

- The use of bucketItr seems redundant. First of all, you copy bucket into bucketItr every time add() is called; System.arraycopy() is efficient, but that does not mean it should be used injudiciously. Moreover, the first line of iterator() re-initializes bucketItr from scratch, making the copy operation in add() completely pointless. More fundamentally, I don't see the reason for this array at all: couldn't you loop over bucket inside MapItr and simply skip the null entries, as sketched below?
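A sketch of such an iterator (assuming a parameterized Entity<K, T>[] buckets field on the enclosing map; the names are illustrative):

    class MapItr implements Iterator<Entity<K, T>> {

        private int index = 0;                       // current slot in buckets
        private Entity<K, T> node = advance(null);   // first entry, or null if the map is empty

        // Follow the current chain if possible, otherwise scan ahead for the next
        // non-empty slot; returns null once the whole table has been consumed.
        private Entity<K, T> advance(Entity<K, T> current) {
            if (current != null && current.next != null) {
                return current.next;
            }
            while (index < buckets.length) {
                Entity<K, T> head = buckets[index++];
                if (head != null) {
                    return head;
                }
            }
            return null;
        }

        @Override
        public boolean hasNext() {
            return node != null;
        }

        @Override
        public Entity<K, T> next() {
            if (node == null) {
                throw new java.util.NoSuchElementException();
            }
            Entity<K, T> result = node;
            node = advance(node);
            return result;
        }
    }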
3) Best Practices
- References to the generic type Entity<K, T> should be parameterized: throughout the code you refer to the raw Entity type, without the <K, T> type arguments. While this should not produce a bug here (since the add() method only accepts arguments of the correct types), it is still considered bad practice and indeed produces compiler warnings; a sketch follows this list.

- The use of Objects.nonNull() is redundant: why not the simple and clear node != null? And while we are there, you first assign node to result and only then check it for null, so you could theoretically return a null result.
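A sketch of the parameterized declarations. Java forbids generic array creation, so the array has to be created raw and cast (hence the @SuppressWarnings):

    // CustomHashMap.java
    public class CustomHashMap<K, T> implements Iterable<Entity<K, T>> {

        private Entity<K, T>[] bucket;

        @SuppressWarnings("unchecked")
        private void init(int capacity) {
            bucket = (Entity<K, T>[]) new Entity[capacity];  // unchecked but safe here
        }
        // ...
    }

    // Entity.java
    public class Entity<K, T> {
        public K key;
        public T value;
        public Entity<K, T> next;   // parameterized instead of raw
        // ...
    }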
4) Naming Conventions

- An array of buckets should be named buckets, or perhaps bucket_array, don't you think?
answered Jan 24 at 15:30 by Sharon Ben Asher (accepted; edited Jan 24 at 15:45 by Julien Rousé)

Comment from Simmant (Jan 24 at 16:31): Thank you so much for such valuable comments. I have started working on the listed items and will share the updated code once I am done with all the points.
A few things to fix:

- Entity should be private and nested.

- Entity<K, T> next is a valid field inside Entity.java; you are already using generics.

- <K, V> is a more standard name than <K, T>.

- Entity[] bucket = null; can be marked as final.

- You don't need a private init: a constructor can call another constructor (constructor chaining), as sketched after this list.

- The hash method does not check for null.

- add could be renamed to put.

- In add, System.arraycopy(obj, 0, bucket, 0, indexCount-1); is buggy: you need to rehash and recalculate the index of every entry.

- In add, you are advancing temp, which gains you nothing; in fact it appears buggy. Try looking up how a stack is implemented on a linked list.

- In get, your if statement is redundant; the code works perfectly fine without the if block.
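A sketch of the constructor chaining and a null-safe hash (DEFAULT_CAPACITY and buckets are illustrative names, not from the posted code):

    public CustomHashMap() {
        this(DEFAULT_CAPACITY);   // delegate to the other constructor instead of init()
    }

    @SuppressWarnings("unchecked")
    public CustomHashMap(int capacity) {
        buckets = (Entity<K, T>[]) new Entity[capacity];
    }

    // Treat a null key explicitly (java.util.HashMap, for comparison, maps it to hash 0).
    private int hash(Object key) {
        return key == null ? 0 : (key.hashCode() & 0x7fffffff) % buckets.length;
    }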
- Your delete is buggy: if you assign null to a bucket slot, you lose all the other entries mapped to that bucket. A possible fix is sketched below.
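For illustration, a delete that unlinks only the matching node (again assuming a parameterized Entity<K, T>[] buckets field and a bounds-safe hash()):

    public void delete(K key) {
        int index = hash(key);
        Entity<K, T> current = buckets[index];
        Entity<K, T> prev = null;
        while (current != null) {
            if (current.key.equals(key)) {
                if (prev == null) {
                    buckets[index] = current.next;   // matched node is the chain head
                } else {
                    prev.next = current.next;        // bypass the matched node
                }
                return;
            }
            prev = current;
            current = current.next;
        }
    }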
- Your advance logic in next(),

    count++;
    if (count < bucketItr.length)
        node = bucketItr[count];

should be a while loop that keeps advancing until node != null.
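Something along these lines (a sketch; hasNext() would then naturally become node != null):

    count++;
    node = null;
    while (count < bucketItr.length && (node = bucketItr[count]) == null) {
        count++;   // skip empty slots until an entry is found or the array ends
    }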
answered Jan 30 at 16:34 by Tanvi Jaywant