Custom HashMap Implementation in Java

The name of the pictureThe name of the pictureThe name of the pictureClash Royale CLAN TAG#URR8PPP





.everyoneloves__top-leaderboard:empty,.everyoneloves__mid-leaderboard:empty margin-bottom:0;







up vote
1
down vote

favorite












I implemented a CustomHashMap in Java, and would like to know if my approach is good or not.



Below I am sharing the complete code of my CustomHashMap:



CustomHashMap.java



import com.custom.utils.*;

import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Objects;

public class CustomHashMap<K, T> implements Iterable<Entity>

private static final int capacity_init = 10;
static final float DEFAULT_LOAD_FACTOR = 0.75f;
private int indexCount = 0;

Entity bucket = null;

Entity bucketItr = null;

public CustomHashMap()
init(capacity_init);


public CustomHashMap(int capacity)
init(capacity);


private void init(int intcapacity)

bucket = new Entity[intcapacity];


private int hash(Object key)

return key.hashCode() >> 1;




public void add(K key, T value)

int hashcode = hash(key);

if(indexCount == bucket.length)
Entity obj = bucket;
int k = (int) (obj.length*2);
bucket = new Entity[k];
System.arraycopy(obj, 0, bucket, 0, indexCount-1);


if (bucket[hashcode] == null)
indexCount++;
Entity head = new Entity(key, value, null);
bucket[hashcode] = head;
else
Entity temp = bucket[hashcode];
while (temp.next != null)
temp = temp.next;

Entity neEntity = new Entity(key, value, null);
neEntity.next = temp;
bucket[hashcode] = neEntity;



bucketItr = new Entity[bucket.length];
System.arraycopy(bucket, 0, bucketItr, 0, bucket.length);





public T get(K key)
int hashcode = hash(key);
if (bucket[hashcode] == null)
return null;
else
Entity temp = bucket[hashcode];
while (temp != null)

if (temp.key.equals(key))
return (T) temp.value;

temp = temp.next;

return null;




public void delete(K key)
int hashcode = hash(key);
Entity deleteNode = bucket[hashcode];
deleteNode = null;
bucket[hashcode] = deleteNode;


@Override
public Iterator<Entity> iterator()
// TODO Auto-generated method stub
bucketItr = new Entity[indexCount];
int entryCount = 0;
for (Entity e : bucket)

if (Objects.nonNull(e))
bucketItr[entryCount] = e;
entryCount++;




return new MapItr();


class MapItr implements Iterator<Entity>

int count = 0;
Entity node = bucketItr[0];

@Override
public boolean hasNext()
// TODO Auto-generated method stub

return count < bucketItr.length;


@Override
public Entity next()

Entity result = null;

if (hasNext())
result = node;
if ( Objects.nonNull(node) && node.next != null)
node = node.next;
else
count++;
if (count < bucketItr.length)
node = bucketItr[count];


return result;
else
return null;










Entity.java



/**
 * One node of a hash-map bucket chain: a key/value pair plus a link to the
 * next node sharing the same bucket.
 *
 * @param <K> key type
 * @param <T> value type
 */
public class Entity<K, T> {
    /** Lookup key for this mapping. */
    public K key;
    /** Value mapped to {@code key}; mutable so a re-put can overwrite it. */
    public T value;
    /** Next node in the same bucket's chain, or null at the end. (Fix: was a raw {@code Entity}.) */
    public Entity<K, T> next;

    /**
     * Creates a chain node holding one key/value pair.
     *
     * @param key   the lookup key
     * @param value the mapped value
     * @param next  the node this one links to, or null if it ends the chain
     */
    public Entity(K key, T value, Entity<K, T> next) {
        this.key = key;
        this.value = value;
        this.next = next;
    }
}





Please suggest improvements and things which you feel I need to add to it.







share|improve this question



























    up vote
    1
    down vote

    favorite












    I implemented a CustomHashMap in Java, and would like to know if my approach is good or not.



    Below I am sharing the complete code of my CustomHashMap:



    CustomHashMap.java



    import java.util.Iterator;
    import java.util.Objects;
    import com.custom.utils.*;

    public class CustomHashMap<K, T> implements Iterable<Entity>

    private static final int capacity_init = 10;
    static final float DEFAULT_LOAD_FACTOR = 0.75f;
    private int indexCount = 0;

    Entity bucket = null;

    Entity bucketItr = null;

    public CustomHashMap()
    init(capacity_init);


    public CustomHashMap(int capacity)
    init(capacity);


    private void init(int intcapacity)

    bucket = new Entity[intcapacity];


    private int hash(Object key)

    return key.hashCode() >> 1;




    public void add(K key, T value)

    int hashcode = hash(key);

    if(indexCount == bucket.length)
    Entity obj = bucket;
    int k = (int) (obj.length*2);
    bucket = new Entity[k];
    System.arraycopy(obj, 0, bucket, 0, indexCount-1);


    if (bucket[hashcode] == null)
    indexCount++;
    Entity head = new Entity(key, value, null);
    bucket[hashcode] = head;
    else
    Entity temp = bucket[hashcode];
    while (temp.next != null)
    temp = temp.next;

    Entity neEntity = new Entity(key, value, null);
    neEntity.next = temp;
    bucket[hashcode] = neEntity;



    bucketItr = new Entity[bucket.length];
    System.arraycopy(bucket, 0, bucketItr, 0, bucket.length);





    public T get(K key)
    int hashcode = hash(key);
    if (bucket[hashcode] == null)
    return null;
    else
    Entity temp = bucket[hashcode];
    while (temp != null)

    if (temp.key.equals(key))
    return (T) temp.value;

    temp = temp.next;

    return null;




    public void delete(K key)
    int hashcode = hash(key);
    Entity deleteNode = bucket[hashcode];
    deleteNode = null;
    bucket[hashcode] = deleteNode;


    @Override
    public Iterator<Entity> iterator()
    // TODO Auto-generated method stub
    bucketItr = new Entity[indexCount];
    int entryCount = 0;
    for (Entity e : bucket)

    if (Objects.nonNull(e))
    bucketItr[entryCount] = e;
    entryCount++;




    return new MapItr();


    class MapItr implements Iterator<Entity>

    int count = 0;
    Entity node = bucketItr[0];

    @Override
    public boolean hasNext()
    // TODO Auto-generated method stub

    return count < bucketItr.length;


    @Override
    public Entity next()

    Entity result = null;

    if (hasNext())
    result = node;
    if ( Objects.nonNull(node) && node.next != null)
    node = node.next;
    else
    count++;
    if (count < bucketItr.length)
    node = bucketItr[count];


    return result;
    else
    return null;










    Entity.java



    public class Entity<K, T> 
    public K key;
    public T value;
    public Entity next;

    public Entity(K key, T value, Entity next)

    this.key = key;
    this.value = value;
    this.next = next;






    Please suggest improvements and things which you feel I need to add to it.







    share|improve this question























      up vote
      1
      down vote

      favorite









      up vote
      1
      down vote

      favorite











      I implemented a CustomHashMap in Java, and would like to know if my approach is good or not.



      Below I am sharing the complete code of my CustomHashMap:



      CustomHashMap.java



      import java.util.Iterator;
      import java.util.Objects;
      import com.custom.utils.*;

      public class CustomHashMap<K, T> implements Iterable<Entity>

      private static final int capacity_init = 10;
      static final float DEFAULT_LOAD_FACTOR = 0.75f;
      private int indexCount = 0;

      Entity bucket = null;

      Entity bucketItr = null;

      public CustomHashMap()
      init(capacity_init);


      public CustomHashMap(int capacity)
      init(capacity);


      private void init(int intcapacity)

      bucket = new Entity[intcapacity];


      private int hash(Object key)

      return key.hashCode() >> 1;




      public void add(K key, T value)

      int hashcode = hash(key);

      if(indexCount == bucket.length)
      Entity obj = bucket;
      int k = (int) (obj.length*2);
      bucket = new Entity[k];
      System.arraycopy(obj, 0, bucket, 0, indexCount-1);


      if (bucket[hashcode] == null)
      indexCount++;
      Entity head = new Entity(key, value, null);
      bucket[hashcode] = head;
      else
      Entity temp = bucket[hashcode];
      while (temp.next != null)
      temp = temp.next;

      Entity neEntity = new Entity(key, value, null);
      neEntity.next = temp;
      bucket[hashcode] = neEntity;



      bucketItr = new Entity[bucket.length];
      System.arraycopy(bucket, 0, bucketItr, 0, bucket.length);





      public T get(K key)
      int hashcode = hash(key);
      if (bucket[hashcode] == null)
      return null;
      else
      Entity temp = bucket[hashcode];
      while (temp != null)

      if (temp.key.equals(key))
      return (T) temp.value;

      temp = temp.next;

      return null;




      public void delete(K key)
      int hashcode = hash(key);
      Entity deleteNode = bucket[hashcode];
      deleteNode = null;
      bucket[hashcode] = deleteNode;


      @Override
      public Iterator<Entity> iterator()
      // TODO Auto-generated method stub
      bucketItr = new Entity[indexCount];
      int entryCount = 0;
      for (Entity e : bucket)

      if (Objects.nonNull(e))
      bucketItr[entryCount] = e;
      entryCount++;




      return new MapItr();


      class MapItr implements Iterator<Entity>

      int count = 0;
      Entity node = bucketItr[0];

      @Override
      public boolean hasNext()
      // TODO Auto-generated method stub

      return count < bucketItr.length;


      @Override
      public Entity next()

      Entity result = null;

      if (hasNext())
      result = node;
      if ( Objects.nonNull(node) && node.next != null)
      node = node.next;
      else
      count++;
      if (count < bucketItr.length)
      node = bucketItr[count];


      return result;
      else
      return null;










      Entity.java



      public class Entity<K, T> 
      public K key;
      public T value;
      public Entity next;

      public Entity(K key, T value, Entity next)

      this.key = key;
      this.value = value;
      this.next = next;






      Please suggest improvements and things which you feel I need to add to it.







      share|improve this question













      I implemented a CustomHashMap in Java, and would like to know if my approach is good or not.



      Below I am sharing the complete code of my CustomHashMap:



      CustomHashMap.java



      import java.util.Iterator;
      import java.util.Objects;
      import com.custom.utils.*;

      public class CustomHashMap<K, T> implements Iterable<Entity>

      private static final int capacity_init = 10;
      static final float DEFAULT_LOAD_FACTOR = 0.75f;
      private int indexCount = 0;

      Entity bucket = null;

      Entity bucketItr = null;

      public CustomHashMap()
      init(capacity_init);


      public CustomHashMap(int capacity)
      init(capacity);


      private void init(int intcapacity)

      bucket = new Entity[intcapacity];


      private int hash(Object key)

      return key.hashCode() >> 1;




      public void add(K key, T value)

      int hashcode = hash(key);

      if(indexCount == bucket.length)
      Entity obj = bucket;
      int k = (int) (obj.length*2);
      bucket = new Entity[k];
      System.arraycopy(obj, 0, bucket, 0, indexCount-1);


      if (bucket[hashcode] == null)
      indexCount++;
      Entity head = new Entity(key, value, null);
      bucket[hashcode] = head;
      else
      Entity temp = bucket[hashcode];
      while (temp.next != null)
      temp = temp.next;

      Entity neEntity = new Entity(key, value, null);
      neEntity.next = temp;
      bucket[hashcode] = neEntity;



      bucketItr = new Entity[bucket.length];
      System.arraycopy(bucket, 0, bucketItr, 0, bucket.length);





      public T get(K key)
      int hashcode = hash(key);
      if (bucket[hashcode] == null)
      return null;
      else
      Entity temp = bucket[hashcode];
      while (temp != null)

      if (temp.key.equals(key))
      return (T) temp.value;

      temp = temp.next;

      return null;




      public void delete(K key)
      int hashcode = hash(key);
      Entity deleteNode = bucket[hashcode];
      deleteNode = null;
      bucket[hashcode] = deleteNode;


      @Override
      public Iterator<Entity> iterator()
      // TODO Auto-generated method stub
      bucketItr = new Entity[indexCount];
      int entryCount = 0;
      for (Entity e : bucket)

      if (Objects.nonNull(e))
      bucketItr[entryCount] = e;
      entryCount++;




      return new MapItr();


      class MapItr implements Iterator<Entity>

      int count = 0;
      Entity node = bucketItr[0];

      @Override
      public boolean hasNext()
      // TODO Auto-generated method stub

      return count < bucketItr.length;


      @Override
      public Entity next()

      Entity result = null;

      if (hasNext())
      result = node;
      if ( Objects.nonNull(node) && node.next != null)
      node = node.next;
      else
      count++;
      if (count < bucketItr.length)
      node = bucketItr[count];


      return result;
      else
      return null;










      Entity.java



      public class Entity<K, T> 
      public K key;
      public T value;
      public Entity next;

      public Entity(K key, T value, Entity next)

      this.key = key;
      this.value = value;
      this.next = next;






      Please suggest improvements and things which you feel I need to add to it.









      share|improve this question












      share|improve this question




      share|improve this question








      edited Jan 24 at 13:31









      Graipher

      20.5k43081




      20.5k43081









      asked Jan 24 at 13:18









      Simmant

      1085




      1085




















          2 Answers
          2






          active

          oldest

          votes

















          up vote
          3
          down vote



          accepted










          Here are my comments, in decreasing order of severity:



          1) BUGS



          1. Hash function: your hash function does not refer in any way to the bucket (changing) capacity. The main problem with that is that the function may produce values that are outside of the index bounds of the bucket array (current function may even produce negative values) and then your add() method would throw an exception. but even if you protect from such cases, the function needs to refer to the current capacity in order to spread the values as evenly as possible across the buckets.


          2. Load factor: The code does not refer to the DEFAULT_LOAD_FACTOR: nor does it allow the user to override this value, and more importantly, it doesn't consider this when deciding if the bucket array needs to be enlarged. In fact, the whole logic of this decision seems to me to be wrong: when all entries in the bucket are full, you decide to enlarge the size. so theoretically, on the 11th add() call, you would enlarge the size. This logic needs to be redesigned and it needs to be based on the total number of Entity instances in all the buckets and of course on the load factor.


          Note: depending on the hash function implementation, you may need to rehash the entire hash table because if the hash function depends on the capacity, then it may produce different results for existing keys when the capacity changes. So, the simple System.arraycopy() might not suffice. This is actually what is done in the JDK implementation.



          2) Performance



          1. Adding a new Entity to the linked list involves going through the entire list. This seems inefficient. Perhaps you can save the last node in a separate array that has same length as bucket?


          2. The usage of bucketItr seems redundant. First of all, you copy bucket into bucketItr every time add() is called. System.arraycopy() is efficient but that doesn't mean it need not be used judiciously. Moreover, the first line in iterator() initializes bucketItr from scratch, making the copy operation in add() completely redundant. but moreover, I don't see the reason for this array at all. couldn't you loop on bucket in MapItr and just ignore null entries?


          3) Best Practices



          1. References to generic type Entity<K,T> should be parameterized: Throughout the code, you refer to the Entity raw type, without the <K, T> generic parameters. While it seems that this should not produce a bug, (since add() method only accept args of the correct types, this is still considered bad practice (and indeed produces compile warning).


          2. The use of Objects.nonNull() is redundant. why not use the simple and clear node != null? and while we are there, you first assign node to result and then ask about null. so theoretically, you could return null result.


          4) Naming Conventions



          1. an array of buckets should be named buckets or perhaps bucket_array don't you think?





          share|improve this answer























          • Thank you so much for such valuable comments, I started working on listed items and will share the updated code once I done with all points.
            – Simmant
            Jan 24 at 16:31

















          up vote
          1
          down vote













          A few things to fix.



          1. Entity should be private and nested.


          2. Entity<K, T> next is valid field inside Entity.java, you are using generics.


          3. <K, V> is a more standard name than <K, T>


          4. Entity bucket = null; can be marked as final.


          5. You don't need a private init: a constructor can call another constructor via this(...) — this is called constructor chaining.


          6. hash method does not check for null


          7. add can be renamed to put


          8. In add, System.arraycopy(obj, 0, bucket, 0, indexCount-1); is BUGGY. You need to rehash and recalculate index.


          9. In add you are advancing temp. Its giving you no benefit. Infact it appears buggy. Try looking up for stack using linkedlist.


          10. In get, your if statement is redundant. code works perfectly fine without if block


          11. Your delete is buggy. If you assign null to a bucket then you lose all other entries mapped to that bucket.



          12. Your

          count++;
          if (count < bucketItr.length)
          node = bucketItr[count];



          Should be a while loop, iterating until node != null.






          share|improve this answer





















            Your Answer




            StackExchange.ifUsing("editor", function ()
            return StackExchange.using("mathjaxEditing", function ()
            StackExchange.MarkdownEditor.creationCallbacks.add(function (editor, postfix)
            StackExchange.mathjaxEditing.prepareWmdForMathJax(editor, postfix, [["\$", "\$"]]);
            );
            );
            , "mathjax-editing");

            StackExchange.ifUsing("editor", function ()
            StackExchange.using("externalEditor", function ()
            StackExchange.using("snippets", function ()
            StackExchange.snippets.init();
            );
            );
            , "code-snippets");

            StackExchange.ready(function()
            var channelOptions =
            tags: "".split(" "),
            id: "196"
            ;
            initTagRenderer("".split(" "), "".split(" "), channelOptions);

            StackExchange.using("externalEditor", function()
            // Have to fire editor after snippets, if snippets enabled
            if (StackExchange.settings.snippets.snippetsEnabled)
            StackExchange.using("snippets", function()
            createEditor();
            );

            else
            createEditor();

            );

            function createEditor()
            StackExchange.prepareEditor(
            heartbeatType: 'answer',
            convertImagesToLinks: false,
            noModals: false,
            showLowRepImageUploadWarning: true,
            reputationToPostImages: null,
            bindNavPrevention: true,
            postfix: "",
            onDemand: true,
            discardSelector: ".discard-answer"
            ,immediatelyShowMarkdownHelp:true
            );



            );








             

            draft saved


            draft discarded


















            StackExchange.ready(
            function ()
            StackExchange.openid.initPostLogin('.new-post-login', 'https%3a%2f%2fcodereview.stackexchange.com%2fquestions%2f185874%2fcustom-hashmap-implementation-in-java%23new-answer', 'question_page');

            );

            Post as a guest






























            2 Answers
            2






            active

            oldest

            votes








            2 Answers
            2






            active

            oldest

            votes









            active

            oldest

            votes






            active

            oldest

            votes








            up vote
            3
            down vote



            accepted










            Here are my comments, in decreasing order of severity:



            1) BUGS



            1. Hash function: your hash function does not refer in any way to the bucket (changing) capacity. The main problem with that is that the function may produce values that are outside of the index bounds of the bucket array (current function may even produce negative values) and then your add() method would throw an exception. but even if you protect from such cases, the function needs to refer to the current capacity in order to spread the values as evenly as possible across the buckets.


            2. Load factor: The code does not refer to the DEFAULT_LOAD_FACTOR: nor does it allow the user to override this value, and more importantly, it doesn't consider this when deciding if the bucket array needs to be enlarged. In fact, the whole logic of this decision seems to me to be wrong: when all entries in the bucket are full, you decide to enlarge the size. so theoretically, on the 11th add() call, you would enlarge the size. This logic needs to be redesigned and it needs to be based on the total number of Entity instances in all the buckets and of course on the load factor.


            Note: depending on the hash function implementation, you may need to rehash the entire hash table because if the hash function depends on the capacity, then it may produce different results for existing keys when the capacity changes. So, the simple System.arraycopy() might not suffice. This is actually what is done in the JDK implementation.



            2) Performance



            1. Adding a new Entity to the linked list involves going through the entire list. This seems inefficient. Perhaps you can save the last node in a separate array that has same length as bucket?


            2. The usage of bucketItr seems redundant. First of all, you copy bucket into bucketItr every time add() is called. System.arraycopy() is efficient but that doesn't mean it need not be used judiciously. Moreover, the first line in iterator() initializes bucketItr from scratch, making the copy operation in add() completely redundant. but moreover, I don't see the reason for this array at all. couldn't you loop on bucket in MapItr and just ignore null entries?


            3) Best Practices



            1. References to generic type Entity<K,T> should be parameterized: Throughout the code, you refer to the Entity raw type, without the <K, T> generic parameters. While it seems that this should not produce a bug, (since add() method only accept args of the correct types, this is still considered bad practice (and indeed produces compile warning).


            2. The use of Objects.nonNull() is redundant. why not use the simple and clear node != null? and while we are there, you first assign node to result and then ask about null. so theoretically, you could return null result.


            4) Naming Conventions



            1. an array of buckets should be named buckets or perhaps bucket_array don't you think?





            share|improve this answer























            • Thank you so much for such valuable comments, I started working on listed items and will share the updated code once I done with all points.
              – Simmant
              Jan 24 at 16:31














            up vote
            3
            down vote



            accepted










            Here are my comments, in decreasing order of severity:



            1) BUGS



            1. Hash function: your hash function does not refer in any way to the bucket (changing) capacity. The main problem with that is that the function may produce values that are outside of the index bounds of the bucket array (current function may even produce negative values) and then your add() method would throw an exception. but even if you protect from such cases, the function needs to refer to the current capacity in order to spread the values as evenly as possible across the buckets.


            2. Load factor: The code does not refer to the DEFAULT_LOAD_FACTOR: nor does it allow the user to override this value, and more importantly, it doesn't consider this when deciding if the bucket array needs to be enlarged. In fact, the whole logic of this decision seems to me to be wrong: when all entries in the bucket are full, you decide to enlarge the size. so theoretically, on the 11th add() call, you would enlarge the size. This logic needs to be redesigned and it needs to be based on the total number of Entity instances in all the buckets and of course on the load factor.


            Note: depending on the hash function implementation, you may need to rehash the entire hash table because if the hash function depends on the capacity, then it may produce different results for existing keys when the capacity changes. So, the simple System.arraycopy() might not suffice. This is actually what is done in the JDK implementation.



            2) Performance



            1. Adding a new Entity to the linked list involves going through the entire list. This seems inefficient. Perhaps you can save the last node in a separate array that has same length as bucket?


            2. The usage of bucketItr seems redundant. First of all, you copy bucket into bucketItr every time add() is called. System.arraycopy() is efficient but that doesn't mean it need not be used judiciously. Moreover, the first line in iterator() initializes bucketItr from scratch, making the copy operation in add() completely redundant. but moreover, I don't see the reason for this array at all. couldn't you loop on bucket in MapItr and just ignore null entries?


            3) Best Practices



            1. References to generic type Entity<K,T> should be parameterized: Throughout the code, you refer to the Entity raw type, without the <K, T> generic parameters. While it seems that this should not produce a bug, (since add() method only accept args of the correct types, this is still considered bad practice (and indeed produces compile warning).


            2. The use of Objects.nonNull() is redundant. why not use the simple and clear node != null? and while we are there, you first assign node to result and then ask about null. so theoretically, you could return null result.


            4) Naming Conventions



            1. an array of buckets should be named buckets or perhaps bucket_array don't you think?





            share|improve this answer























            • Thank you so much for such valuable comments, I started working on listed items and will share the updated code once I done with all points.
              – Simmant
              Jan 24 at 16:31












            up vote
            3
            down vote



            accepted







            up vote
            3
            down vote



            accepted






            Here are my comments, in decreasing order of severity:



            1) BUGS



            1. Hash function: your hash function does not refer in any way to the bucket (changing) capacity. The main problem with that is that the function may produce values that are outside of the index bounds of the bucket array (current function may even produce negative values) and then your add() method would throw an exception. but even if you protect from such cases, the function needs to refer to the current capacity in order to spread the values as evenly as possible across the buckets.


            2. Load factor: The code does not refer to the DEFAULT_LOAD_FACTOR: nor does it allow the user to override this value, and more importantly, it doesn't consider this when deciding if the bucket array needs to be enlarged. In fact, the whole logic of this decision seems to me to be wrong: when all entries in the bucket are full, you decide to enlarge the size. so theoretically, on the 11th add() call, you would enlarge the size. This logic needs to be redesigned and it needs to be based on the total number of Entity instances in all the buckets and of course on the load factor.


            Note: depending on the hash function implementation, you may need to rehash the entire hash table because if the hash function depends on the capacity, then it may produce different results for existing keys when the capacity changes. So, the simple System.arraycopy() might not suffice. This is actually what is done in the JDK implementation.



            2) Performance



            1. Adding a new Entity to the linked list involves going through the entire list. This seems inefficient. Perhaps you can save the last node in a separate array that has same length as bucket?


            2. The usage of bucketItr seems redundant. First of all, you copy bucket into bucketItr every time add() is called. System.arraycopy() is efficient but that doesn't mean it need not be used judiciously. Moreover, the first line in iterator() initializes bucketItr from scratch, making the copy operation in add() completely redundant. but moreover, I don't see the reason for this array at all. couldn't you loop on bucket in MapItr and just ignore null entries?


            3) Best Practices



            1. References to generic type Entity<K,T> should be parameterized: Throughout the code, you refer to the Entity raw type, without the <K, T> generic parameters. While it seems that this should not produce a bug, (since add() method only accept args of the correct types, this is still considered bad practice (and indeed produces compile warning).


            2. The use of Objects.nonNull() is redundant. why not use the simple and clear node != null? and while we are there, you first assign node to result and then ask about null. so theoretically, you could return null result.


            4) Naming Conventions



            1. an array of buckets should be named buckets or perhaps bucket_array don't you think?





            share|improve this answer















            Here are my comments, in decreasing order of severity:



            1) BUGS



            1. Hash function: your hash function does not refer in any way to the bucket (changing) capacity. The main problem with that is that the function may produce values that are outside of the index bounds of the bucket array (current function may even produce negative values) and then your add() method would throw an exception. but even if you protect from such cases, the function needs to refer to the current capacity in order to spread the values as evenly as possible across the buckets.


            2. Load factor: The code does not refer to the DEFAULT_LOAD_FACTOR: nor does it allow the user to override this value, and more importantly, it doesn't consider this when deciding if the bucket array needs to be enlarged. In fact, the whole logic of this decision seems to me to be wrong: when all entries in the bucket are full, you decide to enlarge the size. so theoretically, on the 11th add() call, you would enlarge the size. This logic needs to be redesigned and it needs to be based on the total number of Entity instances in all the buckets and of course on the load factor.


            Note: depending on the hash function implementation, you may need to rehash the entire hash table because if the hash function depends on the capacity, then it may produce different results for existing keys when the capacity changes. So, the simple System.arraycopy() might not suffice. This is actually what is done in the JDK implementation.



            2) Performance



            1. Adding a new Entity to the linked list involves going through the entire list. This seems inefficient. Perhaps you can save the last node in a separate array that has same length as bucket?


            2. The usage of bucketItr seems redundant. First of all, you copy bucket into bucketItr every time add() is called. System.arraycopy() is efficient, but that doesn't mean it shouldn't be used judiciously. Moreover, the first line in iterator() initializes bucketItr from scratch, making the copy operation in add() completely redundant. In fact, I don't see the reason for this array at all — couldn't you loop over bucket in MapItr and just ignore null entries?


            3) Best Practices



            1. References to generic type Entity<K,T> should be parameterized: Throughout the code, you refer to the Entity raw type, without the <K, T> generic parameters. While it seems that this should not produce a bug (since the add() method only accepts args of the correct types), this is still considered bad practice (and indeed produces a compile warning).


            2. The use of Objects.nonNull() is redundant. Why not use the simple and clear node != null? And while we are there, you first assign node to result and then check for null, so theoretically you could return a null result.


            4) Naming Conventions



            1. An array of buckets should be named buckets or perhaps bucket_array, don't you think?






            share|improve this answer















            share|improve this answer



            share|improve this answer








            edited Jan 24 at 15:45









            Julien Rousé

            446416




            446416











            answered Jan 24 at 15:30









            Sharon Ben Asher

            2,073512




            2,073512











            • Thank you so much for such valuable comments, I started working on listed items and will share the updated code once I done with all points.
              – Simmant
              Jan 24 at 16:31
















            • Thank you so much for such valuable comments, I started working on listed items and will share the updated code once I done with all points.
              – Simmant
              Jan 24 at 16:31















            Thank you so much for such valuable comments, I started working on listed items and will share the updated code once I done with all points.
            – Simmant
            Jan 24 at 16:31




            Thank you so much for such valuable comments, I started working on listed items and will share the updated code once I done with all points.
            – Simmant
            Jan 24 at 16:31












            up vote
            1
            down vote













            A few things to fix.



            1. Entity should be private and nested.


            2. Entity<K, T> next is a valid field inside Entity.java; you are using generics.


            3. <K, V> is a more standard name than <K, T>


            4. Entity bucket = null; can be marked as final.


            5. You don't need a private init; a constructor can call another constructor — this is called constructor chaining.


            6. hash method does not check for null


            7. add can be renamed to put


            8. In add, System.arraycopy(obj, 0, bucket, 0, indexCount-1); is BUGGY. You need to rehash and recalculate index.


            9. In add you are advancing temp, but it's giving you no benefit. In fact, it appears buggy. Try looking up how a stack is implemented using a linked list.


            10. In get, your if statement is redundant. code works perfectly fine without if block


            11. Your delete is buggy. If you assign null to a bucket, then you lose all other entries mapped to that bucket.



            1. Your

            count++;
            if (count < bucketItr.length)
            node = bucketItr[count];



            Should be a while loop, continuing while node != null.






            share|improve this answer

























              up vote
              1
              down vote













              A few things to fix.



              1. Entity should be private and nested.


              2. Entity<K, T> next is valid field inside Entity.java, you are using generics.


              3. <K, V> is a more standard name than <K, T>


              4. Entity bucket = null; can be marked as final.


              5. You dont need a private init, a constructor can call another constructor, called constructor overloading.


              6. hash method does not check for null


              7. add can be renamed to put


              8. In add, System.arraycopy(obj, 0, bucket, 0, indexCount-1); is BUGGY. You need to rehash and recalculate index.


              9. In add you are advancing temp. Its giving you no benefit. Infact it appears buggy. Try looking up for stack using linkedlist.


              10. In get, your if statement is redundant. code works perfectly fine without if block


              10.Your delete is buggy. If you assign null to a bucket then you lost all others mapped to that bucket.



              1. Your

              count++;
              if (count < bucketItr.length)
              node = bucketItr[count];



              Shoud be a while, and while until node != null.






              share|improve this answer























                up vote
                1
                down vote










                up vote
                1
                down vote









                A few things to fix.



                1. Entity should be private and nested.


                2. Entity<K, T> next is valid field inside Entity.java, you are using generics.


                3. <K, V> is a more standard name than <K, T>


                4. Entity bucket = null; can be marked as final.


                5. You dont need a private init, a constructor can call another constructor, called constructor overloading.


                6. hash method does not check for null


                7. add can be renamed to put


                8. In add, System.arraycopy(obj, 0, bucket, 0, indexCount-1); is BUGGY. You need to rehash and recalculate index.


                9. In add you are advancing temp. Its giving you no benefit. Infact it appears buggy. Try looking up for stack using linkedlist.


                10. In get, your if statement is redundant. code works perfectly fine without if block


                10.Your delete is buggy. If you assign null to a bucket then you lost all others mapped to that bucket.



                1. Your

                count++;
                if (count < bucketItr.length)
                node = bucketItr[count];



                Shoud be a while, and while until node != null.






                share|improve this answer













                A few things to fix.



                1. Entity should be private and nested.


                2. Entity<K, T> next is valid field inside Entity.java, you are using generics.


                3. <K, V> is a more standard name than <K, T>


                4. Entity bucket = null; can be marked as final.


                5. You dont need a private init, a constructor can call another constructor, called constructor overloading.


                6. hash method does not check for null


                7. add can be renamed to put


                8. In add, System.arraycopy(obj, 0, bucket, 0, indexCount-1); is BUGGY. You need to rehash and recalculate index.


                9. In add you are advancing temp. Its giving you no benefit. Infact it appears buggy. Try looking up for stack using linkedlist.


                10. In get, your if statement is redundant. code works perfectly fine without if block


                10.Your delete is buggy. If you assign null to a bucket then you lost all others mapped to that bucket.



                1. Your

                count++;
                if (count < bucketItr.length)
                node = bucketItr[count];



                Shoud be a while, and while until node != null.







                share|improve this answer













                share|improve this answer



                share|improve this answer











                answered Jan 30 at 16:34









                Tanvi Jaywant

                1122




                1122






















                     

                    draft saved


                    draft discarded


























                     


                    draft saved


                    draft discarded














                    StackExchange.ready(
                    function ()
                    StackExchange.openid.initPostLogin('.new-post-login', 'https%3a%2f%2fcodereview.stackexchange.com%2fquestions%2f185874%2fcustom-hashmap-implementation-in-java%23new-answer', 'question_page');

                    );

                    Post as a guest













































































                    Popular posts from this blog

                    Chat program with C++ and SFML

                    Function to Return a JSON Like Objects Using VBA Collections and Arrays

                    ADO Stream Object