
Remove Duplicates from Sorted List II

PROBLEM DESCRIPTION

Given the head of a sorted linked list, delete all nodes that have duplicate numbers, leaving only distinct numbers from the original list. Return the linked list sorted as well.
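
For example, the list 1 -> 2 -> 3 -> 3 -> 4 -> 4 -> 5 becomes 1 -> 2 -> 5 (both 3s and both 4s are deleted entirely), and 1 -> 1 -> 1 -> 2 -> 3 becomes 2 -> 3.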

Source: LeetCode

SOLUTION

APPROACH 1 - FREQUENCY MAP

Count how often each value occurs in a first pass, then walk the list a second time and unlink every node whose value occurs more than once. A dummy node in front of the head makes deleting the head itself no special case.

class Solution {
    
    public ListNode deleteDuplicates(ListNode head) {
        
        // Create a HashMap to store the frequency of each value in the linked list.
        Map<Integer, Integer> map = new HashMap<>();

        // Traverse the linked list to populate the frequency map.
        ListNode temp = head;
        while(temp != null){
            // Increment the frequency count for the current value in the map.
            map.put(temp.val, map.getOrDefault(temp.val, 0) + 1);
            // Move to the next node in the linked list.
            temp = temp.next;
        }

        // Create a dummy node in front of the head so that deleting the head
        // itself needs no special case. The dummy's value is never compared;
        // 101 simply sits outside the problem's -100..100 value range.
        ListNode dummy = new ListNode(101, head);

        // Initialize two pointers: prev points to the previous distinct node,
        // and current points to the current node being checked.
        ListNode prev = dummy;
        ListNode current = head;

        // Traverse the linked list again to remove duplicate nodes.
        while(current != null){

            // Get the frequency of the current value from the map.
            int freq = map.get(current.val);

            // If the frequency is greater than or equal to 2, it means the value is a duplicate.
            if(freq >= 2){

                // Skip the duplicate node by updating the 'next' pointer of the previous node.
                prev.next = current.next;
                // Move to the next node.
                current = current.next;

            }else{

                // If the value is distinct, advance both prev and current to the next node.
                prev = prev.next;
                current = current.next;

            }
        }

        // Return the modified linked list starting from the next of the dummy node.
        return dummy.next;
    }
}
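
This map-based version makes two passes and runs in O(n) time, but the frequency map costs O(n) extra space. To exercise it locally, a small driver along the following lines works, assuming the Solution class above is on the classpath. The ListNode class mirrors LeetCode's standard node definition; buildList and printList are hypothetical helpers added only for this sketch.

class ListNode {
    int val;
    ListNode next;
    ListNode() {}
    ListNode(int val) { this.val = val; }
    ListNode(int val, ListNode next) { this.val = val; this.next = next; }
}

public class Demo {

    // Build a linked list from the given values (helper for this sketch only).
    static ListNode buildList(int... vals) {
        ListNode dummy = new ListNode();
        ListNode tail = dummy;
        for (int v : vals) {
            tail.next = new ListNode(v);
            tail = tail.next;
        }
        return dummy.next;
    }

    // Print the list as "1 -> 2 -> 5" (helper for this sketch only).
    static void printList(ListNode head) {
        StringBuilder sb = new StringBuilder();
        for (ListNode n = head; n != null; n = n.next) {
            if (sb.length() > 0) sb.append(" -> ");
            sb.append(n.val);
        }
        System.out.println(sb);
    }

    public static void main(String[] args) {
        ListNode head = buildList(1, 2, 3, 3, 4, 4, 5);
        printList(new Solution().deleteDuplicates(head)); // expected: 1 -> 2 -> 5
    }
}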

APPROACH 2 - SPACE OPTIMIZED

Because the list is sorted, equal values are always adjacent, so every run of duplicates can be detected and skipped in a single pass with two pointers and no extra memory.

class Solution {
    
    public ListNode deleteDuplicates(ListNode head) {
        // Create a dummy node in front of the head so that deleting the head
        // itself needs no special case. As in Approach 1, the dummy's value is
        // never compared; 101 just sits outside the -100..100 value range.
        ListNode dummy = new ListNode(101, head);

        // Initialize two pointers: prev points to the previous distinct node,
        // and current points to the current node being checked.
        ListNode prev = dummy;
        ListNode current = head;

        // Traverse the linked list to remove duplicate nodes.
        while(current != null && current.next != null){

            // Check if the current value is the same as the next value.
            if(current.val == current.next.val){

                // If there are consecutive duplicate values, keep moving the current pointer.
                while(current.next != null && current.val == current.next.val){
                    current = current.next;
                }

                // Skip the duplicate nodes by updating the 'next' pointer of the previous node.
                prev.next = current.next;
                // Move to the next node after the duplicates.
                current = current.next;

            }else{
                // If the value is distinct, update both prev and current pointers.
                prev = prev.next;
                current = current.next;
            }

        }

        // Return the modified linked list starting from the next of the dummy node.
        return dummy.next;
    }
}
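
Both approaches run in O(n) time. The difference is space: the frequency map costs O(n) extra memory, while the two-pointer version uses O(1) by exploiting the sorted order, which guarantees that all duplicates of a value sit next to each other.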
This post is licensed under CC BY 4.0 by the author.