Disallow duplicate heartbeats and crashreports

Add unique constraints and the corresponding schema and data
migration. Adapt all test cases so that only unique heartbeats and
crashreports are sent. Delete test cases that no longer apply because
duplicate entries can no longer exist in the database.

Issue: HIC-180
Change-Id: I768d1610d4482c9d61b76cdbc588334198bfe415
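
For reviewers, the core of the change in one place. This is an abridged
sketch of the code added by the patch below (imports, the unchanged model
fields and the explicit save() signature are elided); Crashreport keeps its
DateTimeField but receives the same Meta constraint and save() handling:

    # Both HeartBeat and Crashreport gain a ("device", "date") uniqueness
    # constraint, and save() drops duplicates instead of raising, so that
    # clients which resend a report still get a successful response.
    class HeartBeat(models.Model):
        # FK options elided here; see crashreports/models.py
        device = models.ForeignKey(Device, related_name="heartbeats")
        date = models.DateField(db_index=True)  # was a DateTimeField

        class Meta:
            unique_together = ("device", "date")

        def save(self, *args, **kwargs):
            try:
                with transaction.atomic():
                    if not self.device_local_id:
                        self.device_local_id = self.device.get_heartbeat_key()
                    super(HeartBeat, self).save(*args, **kwargs)
            except IntegrityError:
                # Duplicate (device, date): log and drop silently to keep
                # the interface idempotent.
                LOGGER.debug(
                    "Duplicate HeartBeat received and dropped: %s",
                    model_to_dict(self),
                )
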
diff --git a/crashreport_stats/management/commands/stats.py b/crashreport_stats/management/commands/stats.py
index d510a0e..c2cc56c 100644
--- a/crashreport_stats/management/commands/stats.py
+++ b/crashreport_stats/management/commands/stats.py
@@ -256,7 +256,7 @@
         return (
             query_objects.annotate(_report_day=TruncDate("date"))
             .values(self.version_field_name, "_report_day")
-            .annotate(count=Count("date", distinct=True))
+            .annotate(count=Count("date"))
         )
 
     def delete_stats(self) -> Dict[str, int]:
diff --git a/crashreport_stats/rest_endpoints.py b/crashreport_stats/rest_endpoints.py
index ba8ea9f..a15edf5 100644
--- a/crashreport_stats/rest_endpoints.py
+++ b/crashreport_stats/rest_endpoints.py
@@ -189,7 +189,7 @@
         device_heartbeats = list(device.heartbeats.all())
         device_crashreports = list(device.crashreports.all())
 
-        dates = {heartbeat.date.date() for heartbeat in device_heartbeats}
+        dates = {heartbeat.date for heartbeat in device_heartbeats}
 
         response = [
             get_stats_for_date(date, device_crashreports, device_heartbeats)
@@ -201,7 +201,7 @@
 
 def get_stats_for_date(date, crashreports, heartbeats):
     """Get the stats for a device for a specific date."""
-    heartbeats = filter_instances(heartbeats, lambda hb: hb.date.date() == date)
+    heartbeats = filter_instances(heartbeats, lambda hb: hb.date == date)
     crashreports = filter_instances(
         crashreports, lambda c: c.date.date() == date
     )
diff --git a/crashreport_stats/tests/test_rest_endpoints.py b/crashreport_stats/tests/test_rest_endpoints.py
index a3bd2d1..8708f3d 100644
--- a/crashreport_stats/tests/test_rest_endpoints.py
+++ b/crashreport_stats/tests/test_rest_endpoints.py
@@ -1,7 +1,6 @@
 """Tests for the rest_endpoints module."""
 import operator
 from datetime import datetime, timedelta
-import unittest
 
 import pytz
 from django.test import override_settings
@@ -585,15 +584,22 @@
         """Test getting device stats for a device."""
         # Create a device with a heartbeat and one report of each type
         device = Dummy.create_dummy_device(Dummy.create_dummy_user())
-        heartbeat = Dummy.create_dummy_report(HeartBeat, device)
+        crashreport_date = Dummy.DEFAULT_DUMMY_CRASHREPORT_VALUES["date"]
+        heartbeat = Dummy.create_dummy_report(
+            HeartBeat, device, date=crashreport_date.date()
+        )
         for boot_reason in (
             Crashreport.SMPL_BOOT_REASONS
             + Crashreport.CRASH_BOOT_REASONS
             + ["other boot reason"]
         ):
             Dummy.create_dummy_report(
-                Crashreport, device, boot_reason=boot_reason
+                Crashreport,
+                device,
+                boot_reason=boot_reason,
+                date=crashreport_date,
             )
+            crashreport_date += timedelta(milliseconds=1)
 
         # Get the device statistics
         response = self._get_with_params(
@@ -619,15 +625,15 @@
         device = Dummy.create_dummy_device(Dummy.create_dummy_user())
         num_days = 100
         for i in range(num_days):
-            report_day = datetime.now(tz=pytz.utc) + timedelta(days=i)
+            report_date = datetime.now(tz=pytz.utc) + timedelta(days=i)
             heartbeat = Dummy.create_dummy_report(
-                HeartBeat, device, date=report_day
+                HeartBeat, device, date=report_date.date()
             )
-            Dummy.create_dummy_report(Crashreport, device, date=report_day)
+            Dummy.create_dummy_report(Crashreport, device, date=report_date)
             Dummy.create_dummy_report(
                 Crashreport,
                 device,
-                date=report_day,
+                date=report_date + timedelta(minutes=1),
                 boot_reason=Crashreport.SMPL_BOOT_REASONS[0],
             )
 
@@ -656,13 +662,13 @@
         num_days = 100
         skip_day = round(num_days / 2)
         for i in range(num_days):
-            report_day = datetime.now(tz=pytz.utc) + timedelta(days=i)
+            report_date = datetime.now(tz=pytz.utc) + timedelta(days=i)
             # Skip creation of heartbeat at one day
             if i != skip_day:
                 heartbeat = Dummy.create_dummy_report(
-                    HeartBeat, device, date=report_day
+                    HeartBeat, device, date=report_date.date()
                 )
-            Dummy.create_dummy_report(Crashreport, device, date=report_day)
+            Dummy.create_dummy_report(Crashreport, device, date=report_date)
 
         # Get the device statistics
         response = self._get_with_params(
@@ -682,48 +688,6 @@
             last_active=heartbeat.date,
         )
 
-    @unittest.skip("Duplicate heartbeats are currently not dropped.")
-    def test_get_device_stats_multiple_days_duplicate_heartbeat(self):
-        """Test getting device stats for a device with duplicate heartbeat.
-
-        Duplicate heartbeats are dropped and thus should not influence the
-        statistics.
-        """
-        # Create a device with some heartbeats and reports over time
-        device = Dummy.create_dummy_device(Dummy.create_dummy_user())
-        num_days = 100
-        duplicate_day = round(num_days / 2)
-        first_report_day = Dummy.DEFAULT_DUMMY_HEARTBEAT_VALUES["date"]
-        for i in range(num_days):
-            report_day = first_report_day + timedelta(days=i)
-            heartbeat = Dummy.create_dummy_report(
-                HeartBeat, device, date=report_day
-            )
-            # Create a second at the duplicate day (with 1 hour delay)
-            if i == duplicate_day:
-                Dummy.create_dummy_report(
-                    HeartBeat, device, date=report_day + timedelta(hours=1)
-                )
-            Dummy.create_dummy_report(Crashreport, device, date=report_day)
-
-        # Get the device statistics
-        response = self._get_with_params(
-            self.device_overview_url, {"uuid": device.uuid}
-        )
-
-        # Assert that the statistics match
-        self._assert_device_stats_response_is(
-            response=response,
-            uuid=str(device.uuid),
-            board_date=device.board_date,
-            num_heartbeats=num_days,
-            num_crashreports=num_days,
-            num_smpls=0,
-            crashes_per_day=1,
-            smpl_per_day=0,
-            last_active=heartbeat.date,
-        )
-
     def test_get_device_report_history_no_reports(self):
         """Test getting report history stats for a device without reports."""
         # Create a device
@@ -741,15 +705,22 @@
         """Test getting report history stats for a device."""
         # Create a device with a heartbeat and one report of each type
         device = Dummy.create_dummy_device(Dummy.create_dummy_user())
-        heartbeat = Dummy.create_dummy_report(HeartBeat, device)
+        crashreport_date = Dummy.DEFAULT_DUMMY_CRASHREPORT_VALUES["date"]
+        heartbeat = Dummy.create_dummy_report(
+            HeartBeat, device, date=crashreport_date.date()
+        )
         for boot_reason in (
             Crashreport.SMPL_BOOT_REASONS
             + Crashreport.CRASH_BOOT_REASONS
             + ["other boot reason"]
         ):
             Dummy.create_dummy_report(
-                Crashreport, device, boot_reason=boot_reason
+                Crashreport,
+                device,
+                boot_reason=boot_reason,
+                date=crashreport_date,
             )
+            crashreport_date += timedelta(milliseconds=1)
 
         # Get the device report history statistics
         response = self._get_with_params(
@@ -759,7 +730,7 @@
         # Assert that the statistics match
         report_history = [
             {
-                "date": heartbeat.date.date(),
+                "date": heartbeat.date,
                 "heartbeats": 1,
                 "smpl": len(Crashreport.SMPL_BOOT_REASONS),
                 "prob_crashes": len(Crashreport.CRASH_BOOT_REASONS),
@@ -779,8 +750,10 @@
         for _ in range(10):
             report_date = report_date + timedelta(days=1)
 
-            Dummy.create_dummy_report(HeartBeat, device, date=report_date)
-            for boot_reason in (
+            Dummy.create_dummy_report(
+                HeartBeat, device, date=report_date.date()
+            )
+            for i, boot_reason in enumerate(
                 Crashreport.SMPL_BOOT_REASONS
                 + Crashreport.CRASH_BOOT_REASONS
                 + ["other boot reason"]
@@ -789,7 +762,7 @@
                     Crashreport,
                     device,
                     boot_reason=boot_reason,
-                    date=report_date,
+                    date=report_date + timedelta(milliseconds=i),
                 )
 
             # Create the expected report history object
@@ -831,14 +804,18 @@
         """Test getting update history stats for a device."""
         # Create a device with a heartbeat and one report of each type
         device = Dummy.create_dummy_device(Dummy.create_dummy_user())
-        heartbeat = Dummy.create_dummy_report(HeartBeat, device)
+        crashreport_date = Dummy.DEFAULT_DUMMY_CRASHREPORT_VALUES["date"]
+        heartbeat = Dummy.create_dummy_report(
+            HeartBeat, device, date=crashreport_date.date()
+        )
         for boot_reason in (
             Crashreport.SMPL_BOOT_REASONS
             + Crashreport.CRASH_BOOT_REASONS
             + ["other boot reason"]
         ):
-            params = {"boot_reason": boot_reason}
+            params = {"boot_reason": boot_reason, "date": crashreport_date}
             Dummy.create_dummy_report(Crashreport, device, **params)
+            crashreport_date += timedelta(milliseconds=1)
 
         # Get the device update history statistics
         response = self._get_with_params(
@@ -866,24 +843,24 @@
         device = Dummy.create_dummy_device(Dummy.create_dummy_user())
         expected_update_history = []
         for i, build_fingerprint in enumerate(Dummy.BUILD_FINGERPRINTS):
-            report_day = datetime.now(tz=pytz.utc) + timedelta(days=i)
+            report_date = datetime.now(tz=pytz.utc) + timedelta(days=i)
             Dummy.create_dummy_report(
                 HeartBeat,
                 device,
-                date=report_day,
+                date=report_date,
                 build_fingerprint=build_fingerprint,
             )
             Dummy.create_dummy_report(
                 Crashreport,
                 device,
-                date=report_day,
+                date=report_date,
                 build_fingerprint=build_fingerprint,
             )
 
             # Create the expected update history object
             expected_update_history.append(
                 {
-                    "update_date": report_day,
+                    "update_date": report_date.date(),
                     "build_fingerprint": build_fingerprint,
                     "max": device.id,
                     "prob_crashes": 1,
diff --git a/crashreport_stats/tests/test_stats_management_command.py b/crashreport_stats/tests/test_stats_management_command.py
index 136e2ef..6b2745e 100644
--- a/crashreport_stats/tests/test_stats_management_command.py
+++ b/crashreport_stats/tests/test_stats_management_command.py
@@ -1,5 +1,4 @@
 """Tests for the stats management command module."""
-
 from io import StringIO
 from datetime import datetime, timedelta
 import unittest
@@ -38,13 +37,14 @@
         self, report_type, unique_entry_name, device, number, **kwargs
     ):
         # Create reports with distinct timestamps
-        now = datetime.now(pytz.utc)
+        report_date = datetime.now(pytz.utc)
+        if report_type == HeartBeat:
+            report_date = report_date.date()
         for i in range(number):
-            report_date = now - timedelta(milliseconds=i)
             report_attributes = {
                 self.unique_entry_name: unique_entry_name,
                 "device": device,
-                "date": report_date,
+                "date": report_date - timedelta(days=i),
             }
             report_attributes.update(**kwargs)
             Dummy.create_dummy_report(report_type, **report_attributes)
@@ -86,12 +86,15 @@
         }
         version = self.version_class.objects.get(**get_params)
 
-        self.assertEqual(report.date.date(), version.first_seen_on)
+        report_date = (
+            report.date.date() if report_type == Crashreport else report.date
+        )
+        self.assertEqual(report_date, version.first_seen_on)
 
         # Create a new report from an earlier point in time
-        report_time_2 = report.date - timedelta(weeks=1)
+        report_date_2 = report.date - timedelta(weeks=1)
         Dummy.create_dummy_report(
-            report_type, device=device, date=report_time_2
+            report_type, device=device, date=report_date_2
         )
 
         # Run the command to update the database
@@ -101,7 +104,9 @@
         version = self.version_class.objects.get(**get_params)
 
         # Validate that the date matches the report recently sent
-        self.assertEqual(report_time_2.date(), version.first_seen_on)
+        if report_type == Crashreport:
+            report_date_2 = report_date_2.date()
+        self.assertEqual(report_date_2, version.first_seen_on)
 
     def test_older_heartbeat_updates_version_date(self):
         """Validate updating version date with older heartbeats."""
@@ -114,9 +119,9 @@
     def test_entries_are_unique(self):
         """Validate the entries' unicity and value."""
         # Create some reports
-        user = Dummy.create_dummy_user()
-        device = Dummy.create_dummy_device(user=user)
-        for unique_entry in self.unique_entries:
+        for unique_entry, username in zip(self.unique_entries, Dummy.USERNAMES):
+            user = Dummy.create_dummy_user(username=username)
+            device = Dummy.create_dummy_device(user=user)
             self._create_reports(HeartBeat, unique_entry, device, 10)
 
         # Run the command to update the database
@@ -142,9 +147,11 @@
                 "({} != {})".format(len(numbers), len(self.unique_entries))
             )
         # Create some reports
-        user = Dummy.create_dummy_user()
-        device = Dummy.create_dummy_device(user=user)
-        for unique_entry, num in zip(self.unique_entries, numbers):
+        for unique_entry, num, username in zip(
+            self.unique_entries, numbers, Dummy.USERNAMES
+        ):
+            user = Dummy.create_dummy_user(username=username)
+            device = Dummy.create_dummy_device(user=user)
             self._create_reports(
                 report_type, unique_entry, device, num, **kwargs
             )
@@ -334,20 +341,24 @@
     def _assert_updating_twice_gives_correct_counters(
         self, report_type, counter_attribute_name, **boot_reason_param
     ):
-        # Create a device and a corresponding reports for 2 different versions
-        device = Dummy.create_dummy_device(Dummy.create_dummy_user())
+        # Create two devices and corresponding reports for 2 different
+        # versions
+        device_1 = Dummy.create_dummy_device(Dummy.create_dummy_user())
         num_reports = 5
         self._create_reports(
             report_type,
             self.unique_entries[0],
-            device,
+            device_1,
             num_reports,
             **boot_reason_param
         )
+        device_2 = Dummy.create_dummy_device(
+            Dummy.create_dummy_user(username=Dummy.USERNAMES[1])
+        )
         self._create_reports(
             report_type,
             self.unique_entries[1],
-            device,
+            device_2,
             num_reports,
             **boot_reason_param
         )
@@ -372,7 +383,7 @@
         # Create another report for the first version
         report_new_attributes = {
             self.unique_entry_name: self.unique_entries[0],
-            "device": device,
+            "device": device_1,
             **boot_reason_param,
         }
         Dummy.create_dummy_report(report_type, **report_new_attributes)
@@ -520,74 +531,6 @@
             Crashreport, counter_attribute_name, **params
         )
 
-    def _assert_duplicates_are_ignored(
-        self, report_type, device, counter_attribute_name, **kwargs
-    ):
-        """Validate that reports with duplicate timestamps are ignored."""
-        # Create a report
-        report = Dummy.create_dummy_report(report_type, device=device, **kwargs)
-
-        # Create a second report with the same timestamp
-        Dummy.create_dummy_report(
-            report_type, device=device, date=report.date, **kwargs
-        )
-
-        # Run the command to update the database
-        call_command("stats", "update")
-
-        # Get the corresponding version instance from the database
-        get_params = {
-            self.unique_entry_name: getattr(report, self.unique_entry_name)
-        }
-        version = self.version_class.objects.get(**get_params)
-
-        # Assert that the report with the duplicate timestamp is not
-        # counted, i.e. only 1 report is counted.
-        self.assertEqual(getattr(version, counter_attribute_name), 1)
-
-    def test_heartbeat_duplicates_are_ignored(self):
-        """Validate that heartbeat duplicates are ignored."""
-        counter_attribute_name = "heartbeats"
-        device = Dummy.create_dummy_device(user=Dummy.create_dummy_user())
-        self._assert_duplicates_are_ignored(
-            HeartBeat, device, counter_attribute_name
-        )
-
-    def test_crash_report_duplicates_are_ignored(self):
-        """Validate that crash report duplicates are ignored."""
-        counter_attribute_name = "prob_crashes"
-        device = Dummy.create_dummy_device(user=Dummy.create_dummy_user())
-        for i, boot_reason in enumerate(Crashreport.CRASH_BOOT_REASONS):
-            params = {
-                "boot_reason": boot_reason,
-                self.unique_entry_name: self.unique_entries[i],
-            }
-            self._assert_duplicates_are_ignored(
-                Crashreport, device, counter_attribute_name, **params
-            )
-
-    def test_smpl_report_duplicates_are_ignored(self):
-        """Validate that smpl report duplicates are ignored."""
-        counter_attribute_name = "smpl"
-        device = Dummy.create_dummy_device(user=Dummy.create_dummy_user())
-        for i, boot_reason in enumerate(Crashreport.SMPL_BOOT_REASONS):
-            params = {
-                "boot_reason": boot_reason,
-                self.unique_entry_name: self.unique_entries[i],
-            }
-            self._assert_duplicates_are_ignored(
-                Crashreport, device, counter_attribute_name, **params
-            )
-
-    def test_other_report_duplicates_are_ignored(self):
-        """Validate that other report duplicates are ignored."""
-        counter_attribute_name = "other"
-        params = {"boot_reason": "random boot reason"}
-        device = Dummy.create_dummy_device(user=Dummy.create_dummy_user())
-        self._assert_duplicates_are_ignored(
-            Crashreport, device, counter_attribute_name, **params
-        )
-
     def _assert_older_reports_update_released_on_date(
         self, report_type, **kwargs
     ):
@@ -609,7 +552,10 @@
         )
 
         # Assert that the released_on date matches the first report date
-        self.assertEqual(version.released_on, report.date.date())
+        report_date = (
+            report.date.date() if report_type == Crashreport else report.date
+        )
+        self.assertEqual(version.released_on, report_date)
 
         # Create a second report with a timestamp earlier in time
         report_2_date = report.date - timedelta(days=1)
@@ -626,7 +572,9 @@
         )
 
         # Assert that the released_on date matches the older report date
-        self.assertEqual(version.released_on, report_2_date.date())
+        if report_type == Crashreport:
+            report_2_date = report_2_date.date()
+        self.assertEqual(version.released_on, report_2_date)
 
     def _assert_newer_reports_do_not_update_released_on_date(
         self, report_type, **kwargs
@@ -639,7 +587,9 @@
         # Create a report
         device = Dummy.create_dummy_device(user=Dummy.create_dummy_user())
         report = Dummy.create_dummy_report(report_type, device=device, **kwargs)
-        report_1_date = report.date.date()
+        report_1_date = (
+            report.date.date() if report_type == Crashreport else report.date
+        )
 
         # Run the command to update the database
         call_command("stats", "update")
@@ -706,7 +656,10 @@
         )
 
         # Assert that the released_on date matches the first report date
-        self.assertEqual(version.released_on, report.date.date())
+        report_date = (
+            report.date.date() if report_type == Crashreport else report.date
+        )
+        self.assertEqual(version.released_on, report_date)
 
         # Create a second report with a timestamp earlier in time
         report_2_date = report.date - timedelta(days=1)
@@ -715,7 +668,7 @@
         )
 
         # Manually change the released_on date
-        version_release_date = report.date + timedelta(days=1)
+        version_release_date = report_date + timedelta(days=1)
         version.released_on = version_release_date
         version.save()
 
@@ -729,7 +682,7 @@
 
         # Assert that the released_on date still matches the date it was
         # manually changed to
-        self.assertEqual(version.released_on, version_release_date.date())
+        self.assertEqual(version.released_on, version_release_date)
 
     def test_manually_changed_released_on_date_is_not_updated_by_heartbeat(
         self
diff --git a/crashreports/migrations/0006_add_unique_constraints_and_drop_duplicates.py b/crashreports/migrations/0006_add_unique_constraints_and_drop_duplicates.py
new file mode 100644
index 0000000..5fa6a6f
--- /dev/null
+++ b/crashreports/migrations/0006_add_unique_constraints_and_drop_duplicates.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+"""Migrations to set the unique constraints and drop duplicates."""
+# pylint: disable=invalid-name
+import logging
+
+from django.db import migrations, models, connection
+from django.db.models import Count, Min
+
+from crashreports.models import HeartBeat, Crashreport
+
+LOGGER = logging.getLogger(__name__)
+
+
+def drop_heartbeat_duplicates(apps, schema_editor):
+    """Drop duplicate heartbeat entries."""
+    # pylint: disable=unused-argument
+    find_and_drop_duplicates(HeartBeat)
+
+
+def drop_crashreport_duplicates(apps, schema_editor):
+    """Drop duplicate crashreport entries."""
+    # pylint: disable=unused-argument
+    find_and_drop_duplicates(Crashreport)
+
+
+def find_and_drop_duplicates(object_type):
+    """Drop all duplicates of the given object type."""
+    unique_fields = ("device", "date")
+    duplicates = (
+        object_type.objects.values(*unique_fields)
+        .order_by()
+        .annotate(min_id=Min("id"), num_duplicates=Count("id"))
+        .filter(num_duplicates__gt=1)
+    )
+
+    LOGGER.info(
+        "Found %d %s instances that have duplicates. These will be removed.",
+        duplicates.count(),
+        object_type.__name__,
+    )
+    for duplicate in duplicates:
+        LOGGER.debug("Removing duplicates: %s", duplicate)
+        (
+            object_type.objects.filter(
+                device=duplicate["device"], date=duplicate["date"]
+            )
+            .exclude(id=duplicate["min_id"])
+            .delete()
+        )
+
+    # Manually commit the data migration before schema migrations are applied
+    connection.cursor().execute("COMMIT;")
+
+
+class Migration(migrations.Migration):
+    """Change heartbeat date field, set unique constraints, drop duplicates."""
+
+    dependencies = [("crashreports", "0005_add_fp_staff_group")]
+
+    operations = [
+        migrations.AlterField(
+            model_name="heartbeat",
+            name="date",
+            field=models.DateField(db_index=True),
+        ),
+        migrations.RunPython(
+            drop_heartbeat_duplicates, reverse_code=migrations.RunPython.noop
+        ),
+        migrations.RunPython(
+            drop_crashreport_duplicates, reverse_code=migrations.RunPython.noop
+        ),
+        migrations.AlterUniqueTogether(
+            name="crashreport", unique_together=set([("device", "date")])
+        ),
+        migrations.AlterUniqueTogether(
+            name="heartbeat", unique_together=set([("device", "date")])
+        ),
+    ]
diff --git a/crashreports/models.py b/crashreports/models.py
index 25aa452..f76c894 100644
--- a/crashreports/models.py
+++ b/crashreports/models.py
@@ -1,14 +1,17 @@
 # -*- coding: utf-8 -*-
 """Models for devices, heartbeats, crashreports and log files."""
-
+import logging
 import os
 import uuid
 
-from django.db import models, transaction
+from django.db import models, transaction, IntegrityError
 from django.contrib.auth.models import User
 from django.dispatch import receiver
+from django.forms import model_to_dict
 from taggit.managers import TaggableManager
 
+LOGGER = logging.getLogger(__name__)
+
 
 class Device(models.Model):
     """A device representing a phone that has been registered on Hiccup."""
@@ -109,6 +112,9 @@
     next_logfile_key = models.PositiveIntegerField(default=1)
     created_at = models.DateTimeField(auto_now_add=True)
 
+    class Meta:  # noqa: D106
+        unique_together = ("device", "date")
+
     @transaction.atomic
     def get_logfile_key(self):
         """Get the next key for a log file and update the ID-counter."""
@@ -125,11 +131,21 @@
         update_fields=None,
     ):
         """Save the crashreport and set its local ID if it was not set."""
-        if not self.device_local_id:
-            self.device_local_id = self.device.get_crashreport_key()
-        super(Crashreport, self).save(
-            force_insert, force_update, using, update_fields
-        )
+        try:
+            with transaction.atomic():
+                if not self.device_local_id:
+                    self.device_local_id = self.device.get_crashreport_key()
+                super(Crashreport, self).save(
+                    force_insert, force_update, using, update_fields
+                )
+        except IntegrityError:
+            # If there is a duplicate entry, log its values and return
+            # without throwing an exception to keep the interface idempotent.
+            LOGGER.debug(
+                "Duplicate Crashreport received and dropped: %s",
+                model_to_dict(self),
+            )
 
     def _get_uuid(self):
         """Return the device UUID."""
@@ -187,10 +203,13 @@
     uptime = models.CharField(max_length=200)
     build_fingerprint = models.CharField(db_index=True, max_length=200)
     radio_version = models.CharField(db_index=True, max_length=200, null=True)
-    date = models.DateTimeField(db_index=True)
+    date = models.DateField(db_index=True)
     device_local_id = models.PositiveIntegerField(blank=True)
     created_at = models.DateTimeField(auto_now_add=True)
 
+    class Meta:  # noqa: D106
+        unique_together = ("device", "date")
+
     def save(
         self,
         force_insert=False,
@@ -199,11 +218,21 @@
         update_fields=None,
     ):
         """Save the heartbeat and set its local ID if it was not set."""
-        if not self.device_local_id:
-            self.device_local_id = self.device.get_heartbeat_key()
-        super(HeartBeat, self).save(
-            force_insert, force_update, using, update_fields
-        )
+        try:
+            with transaction.atomic():
+                if not self.device_local_id:
+                    self.device_local_id = self.device.get_heartbeat_key()
+                super(HeartBeat, self).save(
+                    force_insert, force_update, using, update_fields
+                )
+        except IntegrityError:
+            # If there is a duplicate entry, log its values and return
+            # without throwing an exception to keep the interface idempotent.
+            LOGGER.debug(
+                "Duplicate HeartBeat received and dropped: %s",
+                model_to_dict(self),
+            )
 
     def _get_uuid(self):
         """Return the device UUID."""
diff --git a/crashreports/serializers.py b/crashreports/serializers.py
index 1125f62..1dd52ea 100644
--- a/crashreports/serializers.py
+++ b/crashreports/serializers.py
@@ -1,6 +1,7 @@
 """Serializers for Crashreport-related models."""
 from django.utils import timezone
 from django.core.exceptions import ObjectDoesNotExist
+from django.utils.dateparse import parse_datetime
 from rest_framework import serializers
 from rest_framework.exceptions import NotFound
 from rest_framework import permissions
@@ -77,7 +78,7 @@
     uuid = serializers.CharField(max_length=64)
     id = PrivateField()
     device_local_id = serializers.IntegerField(required=False)
-    date = serializers.DateTimeField(default_timezone=timezone.utc)
+    date = serializers.DateField()
 
     class Meta:  # noqa: D106
         model = HeartBeat
@@ -102,6 +103,22 @@
         heartbeat.save()
         return heartbeat
 
+    def to_internal_value(self, data):
+        """Parse serialized heartbeat representations.
+
+        Incoming 'date' values that contain a full datetime (including a time
+        part) are truncated so that only the date part is deserialized. The
+        field was originally a datetime field, and Hiccup clients may still
+        send datetime values.
+        """
+        datetime = parse_datetime(data["date"])
+        if datetime:
+            updated_data = data.copy()
+            updated_data["date"] = datetime.date().isoformat()
+            data = updated_data
+
+        return super(HeartBeatSerializer, self).to_internal_value(data)
+
 
 class LogFileSerializer(serializers.ModelSerializer):
     """Serializer for LogFile instances."""
diff --git a/crashreports/tests/test_migrations.py b/crashreports/tests/test_migrations.py
new file mode 100644
index 0000000..e5faffb
--- /dev/null
+++ b/crashreports/tests/test_migrations.py
@@ -0,0 +1,149 @@
+"""Tests for the Django database migrations."""
+import logging
+import os
+import tempfile
+from datetime import datetime, date
+
+import pytz
+from django.test import TransactionTestCase, override_settings
+from django.db.migrations.executor import MigrationExecutor
+from django.db import connection
+
+from crashreports.models import Crashreport, HeartBeat, LogFile
+from crashreports.tests.utils import Dummy
+
+
+class MigrationTestCase(TransactionTestCase):
+    """Test for Django database migrations."""
+
+    # Make data from migrations available in the test cases
+    serialized_rollback = True
+
+    # These must be defined by subclasses.
+    migrate_from = None
+    migrate_to = None
+
+    def setUp(self):
+        """Set up the database up to the state of the first migration."""
+        super(MigrationTestCase, self).setUp()
+
+        self.executor = MigrationExecutor(connection)
+        self.executor.migrate(self.migrate_from)
+
+    def migrate_to_dest(self):
+        """Migrate the database to the desired destination migration."""
+        self.executor.loader.build_graph()
+        self.executor.migrate(self.migrate_to)
+
+
+@override_settings(MEDIA_ROOT=tempfile.mkdtemp(".hiccup-tests"))
+class DropDuplicatesMigrationTestCase(MigrationTestCase):
+    """Test the migration for dropping duplicate heartbeats and crashreports."""
+
+    migrate_from = [("crashreports", "0005_add_fp_staff_group")]
+    migrate_to = [
+        ("crashreports", "0006_add_unique_constraints_and_drop_duplicates")
+    ]
+
+    def test_duplicate_heartbeats_are_deleted(self):
+        """Test that duplicate heartbeats are deleted after migrating."""
+        self._assert_duplicates_are_deleted(HeartBeat)
+
+    def test_duplicate_crashreports_are_deleted(self):
+        """Test that duplicate crashreports are deleted after migrating."""
+        self._assert_duplicates_are_deleted(Crashreport)
+
+    def _assert_duplicates_are_deleted(self, object_type):
+        # Create a user, device and two duplicate reports
+        user = Dummy.create_dummy_user()
+        device = Dummy.create_dummy_device(user)
+        report_1 = Dummy.create_dummy_report(object_type, device)
+        Dummy.create_dummy_report(object_type, device)
+
+        # Assert that 2 instances have been created
+        self.assertEqual(object_type.objects.count(), 2)
+
+        # Run the migration
+        logger = logging.getLogger("crashreports")
+        with self.assertLogs(logger, "DEBUG") as logging_watcher:
+            self.migrate_to_dest()
+
+        # Assert the correct message is logged
+        self.assertTrue(
+            {
+                "INFO:crashreports.migrations."
+                "0006_add_unique_constraints_and_drop_duplicates:"
+                "Found 1 {} instances that have duplicates. "
+                "These will be removed.".format(object_type.__name__),
+                "DEBUG:crashreports.migrations"
+                ".0006_add_unique_constraints_and_drop_duplicates:Removing "
+                "duplicates: {}".format(
+                    str(
+                        {
+                            "device": device.id,
+                            "date": report_1.date,
+                            "min_id": report_1.id,
+                            "num_duplicates": 2,
+                        }
+                    )
+                ),
+            }.issubset(set(logging_watcher.output))
+        )
+
+        # Assert that only one instance is left in the database
+        self.assertEqual(object_type.objects.count(), 1)
+
+    def test_delete_duplicate_crashreport_with_logfile(self):
+        """Test deletion of a duplicate crashreport with logfile."""
+        # Create a user, device and two duplicate reports with logfiles
+        user = Dummy.create_dummy_user()
+        device = Dummy.create_dummy_device(user)
+        crashreport_1 = Dummy.create_dummy_report(Crashreport, device)
+        crashreport_2 = Dummy.create_dummy_report(Crashreport, device)
+        _, logfile_1_path = Dummy.create_dummy_log_file_with_actual_file(
+            crashreport_1
+        )
+        _, logfile_2_path = Dummy.create_dummy_log_file_with_actual_file(
+            crashreport_2, logfile=Dummy.DEFAULT_DUMMY_LOG_FILE_PATHS[1]
+        )
+
+        # Assert that 2 crashreports and logfiles have been created
+        self.assertEqual(Crashreport.objects.count(), 2)
+        self.assertEqual(LogFile.objects.count(), 2)
+        self.assertTrue(os.path.isfile(logfile_1_path))
+        self.assertTrue(os.path.isfile(logfile_2_path))
+
+        # Run the migration
+        self.migrate_to_dest()
+
+        # Assert that only one crashreport and one logfile are left in the
+        # database
+        self.assertEqual(Crashreport.objects.count(), 1)
+        self.assertEqual(Crashreport.objects.first().logfiles.count(), 1)
+        self.assertEqual(LogFile.objects.count(), 1)
+
+        # Assert that the correct log file has been deleted
+        self.assertTrue(os.path.isfile(logfile_1_path))
+        self.assertFalse(os.path.isfile(logfile_2_path))
+
+    def test_change_of_date_field_type(self):
+        """Test that the 'date' field of heartbeats is changed to a date."""
+        # Create a user, device and a heartbeat
+        user = Dummy.create_dummy_user()
+        device = Dummy.create_dummy_device(user)
+        heartbeat_timestamp = datetime(2015, 12, 15, 1, 23, 45, tzinfo=pytz.utc)
+
+        heartbeat = Dummy.create_dummy_report(
+            HeartBeat, device, date=heartbeat_timestamp
+        )
+
+        # Assert that the date is of type datetime
+        self.assertIsInstance(heartbeat.date, datetime)
+
+        # Run the migration
+        self.migrate_to_dest()
+
+        # Assert that the date is now of type date and has the correct value
+        heartbeat = HeartBeat.objects.first()
+        self.assertIsInstance(heartbeat.date, date)
+        self.assertEqual(heartbeat.date, heartbeat_timestamp.date())
diff --git a/crashreports/tests/test_models.py b/crashreports/tests/test_models.py
new file mode 100644
index 0000000..d12189d
--- /dev/null
+++ b/crashreports/tests/test_models.py
@@ -0,0 +1,42 @@
+"""Tests for the crashreports models."""
+import logging
+
+from django.forms import model_to_dict
+from django.test import TestCase
+
+from crashreports.models import HeartBeat, Crashreport
+from crashreports.tests.utils import Dummy
+
+
+class DuplicatesTestCase(TestCase):
+    """Test cases for the uniqueness for model instances."""
+
+    def test_creation_of_duplicate_heartbeats(self):
+        """Test creation of duplicate heartbeats."""
+        self._assert_duplicate_entries_can_not_be_created(HeartBeat)
+
+    def test_creation_of_duplicate_crashreports(self):
+        """Test creation of duplicate crashreports."""
+        self._assert_duplicate_entries_can_not_be_created(Crashreport)
+
+    def _assert_duplicate_entries_can_not_be_created(self, object_type):
+        # Create a user, device and a report
+        user = Dummy.create_dummy_user()
+        device = Dummy.create_dummy_device(user)
+        Dummy.create_dummy_report(object_type, device)
+
+        # Assert creating a duplicate report fails
+        logger = logging.getLogger("crashreports")
+        with self.assertLogs(logger, "DEBUG") as logging_watcher:
+            report = Dummy.create_dummy_report(object_type, device)
+        self.assertEqual(
+            logging_watcher.output,
+            [
+                "DEBUG:crashreports.models:"
+                "Duplicate {} received and dropped: {}".format(
+                    object_type.__name__, str(model_to_dict(report))
+                )
+            ],
+        )
+
+        self.assertEqual(object_type.objects.count(), 1)
diff --git a/crashreports/tests/test_rest_api_crashreports.py b/crashreports/tests/test_rest_api_crashreports.py
index 7165f9b..dbfaccc 100644
--- a/crashreports/tests/test_rest_api_crashreports.py
+++ b/crashreports/tests/test_rest_api_crashreports.py
@@ -1,4 +1,6 @@
 """Tests for the crashreports REST API."""
+from django.urls import reverse
+from rest_framework import status
 
 from crashreports.tests.utils import Dummy
 from crashreports.tests.test_rest_api_heartbeats import HeartbeatsTestCase
@@ -17,3 +19,31 @@
     @staticmethod
     def _create_dummy_data(**kwargs):
         return Dummy.crashreport_data(**kwargs)
+
+    @staticmethod
+    def _create_alternative_dummy_data(**kwargs):
+        return Dummy.alternative_crashreport_data(**kwargs)
+
+    def test_create_duplicate(self):
+        """Test creation of a duplicate crashreport."""
+        # Create a first crashreport
+        report_data = self._create_dummy_data(uuid=self.uuid)
+        response_first = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_data
+        )
+        self.assertEqual(response_first.status_code, status.HTTP_201_CREATED)
+
+        # Create a second crashreport for the same day and the same time
+        response_second = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_data
+        )
+        self.assertEqual(response_second.status_code, status.HTTP_201_CREATED)
+
+        # Assert that only one crashreport instance was created
+        url = reverse(self.LIST_CREATE_BY_UUID_URL, args=[self.uuid])
+        response = self.fp_staff_client.get(url)
+        self.assertEqual(len(response.data["results"]), 1)
+
+    def test_create_with_datetime(self):
+        """Override to just pass because crashreports always use datetime."""
+        pass
diff --git a/crashreports/tests/test_rest_api_heartbeats.py b/crashreports/tests/test_rest_api_heartbeats.py
index cd1ccec..9ae6b29 100644
--- a/crashreports/tests/test_rest_api_heartbeats.py
+++ b/crashreports/tests/test_rest_api_heartbeats.py
@@ -1,5 +1,7 @@
 """Tests for the heartbeats REST API."""
+from datetime import timedelta, datetime
 
+import pytz
 from django.urls import reverse
 
 from rest_framework import status
@@ -11,6 +13,8 @@
 class HeartbeatsTestCase(HiccupCrashreportsAPITestCase):
     """Test cases for heartbeats."""
 
+    # pylint: disable=too-many-public-methods
+
     LIST_CREATE_URL = "api_v1_heartbeats"
     RETRIEVE_URL = "api_v1_heartbeat"
     LIST_CREATE_BY_UUID_URL = "api_v1_heartbeats_by_uuid"
@@ -20,11 +24,34 @@
     def _create_dummy_data(**kwargs):
         return Dummy.heartbeat_data(**kwargs)
 
+    @staticmethod
+    def _create_alternative_dummy_data(**kwargs):
+        return Dummy.alternative_heartbeat_data(**kwargs)
+
     def _post_multiple(self, client, data, count):
-        return [
-            client.post(reverse(self.LIST_CREATE_URL), data)
-            for _ in range(count)
-        ]
+        """Send multiple POST requests to create reports.
+
+        Note that the date of the data will be adapted for each POST request
+        so that no duplicate reports are created. However, the given
+        `data` parameter value will not be modified.
+
+        Args:
+            client: The client used for sending the requests
+            data: The data that is sent with each request
+            count: The number of requests that should be made
+
+        Returns: A list of HTTP response objects
+
+        """
+        results = []
+        data_to_send = data.copy()
+        for i in range(count):
+            data_to_send["date"] += timedelta(days=i)
+            results.append(
+                client.post(reverse(self.LIST_CREATE_URL), data_to_send)
+            )
+
+        return results
 
     def _retrieve_single(self, user):
         count = 5
@@ -221,3 +248,68 @@
         data["date"] = "2017-10-29 02:34:56"
         response = self.user.post(reverse(self.LIST_CREATE_URL), data)
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
+    def test_create_duplicate(self):
+        """Test creation of a duplicate Heartbeat."""
+        # Create a first Heartbeat
+        report_data = self._create_dummy_data(uuid=self.uuid)
+        response_first = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_data
+        )
+        self.assertEqual(response_first.status_code, status.HTTP_201_CREATED)
+
+        # Create a second heartbeat for the same day
+        response_second = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_data
+        )
+        self.assertEqual(response_second.status_code, status.HTTP_201_CREATED)
+
+        # Assert that only one heartbeat instance was created
+        url = reverse(self.LIST_CREATE_BY_UUID_URL, args=[self.uuid])
+        response = self.fp_staff_client.get(url)
+        self.assertEqual(len(response.data["results"]), 1)
+
+    def test_create_duplicate_different_values(self):
+        """Test creation of a duplicate Heartbeat with different values."""
+        # Create a first Heartbeat
+        report_1_data = self._create_dummy_data(uuid=self.uuid)
+        response_first = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_1_data
+        )
+        self.assertEqual(response_first.status_code, status.HTTP_201_CREATED)
+
+        # Create a second heartbeat for the same day with all different
+        # values except for the date and UUID
+        report_2_data = self._create_alternative_dummy_data(uuid=self.uuid)
+        response_second = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_2_data
+        )
+        self.assertEqual(response_second.status_code, status.HTTP_201_CREATED)
+
+        # Assert that only one heartbeat instance was created
+        url = reverse(self.LIST_CREATE_BY_UUID_URL, args=[self.uuid])
+        response = self.fp_staff_client.get(url)
+        self.assertEqual(len(response.data["results"]), 1)
+
+        # Assert that the values are all the same as those of the first
+        # heartbeat, as we are dropping all incoming duplicates (we need to
+        # ignore the `id` because its value is set to -1 in the response for
+        # creating reports)
+        self.assertTrue(
+            {k: v for k, v in response.data["results"][0].items() if k != "id"}
+            == {k: v for k, v in response_first.data.items() if k != "id"}
+        )
+
+    def test_create_with_datetime(self):
+        """Test creation of heartbeats with datetime instead of date value.
+
+        The 'date' field of the HeartBeat model was originally a datetime
+        field but has since been changed to a date field. However, Hiccup
+        clients still send datetime values, which must also be accepted and
+        processed by the server.
+        """
+        data = self._create_dummy_data(uuid=self.uuid)
+        data["date"] = datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc)
+
+        response = self.user.post(reverse(self.LIST_CREATE_URL), data)
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+        self.assertEqual(response.data["date"], str(data["date"].date()))
diff --git a/crashreports/tests/test_rest_api_logfiles.py b/crashreports/tests/test_rest_api_logfiles.py
index 9b8b6b2..57482bf 100644
--- a/crashreports/tests/test_rest_api_logfiles.py
+++ b/crashreports/tests/test_rest_api_logfiles.py
@@ -70,7 +70,7 @@
         device_local_id = self._upload_crashreport(user, uuid)
 
         # Upload a logfile for the crashreport
-        logfile = open(Dummy.DEFAULT_DUMMY_LOG_FILE_PATH, "rb")
+        logfile = open(Dummy.DEFAULT_DUMMY_LOG_FILE_PATHS[0], "rb")
 
         logfile_name = os.path.basename(logfile.name)
         response = user.post(
@@ -97,7 +97,7 @@
         # bytes. However, we mainly care that the contents are equal:
         self._assert_zip_file_contents_equal(
             default_storage.path(uploaded_logfile_path),
-            Dummy.DEFAULT_DUMMY_LOG_FILE_PATH,
+            Dummy.DEFAULT_DUMMY_LOG_FILE_PATHS[0],
         )
 
     def test_logfile_upload_as_user(self):
diff --git a/crashreports/tests/utils.py b/crashreports/tests/utils.py
index fcaa2a3..73a479b 100644
--- a/crashreports/tests/utils.py
+++ b/crashreports/tests/utils.py
@@ -73,7 +73,7 @@
     ]
     UUIDs = ["e1c0cc95-ab8d-461a-a768-cb8d9d7fdb04"]
 
-    USERNAMES = ["testuser1", "testuser2", "testuser3"]
+    USERNAMES = ["testuser1", "testuser2", "testuser3", "testuser4"]
 
     DATES = [date(2018, 3, 19), date(2018, 3, 26), date(2018, 5, 1)]
 
@@ -97,16 +97,39 @@
         ),
         "build_fingerprint": BUILD_FINGERPRINTS[0],
         "radio_version": RADIO_VERSIONS[0],
-        "date": datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc),
+        "date": date(2018, 3, 19),
+    }
+
+    ALTERNATIVE_HEARTBEAT_VALUES = {
+        "app_version": 10101,
+        "uptime": (
+            "up time: 2 days, 12:39:13, idle time: 2 days, 11:35:01, "
+            "sleep time: 2 days, 11:56:12"
+        ),
+        "build_fingerprint": BUILD_FINGERPRINTS[1],
+        "radio_version": RADIO_VERSIONS[1],
+        "date": date(2018, 3, 19),
     }
 
     DEFAULT_DUMMY_CRASHREPORT_VALUES = DEFAULT_DUMMY_HEARTBEAT_VALUES.copy()
     DEFAULT_DUMMY_CRASHREPORT_VALUES.update(
         {
-            "is_fake_report": 0,
+            "is_fake_report": False,
             "boot_reason": Crashreport.BOOT_REASON_UNKOWN,
             "power_on_reason": "it was powered on",
             "power_off_reason": "something happened and it went off",
+            "date": datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc),
+        }
+    )
+
+    ALTERNATIVE_CRASHREPORT_VALUES = ALTERNATIVE_HEARTBEAT_VALUES.copy()
+    ALTERNATIVE_CRASHREPORT_VALUES.update(
+        {
+            "is_fake_report": True,
+            "boot_reason": Crashreport.BOOT_REASON_KEYBOARD_POWER_ON,
+            "power_on_reason": "alternative power on reason",
+            "power_off_reason": "alternative power off reason",
+            "date": datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc),
         }
     )
 
@@ -118,18 +141,28 @@
         "other": "whatever",
     }
 
-    DEFAULT_DUMMY_LOG_FILE_FILENAME = "test_logfile.zip"
+    DEFAULT_DUMMY_LOG_FILE_FILENAMES = [
+        "test_logfile_1.zip",
+        "test_logfile_2.zip",
+    ]
     DEFAULT_DUMMY_LOG_FILE_DIRECTORY = os.path.join("resources", "test")
 
-    DEFAULT_DUMMY_LOG_FILE_PATH = os.path.join(
-        DEFAULT_DUMMY_LOG_FILE_DIRECTORY, DEFAULT_DUMMY_LOG_FILE_FILENAME
-    )
-
     DEFAULT_DUMMY_LOG_FILE_VALUES = {
         "logfile_type": "last_kmsg",
-        "logfile": DEFAULT_DUMMY_LOG_FILE_FILENAME,
+        "logfile": DEFAULT_DUMMY_LOG_FILE_FILENAMES[0],
     }
 
+    DEFAULT_DUMMY_LOG_FILE_PATHS = [
+        os.path.join(
+            DEFAULT_DUMMY_LOG_FILE_DIRECTORY,
+            DEFAULT_DUMMY_LOG_FILE_FILENAMES[0],
+        ),
+        os.path.join(
+            DEFAULT_DUMMY_LOG_FILE_DIRECTORY,
+            DEFAULT_DUMMY_LOG_FILE_FILENAMES[1],
+        ),
+    ]
+
     @staticmethod
     def _update_copy(original, update):
         """Merge fields of update into a copy of original."""
@@ -158,6 +191,15 @@
         return Dummy._update_copy(Dummy.DEFAULT_DUMMY_HEARTBEAT_VALUES, kwargs)
 
     @staticmethod
+    def alternative_heartbeat_data(**kwargs):
+        """Return the alternative data required to create a heartbeat.
+
+        Use the values passed as keyword arguments or default to the ones
+        from `Dummy.ALTERNATIVE_HEARTBEAT_VALUES`.
+        """
+        return Dummy._update_copy(Dummy.ALTERNATIVE_HEARTBEAT_VALUES, kwargs)
+
+    @staticmethod
     def crashreport_data(report_type: Optional[str] = None, **kwargs):
         """Return the data required to create a crashreport.
 
@@ -182,6 +224,15 @@
         return data
 
     @staticmethod
+    def alternative_crashreport_data(**kwargs):
+        """Return the alternative data required to create a crashreport.
+
+        Use the values passed as keyword arguments or default to the ones
+        from `Dummy.ALTERNATIVE_CRASHREPORT_VALUES`.
+        """
+        return Dummy._update_copy(Dummy.ALTERNATIVE_CRASHREPORT_VALUES, kwargs)
+
+    @staticmethod
     def create_dummy_user(**kwargs):
         """Create a dummy user instance.
 
diff --git a/resources/test/test_logfile.zip b/resources/test/test_logfile_1.zip
similarity index 100%
rename from resources/test/test_logfile.zip
rename to resources/test/test_logfile_1.zip
Binary files differ
diff --git a/resources/test/test_logfile_2.zip b/resources/test/test_logfile_2.zip
new file mode 100644
index 0000000..90bf7d0
--- /dev/null
+++ b/resources/test/test_logfile_2.zip
Binary files differ
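
Usage note for client developers: a condensed, illustrative restatement of
the new test_create_with_datetime case above. Here `user_client` and
`device_uuid` are placeholders for an authenticated API client and a
registered device's UUID (the tests obtain them from their fixture setup):

    # Clients that still send a full datetime for 'date' keep working: the
    # HeartBeatSerializer truncates the value to its date part before
    # validation, and the response echoes only the stored date.
    from datetime import datetime

    import pytz
    from django.urls import reverse

    from crashreports.tests.utils import Dummy

    data = Dummy.heartbeat_data(uuid=device_uuid)
    data["date"] = datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc)

    response = user_client.post(reverse("api_v1_heartbeats"), data)
    assert response.status_code == 201            # still accepted
    assert response.data["date"] == "2018-03-19"  # only the date is stored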