Disallow duplicate heartbeats and crashreports

Add unique constraints and corresponding schema and data migration.
Adapt all test cases so that only unique heartbeats and crashreports
are sent. Delete test cases that are no longer applicable because
duplicate entries can no longer exist in the database.

Issue: HIC-180
Change-Id: I768d1610d4482c9d61b76cdbc588334198bfe415
diff --git a/crashreports/migrations/0006_add_unique_constraints_and_drop_duplicates.py b/crashreports/migrations/0006_add_unique_constraints_and_drop_duplicates.py
new file mode 100644
index 0000000..5fa6a6f
--- /dev/null
+++ b/crashreports/migrations/0006_add_unique_constraints_and_drop_duplicates.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+"""Migrations to set the unique constraints and drop duplicates."""
+# pylint: disable=invalid-name
+import logging
+
+from django.db import migrations, models, connection
+from django.db.models import Count, Min
+
+from crashreports.models import HeartBeat, Crashreport
+
+LOGGER = logging.getLogger(__name__)
+
+
+def drop_heartbeat_duplicates(apps, schema_editor):
+    """Drop duplicate heartbeat entries."""
+    # pylint: disable=unused-argument
+    find_and_drop_duplicates(HeartBeat)
+
+
+def drop_crashreport_duplicates(apps, schema_editor):
+    """Drop duplicate crashreport entries."""
+    # pylint: disable=unused-argument
+    find_and_drop_duplicates(Crashreport)
+
+
+def find_and_drop_duplicates(object_type):
+    """Drop all duplicates of the given object type."""
+    unique_fields = ("device", "date")
+    duplicates = (
+        object_type.objects.values(*unique_fields)
+        .order_by()
+        .annotate(min_id=Min("id"), num_duplicates=Count("id"))
+        .filter(num_duplicates__gt=1)
+    )
+
+    LOGGER.info(
+        "Found %d %s instances that have duplicates. These will be removed.",
+        duplicates.count(),
+        object_type.__name__,
+    )
+    for duplicate in duplicates:
+        LOGGER.debug("Removing duplicates: %s", duplicate)
+        (
+            object_type.objects.filter(
+                device=duplicate["device"], date=duplicate["date"]
+            )
+            .exclude(id=duplicate["min_id"])
+            .delete()
+        )
+
+    # Manually commit the data migration before schema migrations are applied
+    connection.cursor().execute("COMMIT;")
+
+
+class Migration(migrations.Migration):
+    """Change heartbeat date field, set unique constraints, drop duplicates."""
+
+    dependencies = [("crashreports", "0005_add_fp_staff_group")]
+
+    operations = [
+        migrations.AlterField(
+            model_name="heartbeat",
+            name="date",
+            field=models.DateField(db_index=True),
+        ),
+        migrations.RunPython(
+            drop_heartbeat_duplicates, reverse_code=migrations.RunPython.noop
+        ),
+        migrations.RunPython(
+            drop_crashreport_duplicates, reverse_code=migrations.RunPython.noop
+        ),
+        migrations.AlterUniqueTogether(
+            name="crashreport", unique_together=set([("device", "date")])
+        ),
+        migrations.AlterUniqueTogether(
+            name="heartbeat", unique_together=set([("device", "date")])
+        ),
+    ]
diff --git a/crashreports/models.py b/crashreports/models.py
index 25aa452..f76c894 100644
--- a/crashreports/models.py
+++ b/crashreports/models.py
@@ -1,14 +1,17 @@
 # -*- coding: utf-8 -*-
 """Models for devices, heartbeats, crashreports and log files."""
-
+import logging
 import os
 import uuid
 
-from django.db import models, transaction
+from django.db import models, transaction, IntegrityError
 from django.contrib.auth.models import User
 from django.dispatch import receiver
+from django.forms import model_to_dict
 from taggit.managers import TaggableManager
 
+LOGGER = logging.getLogger(__name__)
+
 
 class Device(models.Model):
     """A device representing a phone that has been registered on Hiccup."""
@@ -109,6 +112,9 @@
     next_logfile_key = models.PositiveIntegerField(default=1)
     created_at = models.DateTimeField(auto_now_add=True)
 
+    class Meta:  # noqa: D106
+        unique_together = ("device", "date")
+
     @transaction.atomic
     def get_logfile_key(self):
         """Get the next key for a log file and update the ID-counter."""
@@ -125,11 +131,21 @@
         update_fields=None,
     ):
         """Save the crashreport and set its local ID if it was not set."""
-        if not self.device_local_id:
-            self.device_local_id = self.device.get_crashreport_key()
-        super(Crashreport, self).save(
-            force_insert, force_update, using, update_fields
-        )
+        try:
+            with transaction.atomic():
+                if not self.device_local_id:
+                    self.device_local_id = self.device.get_crashreport_key()
+                super(Crashreport, self).save(
+                    force_insert, force_update, using, update_fields
+                )
+        except IntegrityError:
+            # If there is a duplicate entry, log its values and return
+            # without throwing an exception to keep idempotency of the
+            # interface.
+            LOGGER.debug(
+                "Duplicate Crashreport received and dropped: %s",
+                model_to_dict(self),
+            )
 
     def _get_uuid(self):
         """Return the device UUID."""
@@ -187,10 +203,13 @@
     uptime = models.CharField(max_length=200)
     build_fingerprint = models.CharField(db_index=True, max_length=200)
     radio_version = models.CharField(db_index=True, max_length=200, null=True)
-    date = models.DateTimeField(db_index=True)
+    date = models.DateField(db_index=True)
     device_local_id = models.PositiveIntegerField(blank=True)
     created_at = models.DateTimeField(auto_now_add=True)
 
+    class Meta:  # noqa: D106
+        unique_together = ("device", "date")
+
     def save(
         self,
         force_insert=False,
@@ -199,11 +218,21 @@
         update_fields=None,
     ):
         """Save the heartbeat and set its local ID if it was not set."""
-        if not self.device_local_id:
-            self.device_local_id = self.device.get_heartbeat_key()
-        super(HeartBeat, self).save(
-            force_insert, force_update, using, update_fields
-        )
+        try:
+            with transaction.atomic():
+                if not self.device_local_id:
+                    self.device_local_id = self.device.get_heartbeat_key()
+                super(HeartBeat, self).save(
+                    force_insert, force_update, using, update_fields
+                )
+        except IntegrityError:
+            # If there is a duplicate entry, log its values and return
+            # without throwing an exception to keep idempotency of the
+            # interface.
+            LOGGER.debug(
+                "Duplicate HeartBeat received and dropped: %s",
+                model_to_dict(self),
+            )
 
     def _get_uuid(self):
         """Return the device UUID."""
diff --git a/crashreports/serializers.py b/crashreports/serializers.py
index 1125f62..1dd52ea 100644
--- a/crashreports/serializers.py
+++ b/crashreports/serializers.py
@@ -1,6 +1,7 @@
 """Serializers for Crashreport-related models."""
 from django.utils import timezone
 from django.core.exceptions import ObjectDoesNotExist
+from django.utils.dateparse import parse_datetime
 from rest_framework import serializers
 from rest_framework.exceptions import NotFound
 from rest_framework import permissions
@@ -77,7 +78,7 @@
     uuid = serializers.CharField(max_length=64)
     id = PrivateField()
     device_local_id = serializers.IntegerField(required=False)
-    date = serializers.DateTimeField(default_timezone=timezone.utc)
+    date = serializers.DateField()
 
     class Meta:  # noqa: D106
         model = HeartBeat
@@ -102,6 +103,22 @@
         heartbeat.save()
         return heartbeat
 
+    def to_internal_value(self, data):
+        """Parse serialized heartbeat representations.
+
+        Incoming 'date' values that are datetime values (including time) are
+        changed so that only the date part of the value is deserialized.
+        Initially, the date was a datetime field and Hiccup clients can still
+        send datetime values.
+        """
+        datetime = parse_datetime(data["date"])
+        if datetime:
+            updated_data = data.copy()
+            updated_data["date"] = datetime.date().isoformat()
+            data = updated_data
+
+        return super(HeartBeatSerializer, self).to_internal_value(data)
+
 
 class LogFileSerializer(serializers.ModelSerializer):
     """Serializer for LogFile instances."""
diff --git a/crashreports/tests/test_migrations.py b/crashreports/tests/test_migrations.py
new file mode 100644
index 0000000..e5faffb
--- /dev/null
+++ b/crashreports/tests/test_migrations.py
@@ -0,0 +1,149 @@
+"""Tests for the Django database migrations."""
+import logging
+import os
+import tempfile
+from datetime import datetime, date
+
+import pytz
+from django.test import TransactionTestCase, override_settings
+from django.db.migrations.executor import MigrationExecutor
+from django.db import connection
+
+from crashreports.models import Crashreport, HeartBeat, LogFile
+from crashreports.tests.utils import Dummy
+
+
+class MigrationTestCase(TransactionTestCase):
+    """Test for Django database migrations."""
+
+    # Make data from migrations available in the test cases
+    serialized_rollback = True
+
+    # These must be defined by subclasses.
+    migrate_from = None
+    migrate_to = None
+
+    def setUp(self):
+        """Set up the database up to the state of the first migration."""
+        super(MigrationTestCase, self).setUp()
+
+        self.executor = MigrationExecutor(connection)
+        self.executor.migrate(self.migrate_from)
+
+    def migrate_to_dest(self):
+        """Migrate the database to the desired destination migration."""
+        self.executor.loader.build_graph()
+        self.executor.migrate(self.migrate_to)
+
+
+@override_settings(MEDIA_ROOT=tempfile.mkdtemp(".hiccup-tests"))
+class DropDuplicatesMigrationTestCase(MigrationTestCase):
+    """Test the migration for dropping duplicate heartbeats and crashreports."""
+
+    migrate_from = [("crashreports", "0005_add_fp_staff_group")]
+    migrate_to = [
+        ("crashreports", "0006_add_unique_constraints_and_drop_duplicates")
+    ]
+
+    def test_duplicate_heartbeats_are_deleted(self):
+        """Test that duplicate heartbeats are deleted after migrating."""
+        self._assert_duplicates_are_deleted(HeartBeat)
+
+    def test_duplicate_crashreports_are_deleted(self):
+        """Test that duplicate crashreports are deleted after migrating."""
+        self._assert_duplicates_are_deleted(Crashreport)
+
+    def _assert_duplicates_are_deleted(self, object_type):
+        # Create a user, device and two duplicate reports
+        user = Dummy.create_dummy_user()
+        device = Dummy.create_dummy_device(user)
+        report_1 = Dummy.create_dummy_report(object_type, device)
+        Dummy.create_dummy_report(object_type, device)
+
+        # Assert that 2 instances have been created
+        self.assertEqual(object_type.objects.count(), 2)
+
+        # Run the migration
+        logger = logging.getLogger("crashreports")
+        with self.assertLogs(logger, "DEBUG") as logging_watcher:
+            self.migrate_to_dest()
+
+        # Assert the correct message is logged
+        self.assertTrue(
+            {
+                "INFO:crashreports.migrations."
+                "0006_add_unique_constraints_and_drop_duplicates:"
+                "Found 1 {} instances that have duplicates. "
+                "These will be removed.".format(object_type.__name__),
+                "DEBUG:crashreports.migrations"
+                ".0006_add_unique_constraints_and_drop_duplicates:Removing "
+                "duplicates: {}".format(
+                    str(
+                        {
+                            "device": device.id,
+                            "date": report_1.date,
+                            "min_id": report_1.id,
+                            "num_duplicates": 2,
+                        }
+                    )
+                ),
+            }.issubset(set(logging_watcher.output))
+        )
+
+        # Assert that only one instance is left in the database
+        self.assertEqual(object_type.objects.count(), 1)
+
+    def test_delete_duplicate_crashreport_with_logfile(self):
+        """Test deletion of a duplicate crashreport with logfile."""
+        # Create a user, device and two duplicate reports with logfiles
+        user = Dummy.create_dummy_user()
+        device = Dummy.create_dummy_device(user)
+        crashreport_1 = Dummy.create_dummy_report(Crashreport, device)
+        crashreport_2 = Dummy.create_dummy_report(Crashreport, device)
+        _, logfile_1_path = Dummy.create_dummy_log_file_with_actual_file(
+            crashreport_1
+        )
+        _, logfile_2_path = Dummy.create_dummy_log_file_with_actual_file(
+            crashreport_2, logfile=Dummy.DEFAULT_DUMMY_LOG_FILE_PATHS[1]
+        )
+
+        # Assert that 2 crashreports and logfiles have been created
+        self.assertEqual(Crashreport.objects.count(), 2)
+        self.assertEqual(LogFile.objects.count(), 2)
+        self.assertTrue(os.path.isfile(logfile_1_path))
+        self.assertTrue(os.path.isfile(logfile_2_path))
+
+        # Run the migration
+        self.migrate_to_dest()
+
+        # Assert that only one crashreport and one logfile is left in the
+        # database
+        self.assertEqual(Crashreport.objects.count(), 1)
+        self.assertEqual(Crashreport.objects.first().logfiles.count(), 1)
+        self.assertEqual(LogFile.objects.count(), 1)
+
+        # Assert that the correct log file has been deleted
+        self.assertTrue(os.path.isfile(logfile_1_path))
+        self.assertFalse(os.path.isfile(logfile_2_path))
+
+    def test_change_of_date_field_type(self):
+        """Test that the 'date' field of heartbeats is changed to a date."""
+        # Create a user, device and a heartbeat
+        user = Dummy.create_dummy_user()
+        device = Dummy.create_dummy_device(user)
+        heartbeat_timestamp = datetime(2015, 12, 15, 1, 23, 45, tzinfo=pytz.utc)
+
+        heartbeat = Dummy.create_dummy_report(
+            HeartBeat, device, date=heartbeat_timestamp
+        )
+
+        # Assert that the date is of type datetime
+        self.assertIsInstance(heartbeat.date, datetime)
+
+        # Run the migration
+        self.migrate_to_dest()
+
+        # Assert that the date is now of type date and has the correct value
+        heartbeat = HeartBeat.objects.first()
+        self.assertIsInstance(heartbeat.date, date)
+        self.assertEqual(heartbeat.date, heartbeat_timestamp.date())
diff --git a/crashreports/tests/test_models.py b/crashreports/tests/test_models.py
new file mode 100644
index 0000000..d12189d
--- /dev/null
+++ b/crashreports/tests/test_models.py
@@ -0,0 +1,42 @@
+"""Tests for the crashreports models."""
+import logging
+
+from django.forms import model_to_dict
+from django.test import TestCase
+
+from crashreports.models import HeartBeat, Crashreport
+from crashreports.tests.utils import Dummy
+
+
+class DuplicatesTestCase(TestCase):
+    """Test cases for the uniqueness for model instances."""
+
+    def test_creation_of_duplicate_heartbeats(self):
+        """Test creation of duplicate heartbeats."""
+        self._assert_duplicate_entries_can_not_be_created(HeartBeat)
+
+    def test_creation_of_duplicate_crashreports(self):
+        """Test creation of duplicate crashreports."""
+        self._assert_duplicate_entries_can_not_be_created(Crashreport)
+
+    def _assert_duplicate_entries_can_not_be_created(self, object_type):
+        # Create a user, device and a report
+        user = Dummy.create_dummy_user()
+        device = Dummy.create_dummy_device(user)
+        Dummy.create_dummy_report(object_type, device)
+
+        # Assert creating a duplicate report fails
+        logger = logging.getLogger("crashreports")
+        with self.assertLogs(logger, "DEBUG") as logging_watcher:
+            report = Dummy.create_dummy_report(object_type, device)
+        self.assertEqual(
+            logging_watcher.output,
+            [
+                "DEBUG:crashreports.models:"
+                "Duplicate {} received and dropped: {}".format(
+                    object_type.__name__, str(model_to_dict(report))
+                )
+            ],
+        )
+
+        self.assertEqual(object_type.objects.count(), 1)
diff --git a/crashreports/tests/test_rest_api_crashreports.py b/crashreports/tests/test_rest_api_crashreports.py
index 7165f9b..dbfaccc 100644
--- a/crashreports/tests/test_rest_api_crashreports.py
+++ b/crashreports/tests/test_rest_api_crashreports.py
@@ -1,4 +1,6 @@
 """Tests for the crashreports REST API."""
+from django.urls import reverse
+from rest_framework import status
 
 from crashreports.tests.utils import Dummy
 from crashreports.tests.test_rest_api_heartbeats import HeartbeatsTestCase
@@ -17,3 +19,31 @@
     @staticmethod
     def _create_dummy_data(**kwargs):
         return Dummy.crashreport_data(**kwargs)
+
+    @staticmethod
+    def _create_alternative_dummy_data(**kwargs):
+        return Dummy.alternative_crashreport_data(**kwargs)
+
+    def test_create_duplicate(self):
+        """Test creation of a duplicate crashreport."""
+        # Create a first crashreport
+        report_data = self._create_dummy_data(uuid=self.uuid)
+        response_first = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_data
+        )
+        self.assertEqual(response_first.status_code, status.HTTP_201_CREATED)
+
+        # Create a second crashreport for the same day and the same time
+        response_second = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_data
+        )
+        self.assertEqual(response_second.status_code, status.HTTP_201_CREATED)
+
+        # Assert that only one crashreport instance was created
+        url = reverse(self.LIST_CREATE_BY_UUID_URL, args=[self.uuid])
+        response = self.fp_staff_client.get(url)
+        self.assertEqual(len(response.data["results"]), 1)
+
+    def test_create_with_datetime(self):
+        """Override to just pass because crashreports always use datetime."""
+        pass
diff --git a/crashreports/tests/test_rest_api_heartbeats.py b/crashreports/tests/test_rest_api_heartbeats.py
index cd1ccec..9ae6b29 100644
--- a/crashreports/tests/test_rest_api_heartbeats.py
+++ b/crashreports/tests/test_rest_api_heartbeats.py
@@ -1,5 +1,7 @@
 """Tests for the heartbeats REST API."""
+from datetime import timedelta, datetime
 
+import pytz
 from django.urls import reverse
 
 from rest_framework import status
@@ -11,6 +13,8 @@
 class HeartbeatsTestCase(HiccupCrashreportsAPITestCase):
     """Test cases for heartbeats."""
 
+    # pylint: disable=too-many-public-methods
+
     LIST_CREATE_URL = "api_v1_heartbeats"
     RETRIEVE_URL = "api_v1_heartbeat"
     LIST_CREATE_BY_UUID_URL = "api_v1_heartbeats_by_uuid"
@@ -20,11 +24,34 @@
     def _create_dummy_data(**kwargs):
         return Dummy.heartbeat_data(**kwargs)
 
+    @staticmethod
+    def _create_alternative_dummy_data(**kwargs):
+        return Dummy.alternative_heartbeat_data(**kwargs)
+
     def _post_multiple(self, client, data, count):
-        return [
-            client.post(reverse(self.LIST_CREATE_URL), data)
-            for _ in range(count)
-        ]
+        """Send multiple POST requests to create reports.
+
+        Note that the date of the data will be adapted for each POST request
+        so that no duplicate reports are being created. However, the given
+        `data` parameter value will not be modified.
+
+        Args:
+            client: The client used for sending the requests
+            data: The data that is sent each request
+            count: The number of requests that should be made
+
+        Returns: A list of HTTP response objects
+
+        """
+        results = []
+        data_to_send = data.copy()
+        for i in range(count):
+            data_to_send["date"] += timedelta(days=i)
+            results.append(
+                client.post(reverse(self.LIST_CREATE_URL), data_to_send)
+            )
+
+        return results
 
     def _retrieve_single(self, user):
         count = 5
@@ -221,3 +248,68 @@
         data["date"] = "2017-10-29 02:34:56"
         response = self.user.post(reverse(self.LIST_CREATE_URL), data)
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
+    def test_create_duplicate(self):
+        """Test creation of a duplicate Heartbeat."""
+        # Create a first Heartbeat
+        report_data = self._create_dummy_data(uuid=self.uuid)
+        response_first = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_data
+        )
+        self.assertEqual(response_first.status_code, status.HTTP_201_CREATED)
+
+        # Create a second heartbeat for the same day
+        response_second = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_data
+        )
+        self.assertEqual(response_second.status_code, status.HTTP_201_CREATED)
+
+        # Assert that only one heartbeat instance was created
+        url = reverse(self.LIST_CREATE_BY_UUID_URL, args=[self.uuid])
+        response = self.fp_staff_client.get(url)
+        self.assertEqual(len(response.data["results"]), 1)
+
+    def test_create_duplicate_different_values(self):
+        """Test creation of a duplicate Heartbeat with different values."""
+        # Create a first Heartbeat
+        report_1_data = self._create_dummy_data(uuid=self.uuid)
+        response_first = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_1_data
+        )
+        self.assertEqual(response_first.status_code, status.HTTP_201_CREATED)
+
+        # Create a second heartbeat for the same day with all different
+        # values except for the date and UUID
+        report_2_data = self._create_alternative_dummy_data(uuid=self.uuid)
+        response_second = self.user.post(
+            reverse(self.LIST_CREATE_URL), report_2_data
+        )
+        self.assertEqual(response_second.status_code, status.HTTP_201_CREATED)
+
+        # Assert that only one heartbeat instance was created
+        url = reverse(self.LIST_CREATE_BY_UUID_URL, args=[self.uuid])
+        response = self.fp_staff_client.get(url)
+        self.assertEqual(len(response.data["results"]), 1)
+
+        # Assert that the values are all the same as of the first heartbeat, as
+        # we are dropping all incoming duplicates (we need to ignore the `id`
+        # because its value is set to -1 in the response for creating reports)
+        self.assertTrue(
+            {k: v for k, v in response.data["results"][0].items() if k != "id"}
+            == {k: v for k, v in response_first.data.items() if k != "id"}
+        )
+
+    def test_create_with_datetime(self):
+        """Test creation of heartbeats with datetime instead of date value.
+
+        Initially, the 'date' field of the HeartBeat model was a datetime
+        field but now has been changed to a date field. However, Hiccup clients
+        are still sending datetime values which also need to be accepted and
+        processed by the server.
+        """
+        data = self._create_dummy_data(uuid=self.uuid)
+        data["date"] = datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc)
+
+        response = self.user.post(reverse(self.LIST_CREATE_URL), data)
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+        self.assertEqual(response.data["date"], str(data["date"].date()))
diff --git a/crashreports/tests/test_rest_api_logfiles.py b/crashreports/tests/test_rest_api_logfiles.py
index 9b8b6b2..57482bf 100644
--- a/crashreports/tests/test_rest_api_logfiles.py
+++ b/crashreports/tests/test_rest_api_logfiles.py
@@ -70,7 +70,7 @@
         device_local_id = self._upload_crashreport(user, uuid)
 
         # Upload a logfile for the crashreport
-        logfile = open(Dummy.DEFAULT_DUMMY_LOG_FILE_PATH, "rb")
+        logfile = open(Dummy.DEFAULT_DUMMY_LOG_FILE_PATHS[0], "rb")
 
         logfile_name = os.path.basename(logfile.name)
         response = user.post(
@@ -97,7 +97,7 @@
         # bytes. However, we mainly care that the contents are equal:
         self._assert_zip_file_contents_equal(
             default_storage.path(uploaded_logfile_path),
-            Dummy.DEFAULT_DUMMY_LOG_FILE_PATH,
+            Dummy.DEFAULT_DUMMY_LOG_FILE_PATHS[0],
         )
 
     def test_logfile_upload_as_user(self):
diff --git a/crashreports/tests/utils.py b/crashreports/tests/utils.py
index fcaa2a3..73a479b 100644
--- a/crashreports/tests/utils.py
+++ b/crashreports/tests/utils.py
@@ -73,7 +73,7 @@
     ]
     UUIDs = ["e1c0cc95-ab8d-461a-a768-cb8d9d7fdb04"]
 
-    USERNAMES = ["testuser1", "testuser2", "testuser3"]
+    USERNAMES = ["testuser1", "testuser2", "testuser3", "testuser4"]
 
     DATES = [date(2018, 3, 19), date(2018, 3, 26), date(2018, 5, 1)]
 
@@ -97,16 +97,39 @@
         ),
         "build_fingerprint": BUILD_FINGERPRINTS[0],
         "radio_version": RADIO_VERSIONS[0],
-        "date": datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc),
+        "date": date(2018, 3, 19),
+    }
+
+    ALTERNATIVE_HEARTBEAT_VALUES = {
+        "app_version": 10101,
+        "uptime": (
+            "up time: 2 days, 12:39:13, idle time: 2 days, 11:35:01, "
+            "sleep time: 2 days, 11:56:12"
+        ),
+        "build_fingerprint": BUILD_FINGERPRINTS[1],
+        "radio_version": RADIO_VERSIONS[1],
+        "date": date(2018, 3, 19),
     }
 
     DEFAULT_DUMMY_CRASHREPORT_VALUES = DEFAULT_DUMMY_HEARTBEAT_VALUES.copy()
     DEFAULT_DUMMY_CRASHREPORT_VALUES.update(
         {
-            "is_fake_report": 0,
+            "is_fake_report": False,
             "boot_reason": Crashreport.BOOT_REASON_UNKOWN,
             "power_on_reason": "it was powered on",
             "power_off_reason": "something happened and it went off",
+            "date": datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc),
+        }
+    )
+
+    ALTERNATIVE_CRASHREPORT_VALUES = ALTERNATIVE_HEARTBEAT_VALUES.copy()
+    ALTERNATIVE_CRASHREPORT_VALUES.update(
+        {
+            "is_fake_report": True,
+            "boot_reason": Crashreport.BOOT_REASON_KEYBOARD_POWER_ON,
+            "power_on_reason": "alternative power on reason",
+            "power_off_reason": "alternative power off reason",
+            "date": datetime(2018, 3, 19, 12, 0, 0, tzinfo=pytz.utc),
         }
     )
 
@@ -118,18 +141,28 @@
         "other": "whatever",
     }
 
-    DEFAULT_DUMMY_LOG_FILE_FILENAME = "test_logfile.zip"
+    DEFAULT_DUMMY_LOG_FILE_FILENAMES = [
+        "test_logfile_1.zip",
+        "test_logfile_2.zip",
+    ]
     DEFAULT_DUMMY_LOG_FILE_DIRECTORY = os.path.join("resources", "test")
 
-    DEFAULT_DUMMY_LOG_FILE_PATH = os.path.join(
-        DEFAULT_DUMMY_LOG_FILE_DIRECTORY, DEFAULT_DUMMY_LOG_FILE_FILENAME
-    )
-
     DEFAULT_DUMMY_LOG_FILE_VALUES = {
         "logfile_type": "last_kmsg",
-        "logfile": DEFAULT_DUMMY_LOG_FILE_FILENAME,
+        "logfile": DEFAULT_DUMMY_LOG_FILE_FILENAMES[0],
     }
 
+    DEFAULT_DUMMY_LOG_FILE_PATHS = [
+        os.path.join(
+            DEFAULT_DUMMY_LOG_FILE_DIRECTORY,
+            DEFAULT_DUMMY_LOG_FILE_FILENAMES[0],
+        ),
+        os.path.join(
+            DEFAULT_DUMMY_LOG_FILE_DIRECTORY,
+            DEFAULT_DUMMY_LOG_FILE_FILENAMES[1],
+        ),
+    ]
+
     @staticmethod
     def _update_copy(original, update):
         """Merge fields of update into a copy of original."""
@@ -158,6 +191,15 @@
         return Dummy._update_copy(Dummy.DEFAULT_DUMMY_HEARTBEAT_VALUES, kwargs)
 
     @staticmethod
+    def alternative_heartbeat_data(**kwargs):
+        """Return the alternative data required to create a heartbeat.
+
+        Use the values passed as keyword arguments or default to the ones
+        from `Dummy.ALTERNATIVE_HEARTBEAT_VALUES`.
+        """
+        return Dummy._update_copy(Dummy.ALTERNATIVE_HEARTBEAT_VALUES, kwargs)
+
+    @staticmethod
     def crashreport_data(report_type: Optional[str] = None, **kwargs):
         """Return the data required to create a crashreport.
 
@@ -182,6 +224,15 @@
         return data
 
     @staticmethod
+    def alternative_crashreport_data(**kwargs):
+        """Return the alternative data required to create a crashreport.
+
+        Use the values passed as keyword arguments or default to the ones
+        from `Dummy.ALTERNATIVE_CRASHREPORT_VALUES`.
+        """
+        return Dummy._update_copy(Dummy.ALTERNATIVE_CRASHREPORT_VALUES, kwargs)
+
+    @staticmethod
     def create_dummy_user(**kwargs):
         """Create a dummy user instance.