2 changes: 1 addition & 1 deletion setup.py
@@ -21,7 +21,7 @@
zip_safe=False,
package_dir={'': 'src'},
packages=find_packages(where='src'),
install_requires=["requests>=2", "pytest>=7"],
install_requires=["requests>=2", "pytest>=7", "filelock>=3"],
extras_require={
"dev": [
"mock>=4",
6 changes: 3 additions & 3 deletions src/buildkite_test_collector/collector/payload.py
@@ -1,7 +1,7 @@
"""Buildkite Test Analytics payload"""

from dataclasses import dataclass, replace, field
from typing import Dict, Tuple, Optional, Union, Literal
from typing import Dict, Tuple, Optional, Union, Literal, List
from datetime import timedelta
from uuid import UUID

@@ -77,7 +77,7 @@ class TestHistory:
start_at: Optional[Instant] = None
end_at: Optional[Instant] = None
duration: Optional[timedelta] = None
children: Tuple['TestSpan'] = ()
children: List['TestSpan'] = ()

def is_finished(self) -> bool:
"""Is there an end_at time present?"""
@@ -91,7 +91,7 @@ def as_json(self, started_at: Instant) -> JsonDict:
"""Convert this trace into a Dict for eventual serialisation into JSON"""
attrs = {
"section": "top",
"children": tuple(map(lambda span: span.as_json(started_at), self.children))
"children": list(map(lambda span: span.as_json(started_at), self.children))
}

if self.start_at is not None:
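
Side note on the annotation change above: in typing, Tuple['TestSpan'] describes a tuple of exactly one element, while a variable-length homogeneous tuple would be written Tuple['TestSpan', ...]; List['TestSpan'] is variable-length by definition. A small illustrative snippet, not part of the PR:

from typing import List, Tuple

one: Tuple[int] = (1,)          # Tuple[int] means "exactly one int"
many: Tuple[int, ...] = (1, 2)  # variable-length tuples need the ellipsis form
spans: List[int] = [1, 2, 3]    # List[int] holds any number of elements
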
9 changes: 8 additions & 1 deletion src/buildkite_test_collector/pytest_plugin/__init__.py
@@ -59,7 +59,7 @@ def pytest_unconfigure(config):
# Note that when xdist is used, this JSON output file will NOT contain tags.
jsonpath = config.option.jsonpath
if jsonpath:
plugin.save_payload_as_json(jsonpath)
plugin.save_payload_as_json(jsonpath, merge=config.option.mergejson)

del config._buildkite
config.pluginmanager.unregister(plugin)
@@ -75,3 +75,10 @@ def pytest_addoption(parser):
metavar="path",
help='save json file at given path'
)
group.addoption(
'--merge-json',
default=False,
action='store_true',
dest="mergejson",
help='merge json output with existing file, if it exists'
)
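
For reference, a hypothetical invocation combining the new flag with the plugin's JSON output option; the --json spelling and the xdist worker count are assumptions for illustration, since the existing option's flag name sits above this hunk:

import pytest

# Illustrative only: the JSON output flag spelling and worker count are
# assumed here, not shown in this diff.
pytest.main([
    "-n", "4",                # pytest-xdist workers writing to one file
    "--json", "result.json",  # existing option (dest="jsonpath")
    "--merge-json",           # new flag: merge into the file if it exists
])
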
20 changes: 17 additions & 3 deletions src/buildkite_test_collector/pytest_plugin/buildkite_plugin.py
@@ -1,7 +1,10 @@
"""Buildkite test collector plugin for Pytest"""
import json
import os
from uuid import uuid4

from filelock import FileLock

from ..collector.payload import TestData
from .logger import logger

@@ -108,7 +111,18 @@ def finalize_test(self, nodeid):
return True
return False

def save_payload_as_json(self, path):
""" Save payload into a json file """
def save_payload_as_json(self, path, merge=False):
"""Save payload into a json file, merging with existing data if merge is True"""
data = list(self.payload.as_json()["data"])

if merge:
lock = FileLock(f"{path}.lock")
with lock:
if os.path.exists(path):
with open(path, "r", encoding="utf-8") as f:
existing_data = json.load(f)
# Merge existing data with current payload
data = existing_data + data

with open(path, "w", encoding="utf-8") as f:
json.dump(self.payload.as_json()["data"], f)
json.dump(data, f)
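
The merge branch above is a lock-guarded read-modify-write, so concurrent writers (for example xdist workers exiting at roughly the same time) cannot interleave between reading the existing file and rewriting it. A minimal standalone sketch of that pattern with filelock, using an illustrative helper name and holding the lock across both the read and the write:

import json
import os

from filelock import FileLock


def append_records(path, new_records):
    # Serialise access to the JSON file through a sibling .lock file.
    with FileLock(f"{path}.lock"):
        records = []
        if os.path.exists(path):
            with open(path, "r", encoding="utf-8") as f:
                records = json.load(f)
        with open(path, "w", encoding="utf-8") as f:
            json.dump(records + list(new_records), f)

Keeping the write inside the locked section is the conservative variant of the pattern: if only the read were guarded, a write landing between another worker's read and write could still be lost.
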
100 changes: 94 additions & 6 deletions tests/buildkite_test_collector/pytest_plugin/test_plugin.py
@@ -1,8 +1,9 @@
from buildkite_test_collector.pytest_plugin import BuildkitePlugin
import json
import pytest

from buildkite_test_collector.collector.payload import Payload
from pathlib import Path
from buildkite_test_collector.pytest_plugin import BuildkitePlugin

import json

def test_runtest_logstart_with_unstarted_payload(fake_env):
payload = Payload.init(fake_env)
@@ -15,14 +16,101 @@ def test_runtest_logstart_with_unstarted_payload(fake_env):
assert plugin.payload.started_at is not None


def test_save_json_payload(fake_env, tmp_path, successful_test):
def test_save_json_payload_without_merge(fake_env, tmp_path, successful_test):
payload = Payload.init(fake_env)
payload = Payload.started(payload)
payload = payload.push_test_data(successful_test)

plugin = BuildkitePlugin(payload)

path = tmp_path / "result.json"

# Create an existing file with some data
existing_data = [{"existing": "data"}]
path.write_text(json.dumps(existing_data))

# Save without merge option
plugin.save_payload_as_json(path, merge=False)

# Check if the data was not merged
expected_data = [successful_test.as_json(payload.started_at)]
assert json.loads(path.read_text()) == expected_data


def test_save_json_payload_with_merge(fake_env, tmp_path, successful_test):
payload = Payload.init(fake_env)
payload = Payload.started(payload)
payload = payload.push_test_data(successful_test)

plugin = BuildkitePlugin(payload)

path = tmp_path / "result.json"
plugin.save_payload_as_json(path)

assert path.read_text() == json.dumps([successful_test.as_json(payload.started_at)])
# Create an existing file with some data
existing_data = [{"existing": "data"}]
path.write_text(json.dumps(existing_data))

# Save with merge option
plugin.save_payload_as_json(path, merge=True)

# Check if the data was merged
expected_data = existing_data + [successful_test.as_json(payload.started_at)]
assert json.loads(path.read_text()) == expected_data


def test_save_json_payload_with_non_existent_file(fake_env, tmp_path, successful_test):
payload = Payload.init(fake_env)
payload = Payload.started(payload)
payload = payload.push_test_data(successful_test)

plugin = BuildkitePlugin(payload)

path = tmp_path / "non_existent.json"

# Ensure the file does not exist
assert not path.exists()

# Save with merge option
plugin.save_payload_as_json(path, merge=True)

# Check if the data was saved correctly
expected_data = [successful_test.as_json(payload.started_at)]
assert json.loads(path.read_text()) == expected_data


def test_save_json_payload_with_invalid_file(fake_env, tmp_path, successful_test):
payload = Payload.init(fake_env)
payload = Payload.started(payload)
payload = payload.push_test_data(successful_test)

plugin = BuildkitePlugin(payload)

path = tmp_path / "invalid.json"

# Create a file with invalid JSON
path.write_text("{invalid: json}")

# Save with merge option, expect JSONDecodeError
with pytest.raises(json.decoder.JSONDecodeError):
plugin.save_payload_as_json(path, merge=True)


def test_save_json_payload_with_large_data(fake_env, tmp_path, successful_test):
payload = Payload.init(fake_env)
payload = Payload.started(payload)
payload = payload.push_test_data(successful_test)

plugin = BuildkitePlugin(payload)

path = tmp_path / "large_data.json"

# Create an existing file with a large amount of data
existing_data = [{"test": f"data_{i}"} for i in range(1000)]
path.write_text(json.dumps(existing_data))

# Save with merge option
plugin.save_payload_as_json(path, merge=True)

# Check if the data was merged correctly
expected_data = existing_data + [successful_test.as_json(payload.started_at)]
assert json.loads(path.read_text()) == expected_data