Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Backend/api/app/models/requests.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ class GeneratorExecuteRequest(BaseModel):
continuous: bool = Field(default=False, description="Run indefinitely (ignores count)")
eps: Optional[float] = Field(None, ge=0.1, le=10000, description="Events per second rate")
speed_mode: bool = Field(False, description="Pre-generate 1K events and loop for max throughput (auto-enabled for EPS > 1000)")
overwrite_parser: bool = Field(False, description="Overwrite existing parsers during execution")
options: Dict[str, Any] = Field(default_factory=dict, description="Generator-specific options")

@validator('count')
Expand Down Expand Up @@ -52,6 +53,7 @@ class ScenarioExecuteRequest(BaseModel):
"""Scenario execution request"""
speed: str = Field("fast", pattern="^(realtime|fast|instant)$")
dry_run: bool = Field(False)
overwrite_parser: bool = Field(False, description="Overwrite existing parsers during execution")

class Config:
extra = "forbid"
Expand Down
3 changes: 2 additions & 1 deletion Backend/api/app/routers/generators.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,8 @@ async def execute_generator(
count=request.count,
format=request.format,
star_trek_theme=request.star_trek_theme,
options=request.options
options=request.options,
overwrite_parser=request.overwrite_parser
)
execution_time = (time.time() - start_time) * 1000 # Convert to ms

Expand Down
6 changes: 5 additions & 1 deletion Backend/api/app/routers/parser_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ class ParserSyncRequest(BaseModel):
github_repo_urls: Optional[List[str]] = Field(None, description="Optional GitHub repository URLs to fetch parsers from")
github_token: Optional[str] = Field(None, description="Optional GitHub token for private repositories")
selected_parsers: Optional[Dict[str, Dict]] = Field(None, description="Optional user-selected parsers for similar name resolution")
overwrite_parser: bool = Field(False, description="If true, overwrite existing parsers instead of skipping them")


class ParserSyncResponse(BaseModel):
Expand Down Expand Up @@ -77,7 +78,8 @@ async def sync_parsers(
config_write_token=request.config_write_token,
github_repo_urls=request.github_repo_urls,
github_token=request.github_token,
selected_parsers=request.selected_parsers
selected_parsers=request.selected_parsers,
overwrite=request.overwrite_parser
)

# Count results (include uploaded_from_github in uploaded count)
Expand Down Expand Up @@ -107,6 +109,7 @@ class SingleParserSyncRequest(BaseModel):
config_write_token: str = Field(..., description="Config API token for reading and writing parsers")
github_repo_urls: Optional[List[str]] = Field(None, description="Optional GitHub repository URLs to fetch parsers from")
github_token: Optional[str] = Field(None, description="Optional GitHub token for private repositories")
overwrite_parser: bool = Field(False, description="If true, overwrite existing parser instead of skipping")


class SingleParserSyncResponse(BaseModel):
Expand Down Expand Up @@ -141,6 +144,7 @@ async def sync_single_parser(
config_write_token=request.config_write_token,
github_repo_urls=request.github_repo_urls,
github_token=request.github_token,
overwrite=request.overwrite_parser,
)
status = result.get("status", "no_parser")
message = result.get("message", "Unknown status")
Expand Down
66 changes: 66 additions & 0 deletions Backend/api/app/routers/scenarios.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import asyncio
import time
import json
import sys
from datetime import datetime, timedelta
from pathlib import Path as PathLib

Expand All @@ -29,6 +30,18 @@ class SIEMQueryRequest(BaseModel):
end_time_hours: int = 0 # How far back to end (default now)
anchor_configs: Optional[List[Dict[str, Any]]] = None


class CorrelationRunRequest(BaseModel):
    """Request model for correlation scenario execution.

    Carries the scenario selection, destination, and event-tagging options
    consumed by the POST /correlation/run endpoint.
    """
    # Identifier of the correlation scenario to execute; must be one of the
    # scenario ids known to the /correlation/run endpoint.
    scenario_id: str
    # Identifier of the destination for generated events.
    # NOTE(review): not visibly consumed by the endpoint in this diff — confirm.
    destination_id: str
    # Optional SIEM context (e.g. prior query results) forwarded to
    # scenario_service.start_correlation_scenario.
    siem_context: Optional[Dict[str, Any]] = None
    # Optional trace ID used to correlate events generated by this run
    # (exported to the scenario via the S1_TRACE_ID environment variable).
    trace_id: Optional[str] = None
    # When True, tag generated events with the scenario phase (S1_TAG_PHASE).
    tag_phase: bool = True
    # When True, tag generated events with the trace ID (S1_TAG_TRACE).
    tag_trace: bool = True
    # Worker count for event generation.
    # NOTE(review): not visibly consumed anywhere in this diff — confirm.
    workers: int = 10
    # When True, overwrite existing parsers during execution instead of
    # skipping ones that already exist in the destination SIEM.
    overwrite_parser: bool = False

# Initialize scenario service
scenario_service = ScenarioService()

Expand Down Expand Up @@ -278,6 +291,57 @@ async def execute_siem_query(
raise HTTPException(status_code=500, detail=str(e))


@router.post("/correlation/run", response_model=BaseResponse)
async def run_correlation_scenario(
    request: CorrelationRunRequest,
    background_tasks: BackgroundTasks,
    _: str = Depends(require_write_access)
):
    """
    Execute a correlation scenario with SIEM context and trace ID support
    """
    try:
        # Make the scenarios directory importable so the background task can
        # later import the scenario module by name.
        scenarios_dir = str(PathLib(__file__).parent.parent.parent / "scenarios")
        if scenarios_dir not in sys.path:
            sys.path.insert(0, scenarios_dir)

        # Registry of scenario ids that support correlation execution.
        known_correlation_scenarios = {
            "apollo_ransomware_scenario": "apollo_ransomware_scenario"
        }

        # Guard clause: reject unknown scenario ids before starting anything.
        if request.scenario_id not in known_correlation_scenarios:
            raise HTTPException(status_code=404, detail=f"Correlation scenario '{request.scenario_id}' not found")

        # Kick off the scenario in the background; the service returns an
        # execution id the client can use to track progress.
        execution_id = await scenario_service.start_correlation_scenario(
            scenario_id=request.scenario_id,
            siem_context=request.siem_context or {},
            trace_id=request.trace_id,
            tag_phase=request.tag_phase,
            tag_trace=request.tag_trace,
            overwrite_parser=request.overwrite_parser,
            background_tasks=background_tasks
        )

        response_payload = {
            "execution_id": execution_id,
            "scenario_id": request.scenario_id,
            "status": "started",
            "trace_id": request.trace_id,
            "tag_phase": request.tag_phase,
            "tag_trace": request.tag_trace,
            "started_at": datetime.utcnow().isoformat()
        }
        return BaseResponse(success=True, data=response_payload)
    except HTTPException:
        # Re-raise HTTP errors (e.g. the 404 above) without wrapping them.
        raise
    except Exception as e:
        # Anything unexpected becomes a generic 500 with the error message.
        raise HTTPException(status_code=500, detail=str(e))


# =============================================================================
# GENERIC SCENARIO ENDPOINTS (catch-all /{scenario_id} routes must be last)
# =============================================================================
Expand Down Expand Up @@ -309,6 +373,7 @@ async def execute_scenario(
scenario_id: str = Path(..., description="Scenario identifier"),
speed: str = Query("fast", description="Execution speed: realtime, fast, instant"),
dry_run: bool = Query(False, description="Simulate without generating events"),
overwrite_parser: bool = Query(False, description="Overwrite existing parsers during execution"),
_: str = Depends(require_write_access)
):
"""Execute an attack scenario"""
Expand All @@ -323,6 +388,7 @@ async def execute_scenario(
scenario_id=scenario_id,
speed=speed,
dry_run=dry_run,
overwrite_parser=overwrite_parser,
background_tasks=background_tasks
)

Expand Down
3 changes: 2 additions & 1 deletion Backend/api/app/services/generator_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,8 @@ async def execute_generator(
count: int = 1,
format: str = "json",
star_trek_theme: bool = True,
options: Dict[str, Any] = None
options: Dict[str, Any] = None,
overwrite_parser: bool = False
) -> List[Dict[str, Any]]:
"""Execute a generator and return events"""
if generator_id not in self.generator_metadata:
Expand Down
28 changes: 19 additions & 9 deletions Backend/api/app/services/parser_sync_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,12 +136,9 @@ def get_parser_path_in_siem(self, sourcetype: str) -> str:
Returns:
The parser path in SIEM (e.g., '/logParsers/okta_authentication-latest')
"""
# In the Scalyr/SentinelOne config tree, log parsers are stored as JSON files
# under /logParsers.
leaf = sourcetype
if not leaf.endswith(".json"):
leaf = f"{leaf}.json"
return f"/logParsers/{leaf}"
# In the Scalyr/SentinelOne config tree, log parsers are stored
# under /logParsers without a file extension.
return f"/logParsers/{sourcetype}"

def _local_parser_directories_for_sourcetype(self, sourcetype: str) -> List[Path]:
local_name = LOCAL_PARSER_ALIASES.get(sourcetype, sourcetype)
Expand Down Expand Up @@ -207,15 +204,19 @@ def ensure_parser_for_sourcetype(
github_repo_urls: Optional[List[str]] = None,
github_token: Optional[str] = None,
selected_parser: Optional[Dict] = None,
overwrite: bool = False,
) -> Dict[str, str]:
parser_path = self.get_parser_path_in_siem(sourcetype)

exists, _ = self.check_parser_exists(config_write_token, parser_path)
if exists:
if exists and not overwrite:
return {
"status": "exists",
"message": f"Parser already exists: {parser_path}",
}

if exists and overwrite:
logger.info(f"Overwriting existing parser: {parser_path}")

parser_content = self.load_local_parser(sourcetype)
from_github = False
Expand Down Expand Up @@ -434,7 +435,8 @@ def ensure_parsers_for_sources(
config_write_token: str,
github_repo_urls: Optional[List[str]] = None,
github_token: Optional[str] = None,
selected_parsers: Optional[Dict[str, Dict]] = None
selected_parsers: Optional[Dict[str, Dict]] = None,
overwrite: bool = False
) -> Dict[str, dict]:
"""
Ensure all required parsers exist in the destination SIEM
Expand All @@ -445,6 +447,7 @@ def ensure_parsers_for_sources(
github_repo_urls: Optional list of GitHub repository URLs to fetch parsers from
github_token: Optional GitHub personal access token for private repos
selected_parsers: Optional dict mapping sourcetype to user-selected parser info
overwrite: If True, overwrite existing parsers instead of skipping them

Returns:
Dict with status for each source:
Expand Down Expand Up @@ -492,14 +495,21 @@ def ensure_parsers_for_sources(
# Check if parser exists (write token can also read)
exists, _ = self.check_parser_exists(config_write_token, parser_path)

if exists:
if exists and not overwrite:
results[source] = {
"status": "exists",
"sourcetype": sourcetype,
"message": f"Parser already exists: {parser_path}"
}
continue

if exists and overwrite:
# Parser exists but we need to overwrite it
logger.info(f"Overwriting existing parser: {parser_path}")
else:
# Parser doesn't exist, we need to create it
logger.info(f"Creating new parser: {parser_path}")

# Resolve parser content (parser is missing, or exists and overwrite was requested)
parser_content = None
from_github = False
Expand Down
68 changes: 57 additions & 11 deletions Backend/api/app/services/scenario_service.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,15 @@
"""
Scenario service for managing attack scenarios
"""
from typing import Dict, Any, List, Optional
import uuid
import time
import asyncio
from datetime import datetime
import logging
import os
import sys
import importlib
import json
import uuid
import asyncio
import importlib
from datetime import datetime
from typing import Dict, Any, Optional, List
from pathlib import Path

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -129,6 +128,17 @@ def __init__(self):
]
}
,
"apollo_ransomware_scenario": {
"id": "apollo_ransomware_scenario",
"name": "Apollo Ransomware Scenario",
"description": "Proofpoint phishing, M365 email interaction, SharePoint recon & exfiltration",
"phases": [
{"name": "Phishing Delivery", "generators": ["proofpoint"], "duration": 5},
{"name": "Email Interaction", "generators": ["microsoft_365_collaboration"], "duration": 5},
{"name": "SharePoint Recon", "generators": ["microsoft_365_collaboration"], "duration": 15},
{"name": "Data Exfiltration", "generators": ["microsoft_365_collaboration"], "duration": 10}
]
},
"hr_phishing_pdf_c2": {
"id": "hr_phishing_pdf_c2",
"name": "HR Phishing PDF -> PowerShell -> Scheduled Task -> C2",
Expand Down Expand Up @@ -236,6 +246,7 @@ async def start_scenario(
scenario_id: str,
speed: str = "fast",
dry_run: bool = False,
overwrite_parser: bool = False,
background_tasks=None
) -> str:
"""Start scenario execution"""
Expand All @@ -252,6 +263,7 @@ async def start_scenario(
"started_at": datetime.utcnow().isoformat(),
"speed": speed,
"dry_run": dry_run,
"overwrite_parser": overwrite_parser,
"progress": 0
}

Expand All @@ -264,11 +276,15 @@ async def start_correlation_scenario(
self,
scenario_id: str,
siem_context: Dict[str, Any],
trace_id: Optional[str] = None,
tag_phase: bool = True,
tag_trace: bool = True,
speed: str = "fast",
dry_run: bool = False,
overwrite_parser: bool = False,
background_tasks=None
) -> str:
"""Start correlation scenario execution with SIEM context"""
"""Start correlation scenario execution with SIEM context and trace ID support"""
execution_id = str(uuid.uuid4())

self.running_scenarios[execution_id] = {
Expand All @@ -279,16 +295,36 @@ async def start_correlation_scenario(
"speed": speed,
"dry_run": dry_run,
"siem_context": siem_context,
"trace_id": trace_id,
"tag_phase": tag_phase,
"tag_trace": tag_trace,
"overwrite_parser": overwrite_parser,
"progress": 0
}

if background_tasks:
background_tasks.add_task(self._execute_correlation_scenario, execution_id, scenario_id, siem_context)
background_tasks.add_task(
self._execute_correlation_scenario,
execution_id,
scenario_id,
siem_context,
trace_id,
tag_phase,
tag_trace
)

return execution_id

async def _execute_correlation_scenario(self, execution_id: str, scenario_id: str, siem_context: Dict[str, Any]):
"""Execute correlation scenario with SIEM context"""
async def _execute_correlation_scenario(
self,
execution_id: str,
scenario_id: str,
siem_context: Dict[str, Any],
trace_id: Optional[str] = None,
tag_phase: bool = True,
tag_trace: bool = True
):
"""Execute correlation scenario with SIEM context and trace ID support"""
import sys
import os
from pathlib import Path
Expand All @@ -303,6 +339,12 @@ async def _execute_correlation_scenario(self, execution_id: str, scenario_id: st
siem_context_json = json.dumps(siem_context)
os.environ['SIEM_CONTEXT'] = siem_context_json

# Set trace ID and tagging environment variables
if trace_id:
os.environ['S1_TRACE_ID'] = trace_id
os.environ['S1_TAG_PHASE'] = '1' if tag_phase else '0'
os.environ['S1_TAG_TRACE'] = '1' if tag_trace else '0'

# Import and run the scenario
module = __import__(scenario_id)
scenario_result = module.generate_apollo_ransomware_scenario(siem_context=siem_context)
Expand All @@ -321,8 +363,12 @@ async def _execute_correlation_scenario(self, execution_id: str, scenario_id: st
self.running_scenarios[execution_id]["error"] = str(e)
self.running_scenarios[execution_id]["completed_at"] = datetime.utcnow().isoformat()
finally:
# Clean up environment variable
# Clean up environment variables
os.environ.pop('SIEM_CONTEXT', None)
if trace_id:
os.environ.pop('S1_TRACE_ID', None)
os.environ.pop('S1_TAG_PHASE', None)
os.environ.pop('S1_TAG_TRACE', None)

async def _execute_scenario(self, execution_id: str, scenario: Dict[str, Any]):
"""Execute scenario in background"""
Expand Down
Loading
Loading