Solutions/BloodHound Enterprise/Data Connectors/pkg/connector/main.go (12 lines):
  - line 21: // TODO: hax
  - line 101: // TODO: Validate and return error
  - line 115: FindingCount: float64(*postureStat.CriticalRiskCount), // TODO: wrong
  - line 116: TierZeroCount: float64(*postureStat.TierZeroCount), // TODO: wrong
  - line 266: bhd = append(bhd, data) // TODO is this correct, capacity
  - line 390: FindingID: *groupMember.ObjectId, // TODO: move this to a new field called ObjectID (backfill required for other data_types??)
  - line 391: EventDetails: *groupMember.PrimaryKind, // TODO: move this to a new field called ObjectKind (backfill required for other data_types??)
  - line 393: DomainID: *environment_id, // TODO: DomainSID and DomainID seem redundant
  - line 413: // TODO maybe skip
  - line 485: // TODO Better error / retry handling
  - line 529: var responseBytes = make([]byte, 1024) // TODO check if this is a good size or will expand
  - line 539: // TODO is there a generic sdk client type

Solutions/BloodHound Enterprise/Data Connectors/pkg/bloodhound/client.go (7 lines):
  - line 78: // TODO does LastCompleteAnalysisAt mean successful completion, do I also need to look at Data.Status
  - line 133: m[domainId] = *response.JSON200.Data // TODO nil checks empty checks
  - line 152: // TODO support pagination using limit and skip, along with returned count
  - line 201: // TODO should I skip here
  - line 205: // TODO should I skip here
  - line 209: dm[finding] = *response.JSON200.Data // TODO do I need copy this in?
  - line 234: // TODO SCRUB LOG

Solutions/VMware SD-WAN and SASE/Data Connectors/Function App Connector/vmw_sdwan_sase_funcapp/sdwan_efslogs/__init__.py (3 lines):
  - line 136: # FIXME: sanitize this portion, very pedestrian code, but verbosity is advisable
  - line 479: # FIXME: need to play with "from" and "size" based on the API response "count" to deal with multi-page, just break for now.
  - line 613: # FIXME: Add more error handling here for specific use-cases like 404, 500, etc.

Solutions/VMware SD-WAN and SASE/Data Connectors/Function App Connector/vmw_sdwan_sase_funcapp/sdwan_auditlogs/__init__.py (3 lines):
  - line 48: # FIXME: Clean up JSON file and get rid of the extra variables
  - line 289: # FIXME: Add more error handling here for specific use-cases like 404, 500, etc.
  - line 318: # FIXME: potentially we can rebuild this part a bit so that we contain the supported event types in a list

Solutions/VMware SD-WAN and SASE/Data Connectors/Function App Connector/vmw_sdwan_sase_funcapp/cws_dlplogs/__init__.py (3 lines):
  - line 49: # FIXME: Clean up JSON file and get rid of the extra variables
  - line 315: # FIXME: Add more error handling here for specific use-cases like 404, 500, etc.
  - line 344: # FIXME: potentially we can rebuild this part a bit so that we contain the supported event types in a list

Solutions/VMware SD-WAN and SASE/Data Connectors/Function App Connector/vmw_sdwan_sase_funcapp/cws_weblogs/__init__.py (2 lines):
  - line 273: # FIXME: Add more error handling here for specific use-cases like 404, 500, etc.
  - line 302: # FIXME: potentially we can rebuild this part a bit so that we contain the supported event types in a list

Tools/RDAP/RDAPQuery/RDAPQuery/QueryEngine.cs (2 lines):
  - line 38: /// URL Paramaters in case we need them TODO: Eliminate this if unneeded.
  - line 145: // TODO: Really need to clean this up.

Solutions/WithSecureElementsViaFunction/Data Connectors/WithSecureElementsAzureFunction/lib/ws_connector.py (2 lines):
  - line 16: # TODO: haven't tested any resilience solutions - so not sure what will happen when network connection fails
  - line 27: # TODO: in case when function was down for too long it might timeout without moving forward

Solutions/VMware SD-WAN and SASE/Data Connectors/Function App Connector/vmw_sdwan_sase_funcapp/cws_healthcheck/__init__.py (2 lines):
  - line 190: # FIXME: Add more error handling here for specific use-cases like 404, 500, etc.
  - line 222: # FIXME: potentially we can rebuild this part a bit so that we contain the supported event types in a list

Parsers/ASimProcessEvent/Parsers/vimProcessCreateMD4IoT.yaml (2 lines):
  - line 63: | where RawEventName == "Process" // TODO: exclude entries where segment EventType is "EXIT" by full segment structure
  - line 82: | where EventDetails.EventType != 'EXIT' // TODO: move filter to prefiltering. see prev comment

Solutions/BloodHound Enterprise/Data Connectors/handler.go (1 line):
  - line 125: // TODO: have our hown bloodhound client type that contains key, domain etc.

ASIM/dev/ASimTester/filteringTest/ASimFilteringTest.py (1 line):
  - line 256: pass #TODO add test for datetime

DataConnectors/AWS-CloudTrail-AzureFunction/AzFunAWSCloudTrailLogsIngestion/__init__.py (1 line):
  - line 47: # TODO: Read Collection schedule from environment variable as CRON expression; This is also Azure Function Trigger Schedule

Hunting Queries/Microsoft 365 Defender/General queries/File footprint.yaml (1 line):
  - line 5: TODO - set file hash to be a SHA1 hash of your choice...

DataConnectors/S3-Lambda/S3toSentinel.ps1 (1 line):
  - line 161: # TODO: Add logic to handle S3 event record, for example

Hunting Queries/Microsoft 365 Defender/General queries/File footprint (1).yaml (1 line):
  - line 5: TODO - set file hash to be a SHA1 hash of your choice...

Solutions/Trend Micro Vision One/Data Connectors/AzureFunctionTrendMicroXDR/shared_code/services/oat_service.py (1 line):
  - line 4: TODO: Use azure-data-tables instead, see: https://pypi.org/project/azure-data-tables/

DataConnectors/AWS-S3-AzureFunction/AzFun-AWS-S3-Ingestion/__init__.py (1 line):
  - line 47: # TODO: Read Collection schedule from environment variable as CRON expression; This is also Azure Function Trigger Schedule

Tools/ArcSight-Data-Migration/lacat-opt.py (1 line):
  - line 8: # TODO:
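The single most repeated item above is the VMware SD-WAN and SASE Function App FIXME about handling specific HTTP statuses such as 404 and 500, which appears in five modules; the WithSecure connector's untested-resilience TODO and the BloodHound connector's "Better error / retry handling" TODO point at the same gap. As one possible way to close that family of items, the sketch below shows status-aware requests with exponential-backoff retries in Python. It is illustrative only: the function name, retryable-status set, and defaults are assumptions, not code taken from any of the listed files, and it presumes the modules use the requests library.

```python
import time
import requests

# Assumption: these names and defaults are illustrative, not from the repository.
RETRYABLE_STATUSES = {429, 500, 502, 503, 504}

def request_with_retries(url, headers=None, params=None, max_retries=3, backoff_seconds=2):
    """Fetch a URL with status-specific handling and exponential-backoff retries."""
    for attempt in range(1, max_retries + 1):
        try:
            response = requests.get(url, headers=headers, params=params, timeout=30)
        except requests.exceptions.RequestException:
            # Network-level failure (DNS, connection reset, timeout):
            # retry unless this was the last attempt.
            if attempt == max_retries:
                raise
        else:
            if response.status_code == 404:
                # Missing resource: retrying will not help, fail fast with context.
                raise RuntimeError(f"Resource not found: {url}")
            if response.status_code not in RETRYABLE_STATUSES:
                # raise_for_status() turns remaining 4xx responses into
                # exceptions and passes successful responses through.
                response.raise_for_status()
                return response
            if attempt == max_retries:
                # Retries exhausted on throttling or a transient server error.
                response.raise_for_status()
        # Back off before the next attempt: 2s, 4s, 8s, ...
        time.sleep(backoff_seconds ** attempt)
```

The same pattern (classify the status, retry only the transient cases, back off between attempts) would also apply to the Go-based BloodHound connector, though there it would be expressed with Go's net/http client rather than this Python helper.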