in eventdata/parameter_sources/randomevent.py [0:0]
def __init__(self, params, agent=Agent, client_ip=ClientIp, referrer=Referrer, request=Request):
"""Initialize the random-event parameter source from a Rally ``params`` dict.

The ``agent``/``client_ip``/``referrer``/``request`` parameters are generator
classes (project types) that are instantiated here; they are injectable mainly
so tests can substitute fakes — TODO confirm against callers.
"""
# Instantiate one generator of each kind; used to fill event fields later
# (generation logic itself is outside this chunk).
self._agent = agent()
self._clientip = client_ip()
self._referrer = referrer()
self._request = request()
# We will reuse the event dictionary. This assumes that each field will be present (and thus overwritten) in each event.
# This reduces object churn and improves peak indexing throughput.
self._event = {}
if "index" in params:
# Translate date placeholders like "<yyyy>"/"<mm>" (case-insensitive,
# optional inner whitespace) into str.format fields such as "{ts[yyyy]}",
# so the index name can later be rendered via .format(ts=<timestamp dict>).
index = re.sub(r"<\s*yyyy\s*>", "{ts[yyyy]}", params["index"], flags=re.IGNORECASE)
index = re.sub(r"<\s*yy\s*>", "{ts[yy]}", index, flags=re.IGNORECASE)
index = re.sub(r"<\s*mm\s*>", "{ts[mm]}", index, flags=re.IGNORECASE)
index = re.sub(r"<\s*dd\s*>", "{ts[dd]}", index, flags=re.IGNORECASE)
index = re.sub(r"<\s*hh\s*>", "{ts[hh]}", index, flags=re.IGNORECASE)
self._index = index
# True => the index name must be formatted per event with the current timestamp.
self._index_pattern = True
else:
# No pattern supplied: use a fixed default index name.
self._index = "elasticlogs"
self._index_pattern = False
self._type = "doc"
# Project helper that produces timestamp structs; arguments mirror the
# track parameters (starting point, offset, acceleration factor).
self._timestamp_generator = TimestampStructGenerator(
params.get("starting_point", "now"),
params.get("offset"),
float(params.get("acceleration_factor", "1.0")),
# this is only expected to be used in tests
params.get("__utc_now")
)
if "daily_logging_volume" in params and "client_count" in params:
# in bytes
# Per-client share: total daily volume split evenly across clients
# (floor division, so rounding remainder is dropped).
self.daily_logging_volume = convert_to_bytes(params["daily_logging_volume"]) // int(params["client_count"])
else:
# No volume cap configured for this client.
self.daily_logging_volume = None
# Bytes emitted so far toward the current day's quota.
self.current_logging_volume = 0
# May be None if "number_of_days" is absent (unbounded run).
self.total_days = params.get("number_of_days")
self.remaining_days = self.total_days
# If truthy, the raw (unparsed) event size is recorded per event —
# presumably for accounting; confirm where this flag is consumed.
self.record_raw_event_size = params.get("record_raw_event_size", False)
self._offset = 0
# Round-robin over three web host ids (1, 2, 3).
self._web_host = itertools.cycle([1, 2, 3])
self._timestruct = None
self._index_name = None
self._time_interval_current_bulk = 0