1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
|
from datetime import datetime, timedelta, timezone
from os.path import join as pjoin
from pathlib import Path
from random import choice as randchoice
from shutil import rmtree
from typing import TYPE_CHECKING, Callable, Optional
from uuid import uuid4
import pytest
from _pytest.fixtures import SubRequest
from faker import Faker
# from test_utils.managers.ledger.conftest import session_with_tx_factory
# from test_utils.models.conftest import session_factory
if TYPE_CHECKING:
from generalresearch.config import GRLBaseSettings
from generalresearch.incite.base import GRLDatasets
from generalresearch.incite.collections import (
DFCollectionItem,
DFCollectionType,
)
from generalresearch.incite.mergers import MergeType
from generalresearch.models.admin.request import (
ReportRequest,
)
from generalresearch.models.thl.product import Product
from generalresearch.models.thl.session import Session
from generalresearch.models.thl.user import User
fake = Faker()
@pytest.fixture
def mnt_gr_api_dir(request: SubRequest, settings: "GRLBaseSettings") -> Path:
    """Create the API report directory tree, one subdirectory per ReportType.

    The tree root comes from ``settings.mnt_gr_api_dir`` and the whole tree
    is removed at teardown. A guard assertion refuses to delete anything
    that lives under a ``/mnt/`` network mount.
    """
    from generalresearch.models.admin.request import ReportType

    base = Path(settings.mnt_gr_api_dir)
    base.mkdir(parents=True, exist_ok=True)
    for report_type in ReportType:
        (base / report_type.value).mkdir(exist_ok=True)

    def _teardown():
        # Safety net: never rmtree a path on a network mount.
        assert "/mnt/" not in str(base), (
            "Under no condition, testing or otherwise should we have code delete "
            " any folders or potential data on a network mount"
        )
        rmtree(base)

    request.addfinalizer(_teardown)
    return base
@pytest.fixture
def event_report_request(start: datetime) -> "ReportRequest":
    """A validated POP_EVENT ``ReportRequest`` with a 5min interval.

    ``start`` comes from the ``start`` fixture (90 days ago, truncated to
    whole seconds). The previously declared ``utc_hour_ago`` dependency was
    never used inside the fixture, so it has been removed.
    """
    from generalresearch.models.admin.request import (
        ReportRequest,
        ReportType,
    )

    return ReportRequest.model_validate(
        {
            "report_type": ReportType.POP_EVENT,
            "interval": "5min",
            "start": start,
        }
    )
@pytest.fixture
def session_report_request(start: datetime) -> "ReportRequest":
    """A validated POP_SESSION ``ReportRequest`` with a 5min interval.

    ``start`` comes from the ``start`` fixture (90 days ago, truncated to
    whole seconds). The previously declared ``utc_hour_ago`` dependency was
    never used inside the fixture, so it has been removed.
    """
    from generalresearch.models.admin.request import (
        ReportRequest,
        ReportType,
    )

    return ReportRequest.model_validate(
        {
            "report_type": ReportType.POP_SESSION,
            "interval": "5min",
            "start": start,
        }
    )
@pytest.fixture
def mnt_filepath(request: SubRequest) -> "GRLDatasets":
    """Provide a GRLDatasets instance rooted at a unique /tmp directory.

    Used as the scratch location for all DFCollections & Mergers parquet
    files. The directory is removed at teardown, guarded by an assertion
    that refuses to delete anything under a ``/mnt/`` network mount.
    """
    from generalresearch.incite.base import GRLDatasets, NFSMount

    datasets = GRLDatasets(
        data_src=Path("/tmp") / f"test-{uuid4().hex[:12]}",
        incite=NFSMount(point="thl-incite"),
    )

    def _teardown():
        # Safety net: never rmtree a path on a network mount.
        assert "/mnt/" not in str(datasets.data_src), (
            "Under no condition, testing or otherwise should we have code delete "
            " any folders or potential data on a network mount"
        )
        rmtree(datasets.data_src)

    request.addfinalizer(_teardown)
    return datasets
@pytest.fixture
def start(utc_90days_ago: datetime) -> "datetime":
    """The 90-days-ago timestamp truncated to whole seconds."""
    return utc_90days_ago.replace(microsecond=0)
@pytest.fixture
def offset() -> str:
    """Fixed interval-offset string shared by fixtures that need one."""
    return "15min"
@pytest.fixture
def duration() -> Optional["timedelta"]:
    """A one-hour duration (typed Optional so tests may override with None)."""
    return timedelta(hours=1)
@pytest.fixture
def df_collection_data_type() -> "DFCollectionType":
    """The TEST member of DFCollectionType, for exercising collections generically."""
    from generalresearch.incite.collections import DFCollectionType

    return DFCollectionType.TEST
@pytest.fixture
def merge_type() -> "MergeType":
    """The TEST member of MergeType, for exercising mergers generically."""
    from generalresearch.incite.mergers import MergeType

    return MergeType.TEST
@pytest.fixture
def incite_item_factory(
session_factory: Callable[..., "Session"],
product: "Product",
user_factory: Callable[..., "User"],
session_with_tx_factory: Callable[..., "Session"],
) -> Callable[..., None]:
def _inner(
item: "DFCollectionItem",
observations: int = 3,
user: Optional["User"] = None,
):
from generalresearch.incite.collections import (
DFCollection,
DFCollectionType,
)
from generalresearch.models.thl.session import Source
collection: DFCollection = item._collection
data_type: DFCollectionType = collection.data_type
for _ in range(5):
item_time = fake.date_time_between(
start_date=item.start, end_date=item.finish, tzinfo=timezone.utc
)
match data_type:
case DFCollectionType.USER:
user_factory(product=product, created=item_time)
case DFCollectionType.LEDGER:
session_with_tx_factory(started=item_time, user=user)
case DFCollectionType.WALL:
u = (
user
if user
else user_factory(product=product, created=item_time)
)
session_factory(
user=u,
started=item_time,
wall_source=randchoice(list(Source)),
)
case DFCollectionType.SESSION:
u = (
user
if user
else user_factory(product=product, created=item_time)
)
session_factory(
user=u,
started=item_time,
wall_source=randchoice(list(Source)),
)
case _:
raise ValueError("Unsupported DFCollectionItem")
return None
return _inner
|