aboutsummaryrefslogtreecommitdiff
path: root/test_utils
diff options
context:
space:
mode:
authorMax Nanis2026-03-06 16:49:46 -0500
committerMax Nanis2026-03-06 16:49:46 -0500
commit91d040211a4ed6e4157896256a762d3854777b5e (patch)
treecd95922ea4257dc8d3f4e4cbe8534474709a20dc /test_utils
downloadgeneralresearch-91d040211a4ed6e4157896256a762d3854777b5e.tar.gz
generalresearch-91d040211a4ed6e4157896256a762d3854777b5e.zip
Initial commitv3.3.4
Diffstat (limited to 'test_utils')
-rw-r--r--test_utils/__init__.py0
-rw-r--r--test_utils/conftest.py310
-rw-r--r--test_utils/grliq/__init__.py0
-rw-r--r--test_utils/grliq/conftest.py28
-rw-r--r--test_utils/grliq/managers/__init__.py0
-rw-r--r--test_utils/grliq/managers/conftest.py0
-rw-r--r--test_utils/grliq/models/__init__.py0
-rw-r--r--test_utils/grliq/models/conftest.py0
-rw-r--r--test_utils/incite/__init__.py0
-rw-r--r--test_utils/incite/collections/__init__.py0
-rw-r--r--test_utils/incite/collections/conftest.py205
-rw-r--r--test_utils/incite/conftest.py201
-rw-r--r--test_utils/incite/mergers/__init__.py0
-rw-r--r--test_utils/incite/mergers/conftest.py247
-rw-r--r--test_utils/managers/__init__.py0
-rw-r--r--test_utils/managers/cashout_methods.py76
-rw-r--r--test_utils/managers/conftest.py701
-rw-r--r--test_utils/managers/contest/__init__.py0
-rw-r--r--test_utils/managers/contest/conftest.py295
-rw-r--r--test_utils/managers/ledger/__init__.py0
-rw-r--r--test_utils/managers/ledger/conftest.py678
-rw-r--r--test_utils/managers/upk/__init__.py0
-rw-r--r--test_utils/managers/upk/conftest.py161
-rw-r--r--test_utils/managers/upk/marketplace_category.csv.gzbin0 -> 100990 bytes
-rw-r--r--test_utils/managers/upk/marketplace_item.csv.gzbin0 -> 3225 bytes
-rw-r--r--test_utils/managers/upk/marketplace_property.csv.gzbin0 -> 3315 bytes
-rw-r--r--test_utils/managers/upk/marketplace_propertycategoryassociation.csv.gzbin0 -> 2079 bytes
-rw-r--r--test_utils/managers/upk/marketplace_propertycountry.csv.gzbin0 -> 71359 bytes
-rw-r--r--test_utils/managers/upk/marketplace_propertyitemrange.csv.gzbin0 -> 65389 bytes
-rw-r--r--test_utils/managers/upk/marketplace_propertymarketplaceassociation.csv.gzbin0 -> 4272 bytes
-rw-r--r--test_utils/managers/upk/marketplace_question.csv.gzbin0 -> 283465 bytes
-rw-r--r--test_utils/models/__init__.py0
-rw-r--r--test_utils/models/conftest.py608
-rw-r--r--test_utils/spectrum/__init__.py0
-rw-r--r--test_utils/spectrum/conftest.py79
-rw-r--r--test_utils/spectrum/surveys_json.py140
36 files changed, 3729 insertions, 0 deletions
diff --git a/test_utils/__init__.py b/test_utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/__init__.py
diff --git a/test_utils/conftest.py b/test_utils/conftest.py
new file mode 100644
index 0000000..7acafc5
--- /dev/null
+++ b/test_utils/conftest.py
@@ -0,0 +1,310 @@
+import os
+import shutil
+from datetime import datetime, timezone
+from os.path import join as pjoin
+from typing import TYPE_CHECKING, Callable
+from uuid import uuid4
+
+import pytest
+import redis
+from dotenv import load_dotenv
+from pydantic import MariaDBDsn
+from redis import Redis
+
+from generalresearch.pg_helper import PostgresConfig
+from generalresearch.redis_helper import RedisConfig
+from generalresearch.sql_helper import SqlHelper
+
+if TYPE_CHECKING:
+ from generalresearch.currency import USDCent
+ from generalresearch.models.thl.session import Status
+ from generalresearch.config import GRLBaseSettings
+
+
@pytest.fixture(scope="session")
def env_file_path(pytestconfig):
    """Resolve `.env.test` under the pytest rootdir and load it when present."""
    env_path = os.path.join(pytestconfig.rootpath, ".env.test")
    if os.path.exists(env_path):
        load_dotenv(dotenv_path=env_path, override=True)
    return env_path
+
+
@pytest.fixture(scope="session")
def settings(env_file_path) -> "GRLBaseSettings":
    """Session settings for the test run.

    Derives the spectrum unit-test DSNs from the marketplace DSNs when they
    are not configured, and points the GR API mount at a unique /tmp scratch
    directory so parallel runs never collide.
    """
    from generalresearch.config import GRLBaseSettings

    s = GRLBaseSettings(_env_file=env_file_path)

    # Bug fix: both derivations were guarded on `thl_mkpl_rr_db`, so a config
    # with only the RR DSN set would format the literal string "None" into
    # the RW spectrum DSN. Guard each derivation on its own source DSN.
    if s.spectrum_rw_db is None and s.thl_mkpl_rw_db is not None:
        s.spectrum_rw_db = MariaDBDsn(f"{s.thl_mkpl_rw_db}unittest-thl-spectrum")
    if s.spectrum_rr_db is None and s.thl_mkpl_rr_db is not None:
        s.spectrum_rr_db = MariaDBDsn(f"{s.thl_mkpl_rr_db}unittest-thl-spectrum")

    # Unique per-session scratch dir (created/removed by downstream fixtures).
    s.mnt_gr_api_dir = pjoin("/tmp", f"test-{uuid4().hex[:12]}")

    return s
+
+
+# === Database Connectors ===
+
+
@pytest.fixture(scope="session")
def thl_web_rr(settings) -> PostgresConfig:
    """Read-replica Postgres config for the THL web DB (unit-test DSN only)."""
    dsn = settings.thl_web_rr_db
    # Guard: never point tests at a non-unittest database.
    assert "/unittest-" in dsn.path
    return PostgresConfig(dsn=dsn, connect_timeout=1, statement_timeout=5)
+
+
@pytest.fixture(scope="session")
def thl_web_rw(settings) -> PostgresConfig:
    """Read/write Postgres config for the THL web DB (unit-test DSN only)."""
    dsn = settings.thl_web_rw_db
    # Guard: never point tests at a non-unittest database.
    assert "/unittest-" in dsn.path
    return PostgresConfig(dsn=dsn, connect_timeout=1, statement_timeout=5)
+
+
@pytest.fixture(scope="session")
def gr_db(settings) -> PostgresConfig:
    """Postgres config for the GR unit-test database."""
    # Guard: never point tests at a non-unittest database.
    assert "/unittest-" in settings.gr_db.path
    # NOTE(review): connect_timeout=5 / statement_timeout=2 is inverted
    # relative to every sibling fixture (connect 1-2s, statement 2-5s) —
    # confirm whether the two values were accidentally swapped.
    return PostgresConfig(dsn=settings.gr_db, connect_timeout=5, statement_timeout=2)
+
+
@pytest.fixture(scope="session")
def spectrum_rw(settings) -> SqlHelper:
    """Read/write MariaDB helper for the spectrum unit-test database."""
    dsn = settings.spectrum_rw_db
    # Guard: never point tests at a non-unittest database.
    assert "/unittest-" in dsn.path
    return SqlHelper(dsn=dsn, read_timeout=2, write_timeout=1, connect_timeout=2)
+
+
@pytest.fixture(scope="session")
def grliq_db(settings) -> PostgresConfig:
    """Postgres config for the grliq unit-test database."""
    # Guard: never point tests at a non-unittest database.
    assert "/unittest-" in settings.grliq_db.path
    return PostgresConfig(
        dsn=settings.grliq_db,
        connect_timeout=2,
        statement_timeout=2,
    )
+
+
@pytest.fixture(scope="session")
def thl_redis(settings) -> "Redis":
    """Synchronous Redis client for the THL unit-test instance.

    TODO: migrate callers to RedisConfig (`thl_redis_config`) where possible.
    """
    dsn = str(settings.thl_redis)
    # Guard: only connect to a unittest/loopback host.
    assert "unittest" in dsn or "127.0.0.1" in dsn

    return redis.Redis.from_url(
        url=dsn,
        decode_responses=True,
        socket_timeout=settings.redis_timeout,
        socket_connect_timeout=settings.redis_timeout,
    )
+
+
@pytest.fixture(scope="session")
def thl_redis_config(settings) -> RedisConfig:
    """RedisConfig for the THL unit-test instance."""
    dsn = str(settings.thl_redis)
    # Guard: only connect to a unittest/loopback host.
    assert "unittest" in dsn or "127.0.0.1" in dsn

    return RedisConfig(
        dsn=settings.thl_redis,
        decode_responses=True,
        socket_timeout=settings.redis_timeout,
        socket_connect_timeout=settings.redis_timeout,
    )
+
+
@pytest.fixture(scope="session")
def gr_redis_config(settings) -> "RedisConfig":
    """RedisConfig for the GR unit-test instance."""
    dsn = str(settings.gr_redis)
    # Guard: only connect to a unittest/loopback host.
    assert "unittest" in dsn or "127.0.0.1" in dsn

    return RedisConfig(
        dsn=settings.gr_redis,
        decode_responses=True,
        socket_timeout=settings.redis_timeout,
        socket_connect_timeout=settings.redis_timeout,
    )
+
+
@pytest.fixture(scope="session")
def gr_redis(settings) -> "Redis":
    """Synchronous Redis client for the GR unit-test instance."""
    dsn = str(settings.gr_redis)
    # Guard: only connect to a unittest/loopback host.
    assert "unittest" in dsn or "127.0.0.1" in dsn

    return redis.Redis.from_url(
        url=dsn,
        decode_responses=True,
        socket_timeout=settings.redis_timeout,
        socket_connect_timeout=settings.redis_timeout,
    )
+
+
@pytest.fixture()
def gr_redis_async(settings):
    """Async Redis client for the GR unit-test instance (tight 200ms timeouts)."""
    import redis.asyncio as redis_async

    dsn = str(settings.gr_redis)
    # Guard: only connect to a unittest/loopback host.
    assert "unittest" in dsn or "127.0.0.1" in dsn

    return redis_async.Redis.from_url(
        dsn,
        decode_responses=True,
        socket_timeout=0.20,
        socket_connect_timeout=0.20,
    )
+
+
+# === Random helpers ===
+
+
@pytest.fixture(scope="function")
def start():
    """Fixed, timezone-aware epoch (1900-01-01 UTC) for deterministic tests."""
    return datetime(1900, 1, 1, tzinfo=timezone.utc)
+
+
@pytest.fixture
def wall_status(request) -> "Status":
    """Wall status under test.

    Honors indirect parametrization (`request.param`) and falls back to
    `Status.COMPLETE` when the test is not parametrized.
    """
    from generalresearch.models.thl.session import Status

    # Bug fix: the original tested hasattr(request, "wall_status"), an
    # attribute that never exists on FixtureRequest, so request.param was
    # never used and parametrized values were silently ignored.
    return request.param if hasattr(request, "param") else Status.COMPLETE
+
+
@pytest.fixture(scope="function")
def utc_now() -> "datetime":
    """Current timezone-aware UTC timestamp."""
    return datetime.now(tz=timezone.utc)
+
+
@pytest.fixture(scope="function")
def utc_hour_ago() -> "datetime":
    """Aware UTC timestamp one hour in the past."""
    from datetime import timedelta

    now = datetime.now(tz=timezone.utc)
    return now - timedelta(hours=1)
+
+
@pytest.fixture(scope="function")
def utc_day_ago() -> "datetime":
    """Aware UTC timestamp 24 hours in the past."""
    from datetime import timedelta

    now = datetime.now(tz=timezone.utc)
    return now - timedelta(hours=24)
+
+
@pytest.fixture(scope="function")
def utc_90days_ago() -> "datetime":
    """Aware UTC timestamp 90 days in the past."""
    from datetime import timedelta

    now = datetime.now(tz=timezone.utc)
    return now - timedelta(days=90)
+
+
@pytest.fixture(scope="function")
def utc_60days_ago() -> "datetime":
    """Aware UTC timestamp 60 days in the past."""
    from datetime import timedelta

    now = datetime.now(tz=timezone.utc)
    return now - timedelta(days=60)
+
+
@pytest.fixture(scope="function")
def utc_30days_ago() -> "datetime":
    """Aware UTC timestamp 30 days in the past."""
    from datetime import timedelta

    now = datetime.now(tz=timezone.utc)
    return now - timedelta(days=30)
+
+
+# === Clean up ===
+
+
@pytest.fixture(scope="function")
def delete_df_collection(thl_web_rw, create_main_accounts) -> Callable:
    """Teardown callable that empties the DB tables backing a DFCollection.

    Returns a function taking a DFCollection; the tables deleted depend on
    the collection's data_type.
    """
    from generalresearch.incite.collections import (
        DFCollection,
        DFCollectionType,
    )

    def _teardown_events(coll: "DFCollection"):
        match coll.data_type:
            case DFCollectionType.LEDGER:
                # Delete children before parents so FK constraints hold,
                # then restore the baseline accounts other tests expect.
                for table in [
                    "ledger_transactionmetadata",
                    "ledger_entry",
                    "ledger_transaction",
                    "ledger_account",
                ]:
                    thl_web_rw.execute_write(
                        query=f"DELETE FROM {table};",
                    )
                create_main_accounts()

            case DFCollectionType.WALL | DFCollectionType.SESSION:
                # Walls and sessions reference each other, so defer all
                # constraint checks inside one connection/transaction and
                # delete both tables together.
                with thl_web_rw.make_connection() as conn:
                    with conn.cursor() as c:
                        c.execute("SET CONSTRAINTS ALL DEFERRED")
                        for table in [
                            "thl_wall",
                            "thl_session",
                        ]:
                            c.execute(
                                query=f"DELETE FROM {table};",
                            )

            case DFCollectionType.USER:
                # Metadata rows reference users; delete them first.
                for table in ["thl_usermetadata", "thl_user"]:
                    thl_web_rw.execute_write(
                        query=f"DELETE FROM {table};",
                    )

            case _:
                # Fallback: assumes the data_type enum value IS the table
                # name — TODO confirm for any newly added collection types.
                thl_web_rw.execute_write(
                    query=f"DELETE FROM {coll.data_type.value};",
                )

    return _teardown_events
+
+
+# === GR Related ===
+
+
@pytest.fixture(scope="function")
def amount_1() -> "USDCent":
    """One US cent."""
    from generalresearch.currency import USDCent

    # Dropped the unused `request` parameter.
    return USDCent(1)
+
+
@pytest.fixture(scope="function")
def amount_100() -> "USDCent":
    """One US dollar, expressed in cents."""
    from generalresearch.currency import USDCent

    # Dropped the unused `request` parameter.
    return USDCent(100)
+
+
def clear_directory(path):
    """Delete every entry inside *path*, leaving the directory itself."""
    for name in os.listdir(path):
        target = os.path.join(path, name)
        if os.path.islink(target) or os.path.isfile(target):
            # files and symlinks (including symlinks to directories)
            os.unlink(target)
        elif os.path.isdir(target):
            shutil.rmtree(target)
diff --git a/test_utils/grliq/__init__.py b/test_utils/grliq/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/grliq/__init__.py
diff --git a/test_utils/grliq/conftest.py b/test_utils/grliq/conftest.py
new file mode 100644
index 0000000..1818794
--- /dev/null
+++ b/test_utils/grliq/conftest.py
@@ -0,0 +1,28 @@
+from datetime import datetime, timedelta, timezone
+from typing import TYPE_CHECKING
+from uuid import uuid4
+
+import pytest
+
+
+if TYPE_CHECKING:
+ from generalresearch.grliq.models.forensic_data import GrlIqData
+
+
@pytest.fixture(scope="function")
def mnt_grliq_archive_dir(settings):
    """Expose the grliq archive mount point from settings."""
    return settings.mnt_grliq_archive_dir
+
+
@pytest.fixture(scope="function")
def grliq_data() -> "GrlIqData":
    """A fresh GrlIqData record derived from the canned dummy payload.

    Deep-copies the shared DUMMY_GRLIQ_DATA entry so the per-test mutations
    below (id/uuid/timestamps) cannot leak into other tests that read the
    same module-level dummy.
    """
    from copy import deepcopy

    from generalresearch.grliq.models.forensic_data import GrlIqData
    from generalresearch.grliq.managers import DUMMY_GRLIQ_DATA

    # Bug fix: the original mutated the shared dummy object in place.
    g: GrlIqData = deepcopy(DUMMY_GRLIQ_DATA[1]["data"])

    g.id = None
    g.uuid = uuid4().hex
    g.created_at = datetime.now(tz=timezone.utc)
    g.timestamp = g.created_at - timedelta(seconds=10)
    return g
diff --git a/test_utils/grliq/managers/__init__.py b/test_utils/grliq/managers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/grliq/managers/__init__.py
diff --git a/test_utils/grliq/managers/conftest.py b/test_utils/grliq/managers/conftest.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/grliq/managers/conftest.py
diff --git a/test_utils/grliq/models/__init__.py b/test_utils/grliq/models/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/grliq/models/__init__.py
diff --git a/test_utils/grliq/models/conftest.py b/test_utils/grliq/models/conftest.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/grliq/models/conftest.py
diff --git a/test_utils/incite/__init__.py b/test_utils/incite/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/incite/__init__.py
diff --git a/test_utils/incite/collections/__init__.py b/test_utils/incite/collections/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/incite/collections/__init__.py
diff --git a/test_utils/incite/collections/conftest.py b/test_utils/incite/collections/conftest.py
new file mode 100644
index 0000000..1b61ed5
--- /dev/null
+++ b/test_utils/incite/collections/conftest.py
@@ -0,0 +1,205 @@
+from datetime import timedelta, datetime
+from typing import TYPE_CHECKING, Optional, Callable
+
+import pytest
+
+from test_utils.incite.conftest import mnt_filepath
+from test_utils.conftest import clear_directory
+
+if TYPE_CHECKING:
+ from generalresearch.incite.collections import DFCollection
+ from generalresearch.incite.base import GRLDatasets, DFCollectionType
+ from generalresearch.incite.collections.thl_web import LedgerDFCollection
+ from generalresearch.incite.collections.thl_web import (
+ WallDFCollection,
+ SessionDFCollection,
+ TaskAdjustmentDFCollection,
+ UserDFCollection,
+ AuditLogDFCollection,
+ )
+
+
@pytest.fixture(scope="function")
def user_collection(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
    thl_web_rr,
) -> "UserDFCollection":
    """UserDFCollection over the test window, archived under the tmp mount."""
    from generalresearch.incite.collections.thl_web import (
        UserDFCollection,
        DFCollectionType,
    )

    return UserDFCollection(
        start=start,
        # Consistency fix: guard like the sibling collection fixtures so an
        # open-ended window (duration=None) doesn't raise on `start + None`.
        finished=start + duration if duration else None,
        offset=offset,
        pg_config=thl_web_rr,
        archive_path=mnt_filepath.archive_path(enum_type=DFCollectionType.USER),
    )
+
+
@pytest.fixture(scope="function")
def wall_collection(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
    thl_web_rr,
) -> "WallDFCollection":
    """WallDFCollection over the test window, archived under the tmp mount."""
    from generalresearch.incite.collections.thl_web import (
        WallDFCollection,
        DFCollectionType,
    )

    end = start + duration if duration else None
    return WallDFCollection(
        start=start,
        finished=end,
        offset=offset,
        pg_config=thl_web_rr,
        archive_path=mnt_filepath.archive_path(enum_type=DFCollectionType.WALL),
    )
+
+
@pytest.fixture(scope="function")
def session_collection(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
    thl_web_rr,
) -> "SessionDFCollection":
    """SessionDFCollection over the test window, archived under the tmp mount."""
    from generalresearch.incite.collections.thl_web import (
        SessionDFCollection,
        DFCollectionType,
    )

    end = start + duration if duration else None
    return SessionDFCollection(
        start=start,
        finished=end,
        offset=offset,
        pg_config=thl_web_rr,
        archive_path=mnt_filepath.archive_path(enum_type=DFCollectionType.SESSION),
    )
+
+
+# IPInfoDFCollection
+# IPHistoryDFCollection
+# IPHistoryWSDFCollection
+
+# @pytest.fixture
+# def ip_history_collection(mnt_filepath, offset, duration, start,
+# thl_web_rw) -> IPHistoryDFCollection:
+# return IPHistoryDFCollection(
+# start=start,
+# finished=start + duration,
+# offset=offset,
+# pg_config=thl_web_rw,
+# archive_path=mnt_filepath.archive_path(enum_type=DFCollectionType.IP_HISTORY),
+# )
+
+
@pytest.fixture(scope="function")
def task_adj_collection(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: Optional[timedelta],
    start: datetime,
    thl_web_rr,
) -> "TaskAdjustmentDFCollection":
    """TaskAdjustmentDFCollection over the test window."""
    from generalresearch.incite.collections.thl_web import (
        TaskAdjustmentDFCollection,
        DFCollectionType,
    )

    return TaskAdjustmentDFCollection(
        start=start,
        # Bug fix: the original `... if duration else duration` would pass a
        # bare timedelta (e.g. timedelta(0)) through as `finished`; an
        # open/empty window should be None, matching the sibling fixtures.
        finished=start + duration if duration else None,
        offset=offset,
        pg_config=thl_web_rr,
        archive_path=mnt_filepath.archive_path(
            enum_type=DFCollectionType.TASK_ADJUSTMENT
        ),
    )
+
+
@pytest.fixture(scope="function")
def auditlog_collection(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
    thl_web_rr,
) -> "AuditLogDFCollection":
    """AuditLogDFCollection over the test window."""
    from generalresearch.incite.collections.thl_web import (
        AuditLogDFCollection,
        DFCollectionType,
    )

    return AuditLogDFCollection(
        start=start,
        finished=start + duration,
        offset=offset,
        pg_config=thl_web_rr,
        # NOTE(review): archive path uses DFCollectionType.LEDGER — this
        # looks copy-pasted from ledger_collection; confirm whether an
        # AUDIT_LOG enum member exists and should be used here.
        archive_path=mnt_filepath.archive_path(enum_type=DFCollectionType.LEDGER),
    )
+
+
@pytest.fixture(scope="function")
def ledger_collection(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
    thl_web_rr,
) -> "LedgerDFCollection":
    """LedgerDFCollection over the test window, archived under the tmp mount."""
    from generalresearch.incite.collections.thl_web import (
        LedgerDFCollection,
        DFCollectionType,
    )

    return LedgerDFCollection(
        start=start,
        # Bug fix: the original `... if duration else duration` would pass a
        # bare timedelta (e.g. timedelta(0)) through as `finished`; an
        # open/empty window should be None, matching the sibling fixtures.
        finished=start + duration if duration else None,
        offset=offset,
        pg_config=thl_web_rr,
        archive_path=mnt_filepath.archive_path(enum_type=DFCollectionType.LEDGER),
    )
+
+
@pytest.fixture(scope="function")
def rm_ledger_collection(ledger_collection) -> Callable:
    """Callable that empties the ledger collection's archive directory."""

    def _cleanup():
        clear_directory(ledger_collection.archive_path)

    return _cleanup
+
+
+# --------------------------
+# Generic / Base
+# --------------------------
+
+
@pytest.fixture(scope="function")
def df_collection(
    mnt_filepath,
    df_collection_data_type: "DFCollectionType",
    offset,
    duration,
    utc_90days_ago,
    thl_web_rr,
) -> "DFCollection":
    """Generic DFCollection of the parametrized data type over the test window."""
    from generalresearch.incite.collections import DFCollection

    window_start = utc_90days_ago.replace(microsecond=0)
    archive = mnt_filepath.archive_path(enum_type=df_collection_data_type)

    return DFCollection(
        data_type=df_collection_data_type,
        archive_path=archive,
        offset=offset,
        pg_config=thl_web_rr,
        start=window_start,
        finished=window_start + duration,
    )
diff --git a/test_utils/incite/conftest.py b/test_utils/incite/conftest.py
new file mode 100644
index 0000000..759467a
--- /dev/null
+++ b/test_utils/incite/conftest.py
@@ -0,0 +1,201 @@
+from datetime import datetime, timezone, timedelta
+from os.path import join as pjoin
+from pathlib import Path
+from random import choice as randchoice
+from shutil import rmtree
+from typing import Callable, TYPE_CHECKING, Optional
+from uuid import uuid4
+
+import pytest
+from _pytest.fixtures import SubRequest
+from faker import Faker
+
+from test_utils.managers.ledger.conftest import session_with_tx_factory
+from test_utils.models.conftest import session_factory
+
+if TYPE_CHECKING:
+ from generalresearch.models.thl.user import User
+ from generalresearch.incite.base import GRLDatasets
+ from generalresearch.incite.mergers import MergeType
+ from generalresearch.incite.collections import (
+ DFCollection,
+ DFCollectionType,
+ DFCollectionItem,
+ )
+
+fake = Faker()
+
+
@pytest.fixture(scope="function")
def mnt_gr_api_dir(request: SubRequest, settings):
    """Create the per-test GR API scratch tree (one subdir per ReportType)."""
    from generalresearch.models.admin.request import ReportType

    base = Path(settings.mnt_gr_api_dir)
    base.mkdir(parents=True, exist_ok=True)

    for report_type in ReportType:
        (base / report_type.value).mkdir(exist_ok=True)

    def tmp_file_teardown():
        # Safety net: refuse to recursively delete anything on a mount.
        assert "/mnt/" not in str(base), (
            "Under no condition, testing or otherwise should we have code delete "
            " any folders or potential data on a network mount"
        )

        rmtree(base)

    request.addfinalizer(tmp_file_teardown)

    return base
+
+
@pytest.fixture(scope="function")
def event_report_request(start):
    """POP_EVENT ReportRequest at 5-minute intervals from `start`."""
    from generalresearch.models.admin.request import (
        ReportRequest,
        ReportType,
    )

    # Dropped the unused `utc_hour_ago` fixture dependency.
    return ReportRequest.model_validate(
        {
            "report_type": ReportType.POP_EVENT,
            "interval": "5min",
            "start": start,
        }
    )
+
+
@pytest.fixture(scope="function")
def session_report_request(start):
    """POP_SESSION ReportRequest at 5-minute intervals from `start`."""
    from generalresearch.models.admin.request import (
        ReportRequest,
        ReportType,
    )

    # Dropped the unused `utc_hour_ago` fixture dependency.
    return ReportRequest.model_validate(
        {
            "report_type": ReportType.POP_SESSION,
            "interval": "5min",
            "start": start,
        }
    )
+
+
@pytest.fixture(scope="function")
def mnt_filepath(request: SubRequest) -> "GRLDatasets":
    """Temporary GRLDatasets root for all DFCollection & Merger parquet files."""
    from generalresearch.incite.base import GRLDatasets, NFSMount

    datasets = GRLDatasets(
        data_src=Path("/tmp") / f"test-{uuid4().hex[:12]}",
        incite=NFSMount(point="thl-incite"),
    )

    def tmp_file_teardown():
        # Safety net: refuse to recursively delete anything on a mount.
        assert "/mnt/" not in str(datasets.data_src), (
            "Under no condition, testing or otherwise should we have code delete "
            " any folders or potential data on a network mount"
        )

        rmtree(datasets.data_src)

    request.addfinalizer(tmp_file_teardown)

    return datasets
+
+
@pytest.fixture(scope="function")
def start(utc_90days_ago) -> "datetime":
    """Window start: 90 days ago, truncated to whole seconds."""
    return utc_90days_ago.replace(microsecond=0)
+
+
@pytest.fixture(scope="function")
def offset() -> str:
    """Default pandas-style bucket offset for collections/merges."""
    return "15min"
+
+
@pytest.fixture(scope="function")
def duration() -> Optional["timedelta"]:
    """Default test-window length (None would mean open-ended)."""
    return timedelta(hours=1)
+
+
@pytest.fixture(scope="function")
def df_collection_data_type() -> "DFCollectionType":
    """Data type used by the generic `df_collection` fixture."""
    from generalresearch.incite.collections import DFCollectionType

    return DFCollectionType.TEST
+
+
@pytest.fixture(scope="function")
def merge_type() -> "MergeType":
    """Merge type used by the generic `merge_collection` fixture."""
    from generalresearch.incite.mergers import MergeType

    return MergeType.TEST
+
+
+@pytest.fixture(scope="function")
+def incite_item_factory(
+ session_factory,
+ product,
+ user_factory,
+ session_with_tx_factory,
+) -> Callable:
+ def _incite_item_factory(
+ item: "DFCollectionItem",
+ observations: int = 3,
+ user: Optional["User"] = None,
+ ):
+ from generalresearch.incite.collections import (
+ DFCollection,
+ DFCollectionType,
+ )
+ from generalresearch.models.thl.session import Source
+
+ collection: DFCollection = item._collection
+ data_type: DFCollectionType = collection.data_type
+
+ for idx in range(5):
+ item_time = fake.date_time_between(
+ start_date=item.start, end_date=item.finish, tzinfo=timezone.utc
+ )
+
+ match data_type:
+ case DFCollectionType.USER:
+ user_factory(product=product, created=item_time)
+
+ case DFCollectionType.LEDGER:
+ session_with_tx_factory(started=item_time, user=user)
+
+ case DFCollectionType.WALL:
+ u = (
+ user
+ if user
+ else user_factory(product=product, created=item_time)
+ )
+ session_factory(
+ user=u,
+ started=item_time,
+ wall_source=randchoice(list(Source)),
+ )
+
+ case DFCollectionType.SESSION:
+ u = (
+ user
+ if user
+ else user_factory(product=product, created=item_time)
+ )
+ session_factory(
+ user=u,
+ started=item_time,
+ wall_source=randchoice(list(Source)),
+ )
+
+ case _:
+ raise ValueError("Unsupported DFCollectionItem")
+
+ return None
+
+ return _incite_item_factory
diff --git a/test_utils/incite/mergers/__init__.py b/test_utils/incite/mergers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/incite/mergers/__init__.py
diff --git a/test_utils/incite/mergers/conftest.py b/test_utils/incite/mergers/conftest.py
new file mode 100644
index 0000000..e4e3bdd
--- /dev/null
+++ b/test_utils/incite/mergers/conftest.py
@@ -0,0 +1,247 @@
+from datetime import timedelta, datetime
+from typing import TYPE_CHECKING, Optional, Callable
+
+import pytest
+
+from test_utils.conftest import clear_directory
+from test_utils.incite.conftest import mnt_filepath
+
+if TYPE_CHECKING:
+ from generalresearch.incite.mergers import MergeType
+ from generalresearch.incite.mergers.ym_wall_summary import (
+ YMWallSummaryMerge,
+ YMWallSummaryMergeItem,
+ )
+ from generalresearch.incite.mergers.pop_ledger import PopLedgerMerge
+ from generalresearch.incite.mergers.ym_survey_wall import YMSurveyWallMerge
+ from generalresearch.incite.base import GRLDatasets
+ from generalresearch.incite.mergers.foundations.enriched_session import (
+ EnrichedSessionMerge,
+ )
+ from generalresearch.incite.mergers.foundations.enriched_task_adjust import (
+ EnrichedTaskAdjustMerge,
+ )
+ from generalresearch.incite.mergers.foundations.enriched_wall import (
+ EnrichedWallMerge,
+ )
+ from generalresearch.incite.mergers.foundations.user_id_product import (
+ UserIdProductMerge,
+ )
+ from generalresearch.incite.mergers.ym_survey_wall import (
+ YMSurveyWallMergeCollectionItem,
+ )
+
+
+# --------------------------
+# Merges
+# --------------------------
+
+
@pytest.fixture(scope="function")
def rm_pop_ledger_merge(pop_ledger_merge) -> Callable:
    """Callable that empties the pop-ledger merge's archive directory."""

    def _cleanup():
        clear_directory(pop_ledger_merge.archive_path)

    return _cleanup
+
+
@pytest.fixture(scope="function")
def pop_ledger_merge(
    mnt_filepath: "GRLDatasets",
    offset: str,
    start: datetime,
    duration: timedelta,
) -> "PopLedgerMerge":
    """PopLedgerMerge over the test window."""
    from generalresearch.incite.mergers import MergeType
    from generalresearch.incite.mergers.pop_ledger import PopLedgerMerge

    end = start + duration if duration else None
    return PopLedgerMerge(
        start=start,
        finished=end,
        offset=offset,
        archive_path=mnt_filepath.archive_path(enum_type=MergeType.POP_LEDGER),
    )
+
+
@pytest.fixture(scope="function")
def pop_ledger_merge_item(
    start,
    pop_ledger_merge,
) -> "PopLedgerMergeItem":
    """A PopLedgerMergeItem anchored at `start` on its parent merge."""
    from generalresearch.incite.mergers.pop_ledger import PopLedgerMergeItem

    item = PopLedgerMergeItem(
        start=start,
        _collection=pop_ledger_merge,
    )
    return item
+
+
@pytest.fixture(scope="function")
def ym_survey_wall_merge(
    mnt_filepath: "GRLDatasets",
) -> "YMSurveyWallMerge":
    """Open-ended YMSurveyWallMerge with a 10-day offset."""
    from generalresearch.incite.mergers.ym_survey_wall import YMSurveyWallMerge
    from generalresearch.incite.mergers import MergeType

    # Dropped the unused `start` fixture dependency; the merge itself is
    # deliberately constructed open-ended (start=None).
    return YMSurveyWallMerge(
        start=None,
        offset="10D",
        archive_path=mnt_filepath.archive_path(enum_type=MergeType.YM_SURVEY_WALL),
    )
+
+
@pytest.fixture(scope="function")
def ym_survey_wall_merge_item(
    start, ym_survey_wall_merge
) -> "YMSurveyWallMergeCollectionItem":
    """A YMSurveyWallMergeCollectionItem anchored at `start` on its parent merge."""
    from generalresearch.incite.mergers.ym_survey_wall import (
        YMSurveyWallMergeCollectionItem,
    )

    # Bug fix: `_collection` referenced the undefined name `pop_ledger_merge`
    # (NameError at call time); the item belongs to ym_survey_wall_merge.
    return YMSurveyWallMergeCollectionItem(
        start=start,
        _collection=ym_survey_wall_merge,
    )
+
+
@pytest.fixture(scope="function")
def ym_wall_summary_merge(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
) -> "YMWallSummaryMerge":
    """YMWallSummaryMerge over the test window."""
    from generalresearch.incite.mergers.ym_wall_summary import YMWallSummaryMerge
    from generalresearch.incite.mergers import MergeType

    return YMWallSummaryMerge(
        start=start,
        finished=start + duration,
        offset=offset,
        # NOTE(review): archive path uses MergeType.POP_LEDGER — this looks
        # copy-pasted from pop_ledger_merge; confirm whether a
        # YM_WALL_SUMMARY enum member exists and should be used here.
        archive_path=mnt_filepath.archive_path(enum_type=MergeType.POP_LEDGER),
    )
+
+
@pytest.fixture(scope="function")
def ym_wall_summary_merge_item(
    start, ym_wall_summary_merge
) -> "YMWallSummaryMergeItem":
    """A YMWallSummaryMergeItem anchored at `start` on its parent merge."""
    from generalresearch.incite.mergers.ym_wall_summary import (
        YMWallSummaryMergeItem,
    )

    # Bug fixes: this function was missing the @pytest.fixture decorator, and
    # `_collection` referenced the undefined name `pop_ledger_merge`
    # (NameError at call time); the item belongs to ym_wall_summary_merge.
    return YMWallSummaryMergeItem(
        start=start,
        _collection=ym_wall_summary_merge,
    )
+
+
+# --------------------------
+# Merges: Foundations
+# --------------------------
+
+
@pytest.fixture(scope="function")
def enriched_session_merge(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
) -> "EnrichedSessionMerge":
    """EnrichedSessionMerge over the test window."""
    from generalresearch.incite.mergers import MergeType
    from generalresearch.incite.mergers.foundations.enriched_session import (
        EnrichedSessionMerge,
    )

    end = start + duration if duration else None
    return EnrichedSessionMerge(
        start=start,
        finished=end,
        offset=offset,
        archive_path=mnt_filepath.archive_path(enum_type=MergeType.ENRICHED_SESSION),
    )
+
+
@pytest.fixture(scope="function")
def enriched_task_adjust_merge(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
) -> "EnrichedTaskAdjustMerge":
    """EnrichedTaskAdjustMerge over the test window."""
    from generalresearch.incite.mergers import MergeType
    from generalresearch.incite.mergers.foundations.enriched_task_adjust import (
        EnrichedTaskAdjustMerge,
    )

    archive = mnt_filepath.archive_path(enum_type=MergeType.ENRICHED_TASK_ADJUST)
    return EnrichedTaskAdjustMerge(
        start=start,
        finished=start + duration,
        offset=offset,
        archive_path=archive,
    )
+
+
@pytest.fixture(scope="function")
def enriched_wall_merge(
    mnt_filepath: "GRLDatasets",
    offset: str,
    duration: timedelta,
    start: datetime,
) -> "EnrichedWallMerge":
    """EnrichedWallMerge over the test window."""
    from generalresearch.incite.mergers.foundations.enriched_wall import (
        EnrichedWallMerge,
    )
    from generalresearch.incite.mergers import MergeType

    end = start + duration if duration else None
    return EnrichedWallMerge(
        start=start,
        finished=end,
        offset=offset,
        archive_path=mnt_filepath.archive_path(enum_type=MergeType.ENRICHED_WALL),
    )
+
+
@pytest.fixture(scope="function")
def user_id_product_merge(
    mnt_filepath: "GRLDatasets",
    duration: timedelta,
    offset,
    start: datetime,
) -> "UserIdProductMerge":
    """UserIdProductMerge over the test window."""
    from generalresearch.incite.mergers.foundations.user_id_product import (
        UserIdProductMerge,
    )
    from generalresearch.incite.mergers import MergeType

    return UserIdProductMerge(
        start=start,
        finished=start + duration,
        # NOTE(review): the `offset` fixture is declared above but ignored —
        # offset=None is passed instead. Confirm whether this merge is meant
        # to be unbucketed or should receive the fixture value.
        offset=None,
        archive_path=mnt_filepath.archive_path(enum_type=MergeType.USER_ID_PRODUCT),
    )
+
+
+# --------------------------
+# Generic / Base
+# --------------------------
+
+
@pytest.fixture(scope="function")
def merge_collection(
    mnt_filepath,
    merge_type: "MergeType",
    offset,
    duration,
    start,
):
    """Generic MergeCollection of the parametrized merge type."""
    from generalresearch.incite.mergers import MergeCollection

    end = start + duration
    archive = mnt_filepath.archive_path(enum_type=merge_type)

    return MergeCollection(
        merge_type=merge_type,
        start=start,
        finished=end,
        offset=offset,
        archive_path=archive,
    )
diff --git a/test_utils/managers/__init__.py b/test_utils/managers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/managers/__init__.py
diff --git a/test_utils/managers/cashout_methods.py b/test_utils/managers/cashout_methods.py
new file mode 100644
index 0000000..c338676
--- /dev/null
+++ b/test_utils/managers/cashout_methods.py
@@ -0,0 +1,76 @@
+from generalresearch.models.thl.wallet import PayoutType, Currency
+from generalresearch.models.thl.wallet.cashout_method import (
+ CashoutMethod,
+ TangoCashoutMethodData,
+ AmtCashoutMethodData,
+)
+import random
+
+from uuid import uuid4
+
+
def random_ext_id(base: str = "U02"):
    """Return *base* followed by a random zero-padded 5-digit suffix."""
    return f"{base}{random.randint(0, 99999):05d}"
+
+
# Canned Tango cashout methods for tests: one fixed-value USD card and one
# variable-value EUR certificate. `id` and `ext_id` are randomized at import
# time so repeated runs don't collide on unique keys.
EXAMPLE_TANGO_CASHOUT_METHODS = [
    CashoutMethod(
        id=uuid4().hex,
        last_updated="2021-06-23T20:45:38.239182Z",
        is_live=True,
        type=PayoutType.TANGO,
        ext_id=random_ext_id(),
        name="Safeway eGift Card $25",
        data=TangoCashoutMethodData(
            value_type="fixed", countries=["US"], utid=random_ext_id()
        ),
        user=None,  # catalog-level method, not bound to any user
        image_url="https://d30s7yzk2az89n.cloudfront.net/images/brands/b694446-1200w-326ppi.png",
        original_currency=Currency.USD,
        # Fixed-value card: min == max. Presumably cents ($25.00) — TODO confirm units.
        min_value=2500,
        max_value=2500,
    ),
    CashoutMethod(
        id=uuid4().hex,
        last_updated="2021-06-23T20:45:38.239182Z",
        is_live=True,
        type=PayoutType.TANGO,
        ext_id=random_ext_id(),
        name="Amazon.it Gift Certificate",
        data=TangoCashoutMethodData(
            value_type="variable", countries=["IT"], utid="U006961"
        ),
        user=None,  # catalog-level method, not bound to any user
        image_url="https://d30s7yzk2az89n.cloudfront.net/images/brands/b405753-1200w-326ppi.png",
        original_currency=Currency.EUR,
        min_value=1,
        max_value=10000,
    ),
]
+
# Canned AMT (Mechanical Turk) cashout methods. AMT payouts carry no external
# catalog id (ext_id=None) and use empty method data.
AMT_ASSIGNMENT_CASHOUT_METHOD = CashoutMethod(
    id=uuid4().hex,
    last_updated="2021-06-23T20:45:38.239182Z",
    is_live=True,
    type=PayoutType.AMT,
    ext_id=None,
    name="AMT Assignment",
    data=AmtCashoutMethodData(),
    user=None,  # catalog-level method, not bound to any user
    min_value=1,
    max_value=5,
)

AMT_BONUS_CASHOUT_METHOD = CashoutMethod(
    id=uuid4().hex,
    last_updated="2021-06-23T20:45:38.239182Z",
    is_live=True,
    type=PayoutType.AMT,
    ext_id=None,
    name="AMT Bonus",
    data=AmtCashoutMethodData(),
    user=None,  # catalog-level method, not bound to any user
    min_value=7,
    max_value=4000,
)
diff --git a/test_utils/managers/conftest.py b/test_utils/managers/conftest.py
new file mode 100644
index 0000000..3a237d1
--- /dev/null
+++ b/test_utils/managers/conftest.py
@@ -0,0 +1,701 @@
+from typing import Callable, TYPE_CHECKING
+
+import pymysql
+import pytest
+
+from generalresearch.managers.base import Permission
+from generalresearch.models import Source
+from test_utils.managers.cashout_methods import (
+ EXAMPLE_TANGO_CASHOUT_METHODS,
+ AMT_ASSIGNMENT_CASHOUT_METHOD,
+ AMT_BONUS_CASHOUT_METHOD,
+)
+
if TYPE_CHECKING:
    # Typing-only imports: visible to annotations below at zero runtime cost
    # (and without creating import cycles). Duplicates from the original
    # (PayoutEventManager, AuditLogManager, IPRecordManager) are merged.
    from generalresearch.grliq.managers.forensic_data import (
        GrlIqDataManager,
    )
    from generalresearch.grliq.managers.forensic_events import (
        GrlIqEventManager,
    )
    from generalresearch.grliq.managers.forensic_results import (
        GrlIqCategoryResultsReader,
    )
    from generalresearch.managers.thl.payout import (
        PayoutEventManager,
        UserPayoutEventManager,
        BrokerageProductPayoutEventManager,
        BusinessPayoutEventManager,
    )
    from generalresearch.managers.thl.maxmind.basic import (
        MaxmindBasicManager,
    )

    from generalresearch.managers.gr.authentication import (
        GRUserManager,
        GRTokenManager,
    )
    from generalresearch.managers.gr.business import (
        BusinessManager,
        BusinessAddressManager,
        BusinessBankAccountManager,
    )
    from generalresearch.managers.gr.team import (
        TeamManager,
        MembershipManager,
    )
    from generalresearch.managers.thl.contest_manager import ContestManager
    from generalresearch.managers.thl.ipinfo import (
        GeoIpInfoManager,
        IPGeonameManager,
        IPInformationManager,
    )
    from generalresearch.managers.thl.ledger_manager.ledger import (
        LedgerTransactionManager,
        LedgerManager,
        LedgerAccountManager,
    )
    from generalresearch.managers.thl.ledger_manager.thl_ledger import (
        ThlLedgerManager,
    )
    from generalresearch.managers.thl.maxmind import MaxmindManager
    from generalresearch.managers.thl.product import ProductManager
    from generalresearch.managers.thl.session import SessionManager
    from generalresearch.managers.thl.user_manager.user_manager import (
        UserManager,
    )
    from generalresearch.managers.thl.user_manager.user_metadata_manager import (
        UserMetadataManager,
    )
    # NOTE(review): the original also re-imported IPGeonameManager and
    # IPInformationManager from .userhealth, shadowing the .ipinfo imports
    # above; the runtime fixtures import them from .ipinfo, so that module is
    # assumed canonical here — confirm against the managers package.
    from generalresearch.managers.thl.userhealth import (
        AuditLogManager,
        IPRecordManager,
        UserIpHistoryManager,
    )
    from generalresearch.managers.thl.wall import (
        WallManager,
        WallCacheManager,
    )
    from generalresearch.managers.thl.task_adjustment import (
        TaskAdjustmentManager,
    )
+
+
+# === THL ===
+
+
@pytest.fixture(scope="session")
def ltxm(thl_web_rw, thl_redis_config) -> "LedgerTransactionManager":
    """Session-scoped LedgerTransactionManager (create/read) on the unit-test DB."""
    # Safety guard: refuse to run against anything but a unittest database.
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.ledger_manager.ledger import (
        LedgerTransactionManager,
    )

    # NOTE(review): this constructor is passed ``sql_helper=`` while the
    # sibling ledger fixtures below pass ``pg_config=`` — confirm the
    # differing parameter name is intentional for this manager.
    return LedgerTransactionManager(
        sql_helper=thl_web_rw,
        permissions=[Permission.CREATE, Permission.READ],
        testing=True,
        redis_config=thl_redis_config,
    )


@pytest.fixture(scope="session")
def lam(thl_web_rw, thl_redis_config) -> "LedgerAccountManager":
    """Session-scoped LedgerAccountManager (create/read) on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.ledger_manager.ledger import (
        LedgerAccountManager,
    )

    return LedgerAccountManager(
        pg_config=thl_web_rw,
        permissions=[Permission.CREATE, Permission.READ],
        testing=True,
        redis_config=thl_redis_config,
    )


@pytest.fixture(scope="session")
def lm(thl_web_rw, thl_redis_config) -> "LedgerManager":
    """Session-scoped LedgerManager with full CRUD permissions on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.ledger_manager.ledger import (
        LedgerManager,
    )

    return LedgerManager(
        pg_config=thl_web_rw,
        permissions=[
            Permission.CREATE,
            Permission.READ,
            Permission.UPDATE,
            Permission.DELETE,
        ],
        testing=True,
        redis_config=thl_redis_config,
    )


@pytest.fixture(scope="session")
def thl_lm(thl_web_rw, thl_redis_config) -> "ThlLedgerManager":
    """Session-scoped THL-specific ledger manager with full CRUD permissions."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.ledger_manager.thl_ledger import (
        ThlLedgerManager,
    )

    return ThlLedgerManager(
        pg_config=thl_web_rw,
        permissions=[
            Permission.CREATE,
            Permission.READ,
            Permission.UPDATE,
            Permission.DELETE,
        ],
        testing=True,
        redis_config=thl_redis_config,
    )
+
+
@pytest.fixture(scope="session")
def payout_event_manager(thl_web_rw, thl_redis_config) -> "PayoutEventManager":
    """Session-scoped PayoutEventManager (create/read) on the unit-test DB.

    NOTE(review): unlike the ledger fixtures, the payout managers are not
    constructed with ``testing=True`` — confirm that is intentional.
    """
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.payout import PayoutEventManager

    return PayoutEventManager(
        pg_config=thl_web_rw,
        permissions=[Permission.CREATE, Permission.READ],
        redis_config=thl_redis_config,
    )


@pytest.fixture(scope="session")
def user_payout_event_manager(thl_web_rw, thl_redis_config) -> "UserPayoutEventManager":
    """Session-scoped UserPayoutEventManager (create/read) on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.payout import UserPayoutEventManager

    return UserPayoutEventManager(
        pg_config=thl_web_rw,
        permissions=[Permission.CREATE, Permission.READ],
        redis_config=thl_redis_config,
    )


@pytest.fixture(scope="session")
def brokerage_product_payout_event_manager(
    thl_web_rw, thl_redis_config
) -> "BrokerageProductPayoutEventManager":
    """Session-scoped BrokerageProductPayoutEventManager (create/read)."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.payout import (
        BrokerageProductPayoutEventManager,
    )

    return BrokerageProductPayoutEventManager(
        pg_config=thl_web_rw,
        permissions=[Permission.CREATE, Permission.READ],
        redis_config=thl_redis_config,
    )


@pytest.fixture(scope="session")
def business_payout_event_manager(
    thl_web_rw, thl_redis_config
) -> "BusinessPayoutEventManager":
    """Session-scoped BusinessPayoutEventManager (create/read)."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.payout import (
        BusinessPayoutEventManager,
    )

    return BusinessPayoutEventManager(
        pg_config=thl_web_rw,
        permissions=[Permission.CREATE, Permission.READ],
        redis_config=thl_redis_config,
    )
+
+
@pytest.fixture(scope="session")
def product_manager(thl_web_rw) -> "ProductManager":
    """Session-scoped ProductManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.product import ProductManager

    return ProductManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def user_manager(settings, thl_web_rw, thl_web_rr) -> "UserManager":
    """Session-scoped UserManager using both the RW and read-replica DBs.

    Both DSNs are checked since this manager writes and reads independently.
    """
    assert "/unittest-" in thl_web_rw.dsn.path
    assert "/unittest-" in thl_web_rr.dsn.path

    from generalresearch.managers.thl.user_manager.user_manager import (
        UserManager,
    )

    # NOTE(review): takes a live ``settings.redis`` client rather than a
    # redis config object like the other fixtures — confirm intentional.
    return UserManager(
        pg_config=thl_web_rw,
        pg_config_rr=thl_web_rr,
        redis=settings.redis,
    )


@pytest.fixture(scope="session")
def user_metadata_manager(thl_web_rw) -> "UserMetadataManager":
    """Session-scoped UserMetadataManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.user_manager.user_metadata_manager import (
        UserMetadataManager,
    )

    return UserMetadataManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def session_manager(thl_web_rw) -> "SessionManager":
    """Session-scoped SessionManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.session import SessionManager

    return SessionManager(pg_config=thl_web_rw)
+
+
@pytest.fixture(scope="session")
def wall_manager(thl_web_rw) -> "WallManager":
    """Session-scoped WallManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.wall import WallManager

    return WallManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def wall_cache_manager(thl_web_rw, thl_redis_config) -> "WallCacheManager":
    """Session-scoped WallCacheManager (DB + redis cache).

    NOTE(review): the unittest-DSN safety assert is commented out here,
    unlike the sibling fixtures — confirm whether that is deliberate.
    """
    # assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.wall import WallCacheManager

    return WallCacheManager(pg_config=thl_web_rw, redis_config=thl_redis_config)


@pytest.fixture(scope="session")
def task_adjustment_manager(thl_web_rw) -> "TaskAdjustmentManager":
    """Session-scoped TaskAdjustmentManager (unittest-DSN assert disabled)."""
    # assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.task_adjustment import (
        TaskAdjustmentManager,
    )

    return TaskAdjustmentManager(pg_config=thl_web_rw)
+
+
@pytest.fixture(scope="session")
def contest_manager(thl_web_rw) -> "ContestManager":
    """Session-scoped ContestManager with full CRUD permissions."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.contest_manager import ContestManager

    return ContestManager(
        pg_config=thl_web_rw,
        permissions=[
            Permission.CREATE,
            Permission.READ,
            Permission.UPDATE,
            Permission.DELETE,
        ],
    )


@pytest.fixture(scope="session")
def category_manager(thl_web_rw):
    """Session-scoped CategoryManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.category import CategoryManager

    return CategoryManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def buyer_manager(thl_web_rw):
    """Session-scoped BuyerManager (unittest-DSN assert disabled)."""
    # assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.buyer import BuyerManager

    return BuyerManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def survey_manager(thl_web_rw):
    """Session-scoped SurveyManager (unittest-DSN assert disabled)."""
    # assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.survey import SurveyManager

    return SurveyManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def surveystat_manager(thl_web_rw):
    """Session-scoped SurveyStatManager (unittest-DSN assert disabled)."""
    # assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.survey import SurveyStatManager

    return SurveyStatManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def surveypenalty_manager(thl_redis_config):
    """Session-scoped SurveyPenaltyManager; redis-only, no database."""
    from generalresearch.managers.thl.survey_penalty import SurveyPenaltyManager

    return SurveyPenaltyManager(redis_config=thl_redis_config)
+
+
@pytest.fixture(scope="session")
def upk_schema_manager(thl_web_rw):
    """Session-scoped UpkSchemaManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.profiling.schema import (
        UpkSchemaManager,
    )

    return UpkSchemaManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def user_upk_manager(thl_web_rw, thl_redis_config):
    """Session-scoped UserUpkManager (DB + redis) on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.profiling.user_upk import (
        UserUpkManager,
    )

    return UserUpkManager(pg_config=thl_web_rw, redis_config=thl_redis_config)
+
+
@pytest.fixture(scope="session")
def question_manager(thl_web_rw):
    """Session-scoped QuestionManager on the unit-test DB.

    NOTE(review): the original also requested the ``thl_redis_config``
    fixture but never passed it to the manager; the unused dependency has
    been dropped.
    """
    assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.profiling.question import (
        QuestionManager,
    )

    return QuestionManager(pg_config=thl_web_rw)
+
+
@pytest.fixture(scope="session")
def uqa_manager(thl_web_rw, thl_redis_config):
    """Session-scoped UQAManager (user question answers; DB + redis cache)."""
    assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.profiling.uqa import UQAManager

    return UQAManager(redis_config=thl_redis_config, pg_config=thl_web_rw)


@pytest.fixture(scope="function")
def uqa_manager_clear_cache(uqa_manager, user):
    """Clear the user's UQA cache before and after the test."""
    # On successive py-test/jenkins runs, the cache may contain
    # the previous run's info (keyed under the same user_id)
    uqa_manager.clear_cache(user)
    yield
    uqa_manager.clear_cache(user)
+
+
@pytest.fixture(scope="session")
def audit_log_manager(thl_web_rw) -> "AuditLogManager":
    """Session-scoped AuditLogManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.userhealth import AuditLogManager

    return AuditLogManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def ip_geoname_manager(thl_web_rw) -> "IPGeonameManager":
    """Session-scoped IPGeonameManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.ipinfo import IPGeonameManager

    return IPGeonameManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def ip_information_manager(thl_web_rw) -> "IPInformationManager":
    """Session-scoped IPInformationManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.ipinfo import IPInformationManager

    return IPInformationManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def ip_record_manager(thl_web_rw, thl_redis_config) -> "IPRecordManager":
    """Session-scoped IPRecordManager (DB + redis) on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.userhealth import IPRecordManager

    return IPRecordManager(pg_config=thl_web_rw, redis_config=thl_redis_config)


@pytest.fixture(scope="session")
def user_iphistory_manager(thl_web_rw, thl_redis_config) -> "UserIpHistoryManager":
    """Session-scoped UserIpHistoryManager (DB + redis) on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.userhealth import (
        UserIpHistoryManager,
    )

    return UserIpHistoryManager(pg_config=thl_web_rw, redis_config=thl_redis_config)


@pytest.fixture(scope="function")
def user_iphistory_manager_clear_cache(user_iphistory_manager, user):
    """Clear the user's IP-history cache before and after the test."""
    # On successive py-test/jenkins runs, the cache may contain
    # the previous run's info (keyed under the same user_id)
    user_iphistory_manager.delete_user_ip_history_cache(user_id=user.user_id)
    yield
    user_iphistory_manager.delete_user_ip_history_cache(user_id=user.user_id)


@pytest.fixture(scope="session")
def geoipinfo_manager(thl_web_rw, thl_redis_config) -> "GeoIpInfoManager":
    """Session-scoped GeoIpInfoManager (DB + redis) on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.ipinfo import GeoIpInfoManager

    return GeoIpInfoManager(pg_config=thl_web_rw, redis_config=thl_redis_config)


@pytest.fixture(scope="session")
def maxmind_basic_manager() -> "MaxmindBasicManager":
    """Session-scoped MaxmindBasicManager; no DB.

    NOTE(review): data_dir is hard-coded to /tmp/ — verify this is writable
    and acceptable on CI workers.
    """
    from generalresearch.managers.thl.maxmind.basic import (
        MaxmindBasicManager,
    )

    return MaxmindBasicManager(data_dir="/tmp/")


@pytest.fixture(scope="session")
def maxmind_manager(thl_web_rw, thl_redis_config) -> "MaxmindManager":
    """Session-scoped MaxmindManager (DB + redis) on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path

    from generalresearch.managers.thl.maxmind import MaxmindManager

    return MaxmindManager(pg_config=thl_web_rw, redis_config=thl_redis_config)
+
+
@pytest.fixture(scope="session")
def cashout_method_manager(thl_web_rw):
    """Session-scoped CashoutMethodManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.cashout_method import (
        CashoutMethodManager,
    )

    return CashoutMethodManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def event_manager(thl_redis_config):
    """Session-scoped EventManager; redis-only."""
    from generalresearch.managers.events import EventManager

    return EventManager(redis_config=thl_redis_config)


@pytest.fixture(scope="session")
def user_streak_manager(thl_web_rw):
    """Session-scoped UserStreakManager on the unit-test DB."""
    assert "/unittest-" in thl_web_rw.dsn.path
    from generalresearch.managers.thl.user_streak import (
        UserStreakManager,
    )

    return UserStreakManager(pg_config=thl_web_rw)


@pytest.fixture(scope="session")
def uqa_db_index(thl_web_rw):
    """Placeholder for ensuring a hand-made UQA index exists.

    NOTE(review): the index creation below is disabled (it is MySQL-flavored
    SQL and the only consumer of the file's ``pymysql`` import), so this
    fixture is currently a no-op kept for dependency ordering — confirm
    before re-enabling.
    """
    # There were some custom indices created not through django.
    # Make sure the index used in the index hint exists
    assert "/unittest-" in thl_web_rw.dsn.path

    # query = f"""create index idx_user_id
    # on `{thl_web_rw.db}`.marketplace_userquestionanswer (user_id);"""
    # try:
    # thl_web_rw.execute_sql_query(query, commit=True)
    # except pymysql.OperationalError as e:
    # if "Duplicate key name 'idx_user_id'" not in str(e):
    # raise
    return None


@pytest.fixture(scope="session")
def delete_cashoutmethod_db(thl_web_rw) -> Callable:
    """Return a callable that wipes the accounting_cashoutmethod table."""
    def _delete_cashoutmethod_db():
        thl_web_rw.execute_write(
            query="DELETE FROM accounting_cashoutmethod;",
        )

    return _delete_cashoutmethod_db


@pytest.fixture(scope="session")
def setup_cashoutmethod_db(cashout_method_manager, delete_cashoutmethod_db):
    """Wipe and re-seed cashout methods (Tango examples + both AMT methods)."""
    delete_cashoutmethod_db()
    for x in EXAMPLE_TANGO_CASHOUT_METHODS:
        cashout_method_manager.create(x)
    cashout_method_manager.create(AMT_ASSIGNMENT_CASHOUT_METHOD)
    cashout_method_manager.create(AMT_BONUS_CASHOUT_METHOD)
    return None
+
+
+# === THL: Marketplaces ===
+
+
@pytest.fixture(scope="session")
def spectrum_manager(spectrum_rw):
    """Session-scoped SpectrumSurveyManager.

    NOTE(review): no unittest-DSN guard here, unlike the THL fixtures —
    confirm the ``spectrum_rw`` fixture already enforces a test database.
    """
    from generalresearch.managers.spectrum.survey import (
        SpectrumSurveyManager,
    )

    return SpectrumSurveyManager(sql_helper=spectrum_rw)
+
+
+# === GR ===
@pytest.fixture(scope="session")
def business_manager(gr_db, gr_redis_config) -> "BusinessManager":
    """Session-scoped BusinessManager on the unit-test GR DB."""
    from generalresearch.redis_helper import RedisConfig

    assert "/unittest-" in gr_db.dsn.path
    # Extra guard: the redis fixture must be a config object, not a client.
    assert isinstance(gr_redis_config, RedisConfig)

    from generalresearch.managers.gr.business import BusinessManager

    return BusinessManager(
        pg_config=gr_db,
        redis_config=gr_redis_config,
    )


@pytest.fixture(scope="session")
def business_address_manager(gr_db) -> "BusinessAddressManager":
    """Session-scoped BusinessAddressManager on the unit-test GR DB."""
    assert "/unittest-" in gr_db.dsn.path

    from generalresearch.managers.gr.business import BusinessAddressManager

    return BusinessAddressManager(pg_config=gr_db)


@pytest.fixture(scope="session")
def business_bank_account_manager(gr_db) -> "BusinessBankAccountManager":
    """Session-scoped BusinessBankAccountManager on the unit-test GR DB."""
    assert "/unittest-" in gr_db.dsn.path

    from generalresearch.managers.gr.business import (
        BusinessBankAccountManager,
    )

    return BusinessBankAccountManager(pg_config=gr_db)


@pytest.fixture(scope="session")
def team_manager(gr_db, gr_redis_config) -> "TeamManager":
    """Session-scoped TeamManager (DB + redis) on the unit-test GR DB."""
    assert "/unittest-" in gr_db.dsn.path

    from generalresearch.managers.gr.team import TeamManager

    return TeamManager(pg_config=gr_db, redis_config=gr_redis_config)


@pytest.fixture(scope="session")
def gr_um(gr_db, gr_redis_config) -> "GRUserManager":
    """Session-scoped GRUserManager (DB + redis) on the unit-test GR DB."""
    assert "/unittest-" in gr_db.dsn.path

    from generalresearch.managers.gr.authentication import GRUserManager

    return GRUserManager(pg_config=gr_db, redis_config=gr_redis_config)


@pytest.fixture(scope="session")
def gr_tm(gr_db) -> "GRTokenManager":
    """Session-scoped GRTokenManager on the unit-test GR DB."""
    assert "/unittest-" in gr_db.dsn.path

    from generalresearch.managers.gr.authentication import GRTokenManager

    return GRTokenManager(pg_config=gr_db)


@pytest.fixture(scope="session")
def membership_manager(gr_db) -> "MembershipManager":
    """Session-scoped MembershipManager on the unit-test GR DB."""
    assert "/unittest-" in gr_db.dsn.path

    from generalresearch.managers.gr.team import MembershipManager

    return MembershipManager(pg_config=gr_db)
+
+
+# === GRL IQ ===
+
+
@pytest.fixture(scope="session")
def grliq_dm(grliq_db) -> "GrlIqDataManager":
    """Session-scoped GRL IQ forensic data manager on the unit-test DB."""
    assert "/unittest-" in grliq_db.dsn.path

    from generalresearch.grliq.managers.forensic_data import (
        GrlIqDataManager,
    )

    return GrlIqDataManager(postgres_config=grliq_db)


@pytest.fixture(scope="session")
def grliq_em(grliq_db) -> "GrlIqEventManager":
    """Session-scoped GRL IQ forensic event manager on the unit-test DB."""
    assert "/unittest-" in grliq_db.dsn.path

    from generalresearch.grliq.managers.forensic_events import (
        GrlIqEventManager,
    )

    return GrlIqEventManager(postgres_config=grliq_db)


@pytest.fixture(scope="session")
def grliq_crr(grliq_db) -> "GrlIqCategoryResultsReader":
    """Session-scoped GRL IQ category-results reader on the unit-test DB."""
    assert "/unittest-" in grliq_db.dsn.path

    from generalresearch.grliq.managers.forensic_results import (
        GrlIqCategoryResultsReader,
    )

    return GrlIqCategoryResultsReader(postgres_config=grliq_db)
+
+
@pytest.fixture(scope="session")
def delete_buyers_surveys(thl_web_rw, buyer_manager):
    """Purge all TESTING-source buyers, their surveys and survey stats.

    Deletes in FK-safe order (stats -> surveys -> buyers), then refreshes
    the buyer manager's caches so stale entries do not leak between runs.
    NOTE(review): the unittest-DSN assert is commented out — confirm.
    """
    # assert "/unittest-" in thl_web_rw.dsn.path
    thl_web_rw.execute_write(
        """
        DELETE FROM marketplace_surveystat
        WHERE survey_id IN (
            SELECT id
            FROM marketplace_survey
            WHERE source = %(source)s
        );""",
        params={"source": Source.TESTING.value},
    )
    thl_web_rw.execute_write(
        """
        DELETE FROM marketplace_survey
        WHERE buyer_id IN (
            SELECT id
            FROM marketplace_buyer
            WHERE source = %(source)s
        );""",
        params={"source": Source.TESTING.value},
    )
    thl_web_rw.execute_write(
        """
        DELETE from marketplace_buyer
        WHERE source=%(source)s;
        """,
        params={"source": Source.TESTING.value},
    )
    buyer_manager.populate_caches()
diff --git a/test_utils/managers/contest/__init__.py b/test_utils/managers/contest/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/managers/contest/__init__.py
diff --git a/test_utils/managers/contest/conftest.py b/test_utils/managers/contest/conftest.py
new file mode 100644
index 0000000..c2d4ef6
--- /dev/null
+++ b/test_utils/managers/contest/conftest.py
@@ -0,0 +1,295 @@
+from datetime import datetime, timezone
+from decimal import Decimal
+from typing import Callable, TYPE_CHECKING
+from uuid import uuid4
+
+import pytest
+
+from generalresearch.currency import USDCent
+
if TYPE_CHECKING:
    # Typing-only imports: zero runtime cost, no import cycles. The two
    # separate imports from .raffle in the original are merged into one.
    from generalresearch.models.thl.contest.contest import Contest
    from generalresearch.models.thl.contest import (
        ContestPrize,
        ContestEndCondition,
    )

    from generalresearch.models.thl.contest.definitions import (
        ContestType,
        ContestPrizeKind,
    )
    from generalresearch.models.thl.contest.io import contest_create_to_contest
    from generalresearch.models.thl.contest.leaderboard import (
        LeaderboardContestCreate,
    )
    from generalresearch.models.thl.contest.milestone import (
        MilestoneContestCreate,
        ContestEntryTrigger,
        MilestoneContestEndCondition,
    )
    from generalresearch.models.thl.contest.raffle import (
        ContestEntryType,
        RaffleContestCreate,
    )
    from generalresearch.models.thl.product import Product
    from generalresearch.models.thl.user import User
+
+
@pytest.fixture
def raffle_contest_create() -> "RaffleContestCreate":
    """Build a minimal cash-entry raffle create payload with one physical prize."""
    from generalresearch.models.thl.contest.raffle import (
        RaffleContestCreate,
    )
    from generalresearch.models.thl.contest import (
        ContestPrize,
        ContestEndCondition,
    )
    from generalresearch.models.thl.contest.definitions import (
        ContestType,
        ContestPrizeKind,
    )
    from generalresearch.models.thl.contest.raffle import (
        ContestEntryType,
    )

    # This is what we'll get from the fastapi endpoint
    return RaffleContestCreate(
        name="test",
        contest_type=ContestType.RAFFLE,
        entry_type=ContestEntryType.CASH,
        prizes=[
            ContestPrize(
                name="iPod 64GB White",
                kind=ContestPrizeKind.PHYSICAL,
                estimated_cash_value=USDCent(100),
            )
        ],
        end_condition=ContestEndCondition(target_entry_amount=USDCent(100)),
    )
+
+
@pytest.fixture
def raffle_contest_in_db(
    product_user_wallet_yes: "Product",
    raffle_contest_create: "RaffleContestCreate",
    contest_manager,
) -> "Contest":
    """Persist the raffle create payload and return the stored Contest."""
    return contest_manager.create(
        product_id=product_user_wallet_yes.uuid, contest_create=raffle_contest_create
    )


@pytest.fixture
def raffle_contest(
    product_user_wallet_yes: "Product", raffle_contest_create: "RaffleContestCreate"
) -> "Contest":
    """Convert the raffle create payload to a Contest model without persisting."""
    from generalresearch.models.thl.contest.io import contest_create_to_contest

    return contest_create_to_contest(
        product_id=product_user_wallet_yes.uuid, contest_create=raffle_contest_create
    )


@pytest.fixture(scope="function")
def raffle_contest_factory(
    product_user_wallet_yes: "Product",
    raffle_contest_create: "RaffleContestCreate",
    contest_manager,
) -> Callable:
    """Factory that overrides fields on the create payload and persists it.

    NOTE(review): each call mutates the shared ``raffle_contest_create``
    object, so overrides accumulate across calls within one test.
    """
    def _create_contest(**kwargs):
        raffle_contest_create.update(**kwargs)
        return contest_manager.create(
            product_id=product_user_wallet_yes.uuid,
            contest_create=raffle_contest_create,
        )

    return _create_contest
+
+
@pytest.fixture
def milestone_contest_create() -> "MilestoneContestCreate":
    """Build a milestone create payload: 3 task-completes win two prizes, max 5 winners."""
    from generalresearch.models.thl.contest import (
        ContestPrize,
    )
    from generalresearch.models.thl.contest.definitions import (
        ContestType,
        ContestPrizeKind,
    )
    from generalresearch.models.thl.contest.milestone import (
        MilestoneContestCreate,
        ContestEntryTrigger,
        MilestoneContestEndCondition,
    )

    # This is what we'll get from the fastapi endpoint
    return MilestoneContestCreate(
        name="Win a 50% bonus for 7 days and a $1 bonus after your first 3 completes!",
        description="only valid for the first 5 users",
        contest_type=ContestType.MILESTONE,
        prizes=[
            ContestPrize(
                name="50% for 7 days",
                kind=ContestPrizeKind.PROMOTION,
                estimated_cash_value=USDCent(0),
            ),
            ContestPrize(
                name="$1 Bonus",
                kind=ContestPrizeKind.CASH,
                cash_amount=USDCent(1_00),
                estimated_cash_value=USDCent(1_00),
            ),
        ],
        end_condition=MilestoneContestEndCondition(
            ends_at=datetime(year=2030, month=1, day=1, tzinfo=timezone.utc),
            max_winners=5,
        ),
        entry_trigger=ContestEntryTrigger.TASK_COMPLETE,
        target_amount=3,
    )
+
+
@pytest.fixture
def milestone_contest_in_db(
    product_user_wallet_yes: "Product",
    milestone_contest_create: "MilestoneContestCreate",
    contest_manager,
) -> "Contest":
    """Persist the milestone create payload and return the stored Contest."""
    return contest_manager.create(
        product_id=product_user_wallet_yes.uuid, contest_create=milestone_contest_create
    )


@pytest.fixture
def milestone_contest(
    product_user_wallet_yes: "Product",
    milestone_contest_create: "MilestoneContestCreate",
) -> "Contest":
    """Convert the milestone create payload to a Contest model without persisting."""
    from generalresearch.models.thl.contest.io import contest_create_to_contest

    return contest_create_to_contest(
        product_id=product_user_wallet_yes.uuid, contest_create=milestone_contest_create
    )


@pytest.fixture(scope="function")
def milestone_contest_factory(
    product_user_wallet_yes: "Product",
    milestone_contest_create: "MilestoneContestCreate",
    contest_manager,
) -> Callable:
    """Factory that overrides fields on the create payload and persists it.

    NOTE(review): mutates the shared ``milestone_contest_create`` object, so
    overrides accumulate across calls within one test.
    """
    def _create_contest(**kwargs):
        milestone_contest_create.update(**kwargs)
        return contest_manager.create(
            product_id=product_user_wallet_yes.uuid,
            contest_create=milestone_contest_create,
        )

    return _create_contest
+
+
@pytest.fixture
def leaderboard_contest_create(
    product_user_wallet_yes: "Product",
) -> "LeaderboardContestCreate":
    """Build a leaderboard create payload with cash prizes for ranks 1 and 2."""
    from generalresearch.models.thl.contest.leaderboard import (
        LeaderboardContestCreate,
    )
    from generalresearch.models.thl.contest import (
        ContestPrize,
    )
    from generalresearch.models.thl.contest.definitions import (
        ContestType,
        ContestPrizeKind,
    )

    # This is what we'll get from the fastapi endpoint
    return LeaderboardContestCreate(
        name="test",
        contest_type=ContestType.LEADERBOARD,
        prizes=[
            ContestPrize(
                name="$15 Cash",
                estimated_cash_value=USDCent(15_00),
                cash_amount=USDCent(15_00),
                kind=ContestPrizeKind.CASH,
                leaderboard_rank=1,
            ),
            ContestPrize(
                name="$10 Cash",
                estimated_cash_value=USDCent(10_00),
                cash_amount=USDCent(10_00),
                kind=ContestPrizeKind.CASH,
                leaderboard_rank=2,
            ),
        ],
        # Fixed-date key: presumably the leaderboard key format is
        # leaderboard:<product>:<region>:<period>:<date>:<metric> — confirm.
        leaderboard_key=f"leaderboard:{product_user_wallet_yes.uuid}:us:daily:2025-01-01:complete_count",
    )
+
+
@pytest.fixture
def leaderboard_contest_in_db(
    product_user_wallet_yes: "Product",
    leaderboard_contest_create: "LeaderboardContestCreate",
    contest_manager,
) -> "Contest":
    """Persist the leaderboard create payload and return the stored Contest."""
    return contest_manager.create(
        product_id=product_user_wallet_yes.uuid,
        contest_create=leaderboard_contest_create,
    )


@pytest.fixture
def leaderboard_contest(
    product_user_wallet_yes: "Product",
    leaderboard_contest_create: "LeaderboardContestCreate",
):
    """Convert the leaderboard create payload to a Contest model without persisting."""
    from generalresearch.models.thl.contest.io import contest_create_to_contest

    return contest_create_to_contest(
        product_id=product_user_wallet_yes.uuid,
        contest_create=leaderboard_contest_create,
    )


@pytest.fixture(scope="function")
def leaderboard_contest_factory(
    product_user_wallet_yes: "Product",
    leaderboard_contest_create: "LeaderboardContestCreate",
    contest_manager,
) -> Callable:
    """Factory that overrides fields on the create payload and persists it.

    NOTE(review): mutates the shared ``leaderboard_contest_create`` object,
    so overrides accumulate across calls within one test.
    """
    def _create_contest(**kwargs):
        leaderboard_contest_create.update(**kwargs)
        return contest_manager.create(
            product_id=product_user_wallet_yes.uuid,
            contest_create=leaderboard_contest_create,
        )

    return _create_contest
+
+
@pytest.fixture
def user_with_money(
    request, user_factory, product_user_wallet_yes: "Product", thl_lm
) -> "User":
    """Create a user whose wallet holds at least ``min_balance`` US cents.

    Indirect parametrization may supply ``request.param`` as
    ``{"min_balance": USDCent(...)}``; defaults to $1.00. Any shortfall is
    topped up with a single bonus ledger transaction.

    NOTE(review): the original left a debug ``print`` of the wallet balance;
    it has been removed.
    """
    from generalresearch.models.thl.user import User

    params = getattr(request, "param", None) or {}
    min_balance = int(params.get("min_balance", USDCent(1_00)))

    user: User = user_factory(product=product_user_wallet_yes)
    wallet = thl_lm.get_account_or_create_user_wallet(user)
    balance = thl_lm.get_account_balance(wallet)
    shortfall = min_balance - balance
    if shortfall > 0:
        # Top up via a bonus transaction; amount is presumably in dollars
        # while balances are in cents (hence / 100) — TODO confirm.
        thl_lm.create_tx_user_bonus(
            user=user,
            ref_uuid=uuid4().hex,
            description="bonus",
            amount=Decimal(shortfall) / 100,
        )

    return user
diff --git a/test_utils/managers/ledger/__init__.py b/test_utils/managers/ledger/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/managers/ledger/__init__.py
diff --git a/test_utils/managers/ledger/conftest.py b/test_utils/managers/ledger/conftest.py
new file mode 100644
index 0000000..b96d612
--- /dev/null
+++ b/test_utils/managers/ledger/conftest.py
@@ -0,0 +1,678 @@
+from datetime import datetime
+from decimal import Decimal
+from random import randint
+from typing import Optional, Dict, Callable, TYPE_CHECKING
+from uuid import uuid4
+
+import pytest
+
+from generalresearch.currency import USDCent
+from test_utils.models.conftest import (
+ product_factory,
+ user,
+ product,
+ user_factory,
+ product_user_wallet_no,
+ wall,
+ product_amt_true,
+ product_user_wallet_yes,
+ session_factory,
+ session,
+ wall_factory,
+ payout_config,
+)
+
+_ = (
+ user_factory,
+ product_user_wallet_no,
+ wall,
+ product_amt_true,
+ product_user_wallet_yes,
+ session_factory,
+ session,
+ wall_factory,
+ payout_config,
+)
+
+if TYPE_CHECKING:
+ from generalresearch.currency import LedgerCurrency
+ from generalresearch.models.thl.ledger import (
+ Direction,
+ AccountType,
+ LedgerTransaction,
+ )
+ from generalresearch.models.thl.ledger import (
+ LedgerEntry,
+ LedgerAccount,
+ )
+ from generalresearch.models.thl.payout import UserPayoutEvent
+
+
@pytest.fixture(scope="function")
def ledger_account(request, lm, currency) -> "LedgerAccount":
    """A freshly created ledger account (defaults: CASH, CREDIT-normal).

    Override via indirect parametrization with a dict, e.g.
    ``{"account_type": ..., "direction": ...}``.
    """
    from generalresearch.models.thl.ledger import (
        Direction,
        AccountType,
        LedgerAccount,
    )

    # BUG FIX: the old code read getattr(request, "account_type") and
    # getattr(request, "direction"), attributes pytest never sets -- indirect
    # parameters are exposed on request.param (same pattern as
    # user_with_money).
    params = getattr(request, "param", dict()) or {}
    account_type = params.get("account_type", AccountType.CASH)
    direction = params.get("direction", Direction.CREDIT)

    acct_uuid = uuid4().hex
    # Qualified name convention: "<currency>:<account_type>:<uuid>".
    qn = ":".join([currency, account_type, acct_uuid])

    acct_model = LedgerAccount(
        uuid=acct_uuid,
        display_name=f"test-{acct_uuid}",
        currency=currency,
        qualified_name=qn,
        account_type=account_type,
        normal_balance=direction,
    )
    return lm.create_account(account=acct_model)
+
+
@pytest.fixture(scope="function")
def ledger_account_factory(request, thl_lm, lm, currency) -> Callable:
    """Factory that creates a ledger account tied to a product.

    NOTE(review): ``request`` is unused here, so indirect parametrization has
    no effect on this fixture.
    """
    from generalresearch.models.thl.ledger import (
        Direction,
        AccountType,
        LedgerAccount,
    )

    def _ledger_account_factory(
        product,
        account_type: AccountType = AccountType.CASH,
        direction: Direction = Direction.CREDIT,
    ):
        # Ensure the product's BP wallet exists before creating the account.
        thl_lm.get_account_or_create_bp_wallet(product=product)
        acct_uuid = uuid4().hex
        # Qualified name convention: "<currency>:<account_type>:<uuid>".
        qn = ":".join([currency, account_type, acct_uuid])

        acct_model = LedgerAccount(
            uuid=acct_uuid,
            display_name=f"test-{acct_uuid}",
            currency=currency,
            qualified_name=qn,
            account_type=account_type,
            normal_balance=direction,
        )
        return lm.create_account(account=acct_model)

    return _ledger_account_factory
+
+
@pytest.fixture(scope="function")
def ledger_account_credit(request, lm, currency) -> "LedgerAccount":
    """A newly created REVENUE account with a CREDIT normal balance."""
    from generalresearch.models.thl.ledger import (
        AccountType,
        Direction,
        LedgerAccount,
    )

    account_uuid = uuid4().hex
    revenue_account = LedgerAccount(
        uuid=account_uuid,
        display_name=f"test-{account_uuid}",
        currency=currency,
        # Qualified name convention: "<currency>:<account_type>:<uuid>".
        qualified_name=":".join([currency, AccountType.REVENUE, account_uuid]),
        account_type=AccountType.REVENUE,
        normal_balance=Direction.CREDIT,
    )
    return lm.create_account(account=revenue_account)
+
+
@pytest.fixture(scope="function")
def ledger_account_debit(request, lm, currency) -> "LedgerAccount":
    """A newly created EXPENSE account with a DEBIT normal balance."""
    from generalresearch.models.thl.ledger import (
        AccountType,
        Direction,
        LedgerAccount,
    )

    account_uuid = uuid4().hex
    expense_account = LedgerAccount(
        uuid=account_uuid,
        display_name=f"test-{account_uuid}",
        currency=currency,
        # Qualified name convention: "<currency>:<account_type>:<uuid>".
        qualified_name=":".join([currency, AccountType.EXPENSE, account_uuid]),
        account_type=AccountType.EXPENSE,
        normal_balance=Direction.DEBIT,
    )
    return lm.create_account(account=expense_account)
+
+
@pytest.fixture(scope="function")
def tag(request, lm) -> str:
    """A transaction tag; defaults to "<TEST currency>:<random hex>".

    Override via indirect parametrization (``request.param``).
    """
    # BUG FIX: the old code checked hasattr(request, "tag"), which is never
    # true -- pytest exposes indirect parameters on request.param, so the
    # override path was dead code.
    if hasattr(request, "param"):
        return request.param

    from generalresearch.currency import LedgerCurrency

    return f"{LedgerCurrency.TEST}:{uuid4().hex}"
+
+
@pytest.fixture(scope="function")
def usd_cent(request) -> "USDCent":
    """A random USDCent amount in [99, 9_999].

    Override via indirect parametrization (``request.param``).
    """
    # BUG FIX: the old code checked hasattr(request, "usd_cent"), which is
    # never true -- indirect parameters live on request.param, so the
    # override path was dead code.
    if hasattr(request, "param"):
        return request.param
    return USDCent(randint(99, 9_999))
+
+
@pytest.fixture(scope="function")
def bp_payout_event(
    product, usd_cent, business_payout_event_manager, thl_lm
) -> "UserPayoutEvent":
    """A BP payout event for ``product`` worth ``usd_cent``."""
    # Skip the wallet-balance and one-per-day guards so tests can create
    # payout events unconditionally.
    return business_payout_event_manager.create_bp_payout_event(
        thl_ledger_manager=thl_lm,
        product=product,
        amount=usd_cent,
        skip_wallet_balance_check=True,
        skip_one_per_day_check=True,
    )
+
+
@pytest.fixture
def bp_payout_event_factory(brokerage_product_payout_event_manager, thl_lm) -> Callable:
    """Factory producing BP payout events with the safety guards disabled."""
    from generalresearch.models.thl.product import Product
    from generalresearch.currency import USDCent

    def _create_bp_payout_event(
        product: Product, usd_cent: USDCent, ext_ref_id: Optional[str] = None
    ):
        # Skip balance / rate-limit guards so tests can create events freely.
        manager = brokerage_product_payout_event_manager
        return manager.create_bp_payout_event(
            thl_ledger_manager=thl_lm,
            product=product,
            amount=usd_cent,
            ext_ref_id=ext_ref_id,
            skip_wallet_balance_check=True,
            skip_one_per_day_check=True,
        )

    return _create_bp_payout_event
+
+
@pytest.fixture(scope="function")
def currency(lm) -> "LedgerCurrency":
    """The active ledger currency; the ledger manager is the source of truth."""
    return lm.currency
+
+
@pytest.fixture(scope="function")
def tx_metadata(request) -> Optional[Dict[str, str]]:
    """A single-entry metadata dict with random key/value.

    Override via indirect parametrization (``request.param``).
    """
    # BUG FIX: the old code checked hasattr(request, "tx_metadata"), which is
    # never true -- indirect parameters live on request.param, so the
    # override path was dead code.
    if hasattr(request, "param"):
        return request.param
    return {f"key-{uuid4().hex[:10]}": uuid4().hex}
+
+
@pytest.fixture(scope="function")
def ledger_tx(
    request,
    ledger_account_credit,
    ledger_account_debit,
    tag,
    currency,
    tx_metadata,
    lm,
) -> "LedgerTransaction":
    """A balanced two-entry transaction of exactly 100 cents."""
    from generalresearch.models.thl.ledger import Direction, LedgerEntry

    cents = int(Decimal("1.00") * 100)
    credit_entry = LedgerEntry(
        direction=Direction.CREDIT,
        account_uuid=ledger_account_credit.uuid,
        amount=cents,
    )
    debit_entry = LedgerEntry(
        direction=Direction.DEBIT,
        account_uuid=ledger_account_debit.uuid,
        amount=cents,
    )
    return lm.create_tx(
        entries=[credit_entry, debit_entry], tag=tag, metadata=tx_metadata
    )
+
+
@pytest.fixture(scope="function")
def create_main_accounts(lm, currency) -> Callable:
    """Callable that ensures the two core accounts exist: the
    task-complete revenue account and the operating cash account."""

    def _create_main_accounts():
        from generalresearch.models.thl.ledger import (
            LedgerAccount,
            Direction,
            AccountType,
        )

        # Revenue recognized when tasks complete.
        account = LedgerAccount(
            display_name="Cash flow task complete",
            qualified_name=f"{currency.value}:revenue:task_complete",
            normal_balance=Direction.CREDIT,
            account_type=AccountType.REVENUE,
            # NOTE(review): uses lm.currency here but the ``currency``
            # fixture below; the fixture returns lm.currency so they agree
            # today -- confirm if that ever changes.
            currency=lm.currency,
        )
        lm.get_account_or_create(account=account)

        # Operating cash (DEBIT-normal asset account).
        account = LedgerAccount(
            display_name="Operating Cash Account",
            qualified_name=f"{currency.value}:cash",
            normal_balance=Direction.DEBIT,
            account_type=AccountType.CASH,
            currency=currency,
        )

        lm.get_account_or_create(account=account)

    return _create_main_accounts
+
+
@pytest.fixture(scope="function")
def delete_ledger_db(thl_web_rw) -> Callable:
    """Callable that empties every ledger table (FK-dependent tables first)."""

    def _delete_ledger_db():
        # Order matters: child tables must be cleared before ledger_account.
        tables = (
            "ledger_transactionmetadata",
            "ledger_entry",
            "ledger_transaction",
            "ledger_account",
        )
        for tbl in tables:
            thl_web_rw.execute_write(query=f"DELETE FROM {tbl};")

    return _delete_ledger_db
+
+
@pytest.fixture(scope="function")
def wipe_main_accounts(thl_web_rw, lm, currency) -> Callable:
    """Callable that deletes every row (metadata, entries, transactions,
    accounts) touching the two core accounts created by
    ``create_main_accounts``."""

    def _wipe_main_accounts():
        db_table = thl_web_rw.db_name
        qual_names = [
            f"{currency.value}:revenue:task_complete",
            f"{currency.value}:cash",
        ]

        # Collect the ids of all rows reachable from the two accounts so
        # they can be deleted child-first without violating FKs.
        res = thl_web_rw.execute_sql_query(
            query=f"""
                SELECT lt.id as ltid, le.id as leid, tmd.id as tmdid, la.uuid as lauuid
                FROM `{db_table}`.`ledger_transaction` AS lt
                LEFT JOIN `{db_table}`.ledger_entry le
                    ON lt.id = le.transaction_id
                LEFT JOIN `{db_table}`.ledger_account la
                    ON la.uuid = le.account_id
                LEFT JOIN `{db_table}`.ledger_transactionmetadata tmd
                    ON lt.id = tmd.transaction_id
                WHERE la.qualified_name IN %s
            """,
            params=[qual_names],
        )

        # De-duplicate ids per table, dropping NULLs from the LEFT JOINs.
        # NOTE(review): if any of these sets is empty the "IN %s" clause
        # below may render as "IN ()" -- confirm the driver tolerates that.
        lt = {x["ltid"] for x in res if x["ltid"]}
        le = {x["leid"] for x in res if x["leid"]}
        tmd = {x["tmdid"] for x in res if x["tmdid"]}
        la = {x["lauuid"] for x in res if x["lauuid"]}

        # Delete child-first: metadata -> entries -> transactions -> accounts.
        thl_web_rw.execute_sql_query(
            query=f"""
                DELETE FROM `{db_table}`.`ledger_transactionmetadata`
                WHERE id IN %s
            """,
            params=[tmd],
            commit=True,
        )

        thl_web_rw.execute_sql_query(
            query=f"""
                DELETE FROM `{db_table}`.`ledger_entry`
                WHERE id IN %s
            """,
            params=[le],
            commit=True,
        )

        thl_web_rw.execute_sql_query(
            query=f"""
                DELETE FROM `{db_table}`.`ledger_transaction`
                WHERE id IN %s
            """,
            params=[lt],
            commit=True,
        )

        thl_web_rw.execute_sql_query(
            query=f"""
                DELETE FROM `{db_table}`.`ledger_account`
                WHERE uuid IN %s
            """,
            params=[la],
            commit=True,
        )

    return _wipe_main_accounts
+
+
@pytest.fixture(scope="function")
def account_cash(lm, currency) -> "LedgerAccount":
    """The operating cash account (DEBIT-normal), created on demand."""
    from generalresearch.models.thl.ledger import (
        AccountType,
        Direction,
        LedgerAccount,
    )

    cash_account = LedgerAccount(
        display_name="Operating Cash Account",
        qualified_name=f"{currency.value}:cash",
        normal_balance=Direction.DEBIT,
        account_type=AccountType.CASH,
        currency=currency,
    )
    return lm.get_account_or_create(account=cash_account)
+
+
@pytest.fixture(scope="function")
def account_revenue_task_complete(lm, currency) -> "LedgerAccount":
    """The task-complete revenue account (CREDIT-normal), created on demand."""
    from generalresearch.models.thl.ledger import (
        AccountType,
        Direction,
        LedgerAccount,
    )

    revenue_account = LedgerAccount(
        display_name="Cash flow task complete",
        qualified_name=f"{currency.value}:revenue:task_complete",
        normal_balance=Direction.CREDIT,
        account_type=AccountType.REVENUE,
        currency=currency,
    )
    return lm.get_account_or_create(account=revenue_account)
+
+
@pytest.fixture(scope="function")
def account_expense_tango(lm, currency) -> "LedgerAccount":
    """The Tango fee expense account (DEBIT-normal), created on demand."""
    from generalresearch.models.thl.ledger import (
        AccountType,
        Direction,
        LedgerAccount,
    )

    fee_account = LedgerAccount(
        display_name="Tango Fee",
        qualified_name=f"{currency.value}:expense:tango_fee",
        normal_balance=Direction.DEBIT,
        account_type=AccountType.EXPENSE,
        currency=currency,
    )
    return lm.get_account_or_create(account=fee_account)
+
+
@pytest.fixture(scope="function")
def user_account_user_wallet(lm, user, currency) -> "LedgerAccount":
    """``user``'s wallet account (CREDIT-normal), created if missing."""
    from generalresearch.models.thl.ledger import (
        AccountType,
        Direction,
        LedgerAccount,
    )

    wallet = LedgerAccount(
        display_name=f"{user.uuid} Wallet",
        qualified_name=f"{currency.value}:user_wallet:{user.uuid}",
        normal_balance=Direction.CREDIT,
        account_type=AccountType.USER_WALLET,
        # Back-reference so the account can be traced to its user.
        reference_type="user",
        reference_uuid=user.uuid,
        currency=currency,
    )
    return lm.get_account_or_create(account=wallet)
+
+
@pytest.fixture(scope="function")
def product_account_bp_wallet(lm, product, currency) -> "LedgerAccount":
    """``product``'s BP wallet account (CREDIT-normal), created if missing."""
    from generalresearch.models.thl.ledger import (
        AccountType,
        Direction,
        LedgerAccount,
    )

    wallet = LedgerAccount.model_validate(
        dict(
            display_name=f"{product.name} Wallet",
            qualified_name=f"{currency.value}:bp_wallet:{product.uuid}",
            normal_balance=Direction.CREDIT,
            account_type=AccountType.BP_WALLET,
            # Back-reference so the account can be traced to its product.
            reference_type="bp",
            reference_uuid=product.uuid,
            currency=currency,
        )
    )
    return lm.get_account_or_create(account=wallet)
+
+
@pytest.fixture(scope="function")
def setup_accounts(product_factory, lm, user, currency) -> None:
    """Create a small chart of accounts for tests: two products, each with a
    commission-revenue account and a BP wallet, plus a wallet for ``user``."""
    from generalresearch.models.thl.ledger import (
        LedgerAccount,
        Direction,
        AccountType,
    )

    def _revenue_account(p):
        # Revenue recognized from a product's commission.
        return LedgerAccount(
            display_name=f"Revenue from {p.name} commission",
            qualified_name=f"{currency.value}:revenue:bp_commission:{p.uuid}",
            normal_balance=Direction.CREDIT,
            account_type=AccountType.REVENUE,
            reference_type="bp",
            reference_uuid=p.uuid,
            currency=currency,
        )

    def _wallet_account(p):
        # The product's (business partner's) wallet.
        return LedgerAccount(
            display_name=f"{p.name} Wallet",
            qualified_name=f"{currency.value}:bp_wallet:{p.uuid}",
            normal_balance=Direction.CREDIT,
            account_type=AccountType.BP_WALLET,
            reference_type="bp",
            reference_uuid=p.uuid,
            currency=currency,
        )

    # BP's wallet and a revenue from their commissions account.
    p1 = product_factory()
    lm.get_account_or_create(account=_revenue_account(p1))
    lm.get_account_or_create(account=_wallet_account(p1))

    # BP's wallet, user's wallet, and a revenue from their commissions account.
    p2 = product_factory()
    lm.get_account_or_create(account=_revenue_account(p2))
    lm.get_account_or_create(account=_wallet_account(p2))

    # BUG FIX: the user's wallet was previously created with the hard-coded
    # string currency="test" while every other account here uses the
    # ``currency`` fixture; use the fixture so all accounts share one
    # currency.
    account = LedgerAccount(
        display_name=f"{user.uuid} Wallet",
        qualified_name=f"{currency.value}:user_wallet:{user.uuid}",
        normal_balance=Direction.CREDIT,
        account_type=AccountType.USER_WALLET,
        reference_type="user",
        reference_uuid=user.uuid,
        currency=currency,
    )
    lm.get_account_or_create(account=account)
+
+
@pytest.fixture(scope="function")
def session_with_tx_factory(
    user_factory,
    product,
    session_factory,
    session_manager,
    wall_manager,
    utc_hour_ago,
    thl_lm,
) -> Callable:
    """Factory building a finished two-wall session plus its ledger
    transactions (task-complete and BP payment)."""
    from generalresearch.models.thl.session import (
        Status,
        Session,
        StatusCode1,
    )
    from generalresearch.models.thl.user import User

    def _session_with_tx_factory(
        user: User,
        final_status: Status = Status.COMPLETE,
        wall_req_cpi: Decimal = Decimal(".50"),
        started: datetime = utc_hour_ago,
    ) -> Session:
        s: Session = session_factory(
            user=user,
            wall_count=2,
            final_status=final_status,
            wall_req_cpi=wall_req_cpi,
            started=started,
        )
        last_wall = s.wall_events[-1]

        # Persist the last wall's terminal state via the manager (the
        # factory only finished it in memory).
        wall_manager.finish(
            wall=last_wall,
            status=Status.COMPLETE,
            status_code_1=StatusCode1.COMPLETE,
            finished=last_wall.finished,
        )

        # Derive the session outcome and payouts from its walls, then close
        # the session with them.
        status, status_code_1 = s.determine_session_status()
        thl_net, commission_amount, bp_pay, user_pay = s.determine_payments()
        session_manager.finish_with_status(
            session=s,
            finished=last_wall.finished,
            payout=bp_pay,
            user_payout=user_pay,
            status=status,
            status_code_1=status_code_1,
        )

        # Record the ledger side: user task-complete tx, then BP payment.
        # ``force=True`` bypasses the managers' usual guards for tests.
        thl_lm.create_tx_task_complete(
            wall=last_wall,
            user=user,
            created=last_wall.finished,
            force=True,
        )

        thl_lm.create_tx_bp_payment(session=s, created=last_wall.finished, force=True)

        return s

    return _session_with_tx_factory
+
+
@pytest.fixture(scope="function")
def adj_to_fail_with_tx_factory(session_manager, wall_manager, thl_lm) -> Callable:
    """Factory that adjusts a session's last wall to FAIL and records the
    corresponding ledger adjustments."""
    from generalresearch.models.thl.session import (
        Session,
    )
    from datetime import timedelta
    from generalresearch.models.thl.definitions import WallAdjustedStatus

    def _adj_to_fail_with_tx_factory(
        session: Session,
        created: datetime,
    ) -> None:
        # Operate on the most recent wall of the session.
        w1 = wall_manager.get_wall_events(session_id=session.id)[-1]

        # This is defined in `thl-grpc/thl/user_quality_history/recons.py:150`
        # so we can't use it as part of this test anyway to add rows to the
        # thl_taskadjustment table anyway.. until we created a
        # TaskAdjustment Manager to put into py-utils!

        # create_task_adjustment_event(
        #     wall,
        #     user,
        #     adjusted_status,
        #     amount_usd=amount_usd,
        #     alert_time=alert_time,
        #     ext_status_code=ext_status_code,
        # )

        # Zero out the wall's CPI and mark it adjusted-to-fail.
        wall_manager.adjust_status(
            wall=w1,
            adjusted_status=WallAdjustedStatus.ADJUSTED_TO_FAIL,
            adjusted_cpi=Decimal("0.00"),
            adjusted_timestamp=created,
        )

        # Offset timestamps by milliseconds so the ledger events order
        # deterministically after the adjustment.
        thl_lm.create_tx_task_adjustment(
            wall=w1,
            user=session.user,
            created=created + timedelta(milliseconds=1),
        )

        # Refresh the wall list, recompute the session status, then record
        # the BP-side adjustment.
        session.wall_events = wall_manager.get_wall_events(session_id=session.id)
        session_manager.adjust_status(session=session)

        thl_lm.create_tx_bp_adjustment(
            session=session, created=created + timedelta(milliseconds=2)
        )

        return None

    return _adj_to_fail_with_tx_factory
+
+
@pytest.fixture(scope="function")
def adj_to_complete_with_tx_factory(session_manager, wall_manager, thl_lm) -> Callable:
    """Factory that adjusts a session's last wall to COMPLETE and records the
    corresponding ledger adjustments (mirror of adj_to_fail_with_tx_factory)."""
    from generalresearch.models.thl.session import (
        Session,
    )
    from datetime import timedelta
    from generalresearch.models.thl.definitions import WallAdjustedStatus

    def _adj_to_complete_with_tx_factory(
        session: Session,
        created: datetime,
    ) -> None:
        # Operate on the most recent wall of the session.
        w1 = wall_manager.get_wall_events(session_id=session.id)[-1]

        # Restore the wall's required CPI and mark it adjusted-to-complete.
        wall_manager.adjust_status(
            wall=w1,
            adjusted_status=WallAdjustedStatus.ADJUSTED_TO_COMPLETE,
            adjusted_cpi=w1.req_cpi,
            adjusted_timestamp=created,
        )

        # Millisecond offsets keep the ledger events deterministically
        # ordered after the adjustment.
        thl_lm.create_tx_task_adjustment(
            wall=w1,
            user=session.user,
            created=created + timedelta(milliseconds=1),
        )

        # Refresh the wall list, recompute the session status, then record
        # the BP-side adjustment.
        session.wall_events = wall_manager.get_wall_events(session_id=session.id)
        session_manager.adjust_status(session=session)

        thl_lm.create_tx_bp_adjustment(
            session=session, created=created + timedelta(milliseconds=2)
        )

        return None

    return _adj_to_complete_with_tx_factory
diff --git a/test_utils/managers/upk/__init__.py b/test_utils/managers/upk/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/managers/upk/__init__.py
diff --git a/test_utils/managers/upk/conftest.py b/test_utils/managers/upk/conftest.py
new file mode 100644
index 0000000..61be924
--- /dev/null
+++ b/test_utils/managers/upk/conftest.py
@@ -0,0 +1,161 @@
+import os
+import time
+from typing import Optional
+from uuid import UUID
+
+import pandas as pd
+import pytest
+
+from generalresearch.pg_helper import PostgresConfig
+
+
def insert_data_from_csv(
    thl_web_rw: "PostgresConfig",
    table_name: str,
    fp: Optional[str] = None,
    disable_fk_checks: bool = False,
    df: Optional[pd.DataFrame] = None,
):
    """Bulk-insert rows into ``table_name`` from a CSV file OR a DataFrame.

    Exactly one of ``fp`` / ``df`` must be supplied. NaNs become SQL NULLs,
    and 36-char dashed ``id`` values are normalized to 32-char UUID hex.

    Raises:
        AssertionError: if neither or both of ``fp`` and ``df`` are given.
    """
    # BUG FIX: the old check was
    #     fp is not None or df is not None and not (fp is not None and df is not None)
    # `and` binds tighter than `or`, so supplying BOTH fp and df passed the
    # assert. We want exactly one source (XOR).
    assert (fp is None) != (df is None), "provide exactly one of fp or df"
    if fp:
        df = pd.read_csv(fp, dtype=str)
    # Replace pandas NaN with None so the driver writes SQL NULL.
    df = df.where(pd.notnull(df), None)
    cols = list(df.columns)
    col_str = ", ".join(cols)
    values_str = ", ".join(["%s"] * len(cols))
    # Dump CSVs store 36-char dashed UUIDs; the schema stores 32-char hex.
    if "id" in df.columns and len(df["id"].iloc[0]) == 36:
        df["id"] = df["id"].map(lambda x: UUID(x).hex)
    args = df.to_dict("tight")["data"]

    with thl_web_rw.make_connection() as conn:
        with conn.cursor() as c:
            if disable_fk_checks:
                # Postgres: defer FK constraint checks until commit.
                c.execute("SET CONSTRAINTS ALL DEFERRED")
            c.executemany(
                f"INSERT INTO {table_name} ({col_str}) VALUES ({values_str})",
                params_seq=args,
            )
        conn.commit()
+
+
@pytest.fixture(scope="session")
def category_data(thl_web_rw, category_manager) -> None:
    """Load the marketplace_category dump, then back-fill parent_id links.

    FK checks are deferred during the insert because parent rows may be
    inserted after their children.
    """
    fp = os.path.join(os.path.dirname(__file__), "marketplace_category.csv.gz")
    insert_data_from_csv(
        thl_web_rw,
        fp=fp,
        table_name="marketplace_category",
        disable_fk_checks=True,
    )
    # Don't strictly need to do this, but probably we should
    category_manager.populate_caches()
    cats = category_manager.categories.values()
    # Map each category path to its DB id so parents can be resolved by path.
    path_id = {c.path: c.id for c in cats}
    data = [
        {"id": c.id, "parent_id": path_id[c.parent_path]} for c in cats if c.parent_path
    ]
    query = """
        UPDATE marketplace_category
        SET parent_id = %(parent_id)s
        WHERE id = %(id)s;
    """
    with thl_web_rw.make_connection() as conn:
        with conn.cursor() as c:
            c.executemany(query=query, params_seq=data)
        conn.commit()
+
+
@pytest.fixture(scope="session")
def property_data(thl_web_rw) -> None:
    """Load the marketplace_property dump into the test DB."""
    csv_path = os.path.join(
        os.path.dirname(__file__), "marketplace_property.csv.gz"
    )
    insert_data_from_csv(thl_web_rw, fp=csv_path, table_name="marketplace_property")
+
+
@pytest.fixture(scope="session")
def item_data(thl_web_rw) -> None:
    """Load the marketplace_item dump into the test DB."""
    csv_path = os.path.join(os.path.dirname(__file__), "marketplace_item.csv.gz")
    insert_data_from_csv(thl_web_rw, fp=csv_path, table_name="marketplace_item")
+
+
@pytest.fixture(scope="session")
def propertycategoryassociation_data(
    thl_web_rw, category_data, property_data, category_manager
) -> None:
    """Load property<->category associations, translating the dump's category
    UUIDs into the freshly assigned DB primary keys."""
    table_name = "marketplace_propertycategoryassociation"
    fp = os.path.join(os.path.dirname(__file__), f"{table_name}.csv.gz")
    # Need to lookup category pk from uuid
    category_manager.populate_caches()
    df = pd.read_csv(fp, dtype=str)
    df["category_id"] = df["category_id"].map(
        lambda x: category_manager.categories[x].id
    )
    insert_data_from_csv(thl_web_rw, df=df, table_name=table_name)
+
+
@pytest.fixture(scope="session")
def propertycountry_data(thl_web_rw, property_data) -> None:
    """Load the marketplace_propertycountry dump (requires properties)."""
    csv_path = os.path.join(
        os.path.dirname(__file__), "marketplace_propertycountry.csv.gz"
    )
    insert_data_from_csv(
        thl_web_rw, fp=csv_path, table_name="marketplace_propertycountry"
    )
+
+
@pytest.fixture(scope="session")
def propertymarketplaceassociation_data(thl_web_rw, property_data) -> None:
    """Load the property<->marketplace associations (requires properties)."""
    table = "marketplace_propertymarketplaceassociation"
    csv_path = os.path.join(os.path.dirname(__file__), f"{table}.csv.gz")
    insert_data_from_csv(thl_web_rw, fp=csv_path, table_name=table)
+
+
@pytest.fixture(scope="session")
def propertyitemrange_data(thl_web_rw, property_data, item_data) -> None:
    """Load property item ranges (requires properties and items)."""
    table = "marketplace_propertyitemrange"
    csv_path = os.path.join(os.path.dirname(__file__), f"{table}.csv.gz")
    insert_data_from_csv(thl_web_rw, fp=csv_path, table_name=table)
+
+
@pytest.fixture(scope="session")
def question_data(thl_web_rw) -> None:
    """Load the marketplace_question dump (FK checks deferred)."""
    table = "marketplace_question"
    csv_path = os.path.join(os.path.dirname(__file__), f"{table}.csv.gz")
    insert_data_from_csv(
        thl_web_rw, fp=csv_path, table_name=table, disable_fk_checks=True
    )
+
+
@pytest.fixture(scope="session")
def clear_upk_tables(thl_web_rw):
    """Truncate every marketplace table so the session starts from scratch."""
    upk_tables = (
        "marketplace_propertyitemrange",
        "marketplace_propertymarketplaceassociation",
        "marketplace_propertycategoryassociation",
        "marketplace_category",
        "marketplace_item",
        "marketplace_property",
        "marketplace_propertycountry",
        "marketplace_question",
    )

    with thl_web_rw.make_connection() as conn:
        with conn.cursor() as c:
            # CASCADE clears FK-dependent rows; RESTART IDENTITY resets serials.
            c.execute(
                "TRUNCATE {} RESTART IDENTITY CASCADE;".format(", ".join(upk_tables))
            )
        conn.commit()
+
+
@pytest.fixture(scope="session")
def upk_data(
    clear_upk_tables,
    category_data,
    property_data,
    item_data,
    propertycategoryassociation_data,
    propertycountry_data,
    propertymarketplaceassociation_data,
    propertyitemrange_data,
    question_data,
) -> None:
    """Aggregate fixture: requesting it wipes and reloads every marketplace
    dump (ordering is driven by the fixtures' own dependencies)."""
    # Wait a second to make sure the HarmonizerCache refresh loop pulls these in
    time.sleep(2)
+
+
def test_fixtures(upk_data):
    # Smoke test: merely requesting upk_data exercises the full data load.
    pass
diff --git a/test_utils/managers/upk/marketplace_category.csv.gz b/test_utils/managers/upk/marketplace_category.csv.gz
new file mode 100644
index 0000000..0f8ec1c
--- /dev/null
+++ b/test_utils/managers/upk/marketplace_category.csv.gz
Binary files differ
diff --git a/test_utils/managers/upk/marketplace_item.csv.gz b/test_utils/managers/upk/marketplace_item.csv.gz
new file mode 100644
index 0000000..c12c5d8
--- /dev/null
+++ b/test_utils/managers/upk/marketplace_item.csv.gz
Binary files differ
diff --git a/test_utils/managers/upk/marketplace_property.csv.gz b/test_utils/managers/upk/marketplace_property.csv.gz
new file mode 100644
index 0000000..a781d1d
--- /dev/null
+++ b/test_utils/managers/upk/marketplace_property.csv.gz
Binary files differ
diff --git a/test_utils/managers/upk/marketplace_propertycategoryassociation.csv.gz b/test_utils/managers/upk/marketplace_propertycategoryassociation.csv.gz
new file mode 100644
index 0000000..5b4ea19
--- /dev/null
+++ b/test_utils/managers/upk/marketplace_propertycategoryassociation.csv.gz
Binary files differ
diff --git a/test_utils/managers/upk/marketplace_propertycountry.csv.gz b/test_utils/managers/upk/marketplace_propertycountry.csv.gz
new file mode 100644
index 0000000..5d2a637
--- /dev/null
+++ b/test_utils/managers/upk/marketplace_propertycountry.csv.gz
Binary files differ
diff --git a/test_utils/managers/upk/marketplace_propertyitemrange.csv.gz b/test_utils/managers/upk/marketplace_propertyitemrange.csv.gz
new file mode 100644
index 0000000..84f4f0e
--- /dev/null
+++ b/test_utils/managers/upk/marketplace_propertyitemrange.csv.gz
Binary files differ
diff --git a/test_utils/managers/upk/marketplace_propertymarketplaceassociation.csv.gz b/test_utils/managers/upk/marketplace_propertymarketplaceassociation.csv.gz
new file mode 100644
index 0000000..6b9fd1c
--- /dev/null
+++ b/test_utils/managers/upk/marketplace_propertymarketplaceassociation.csv.gz
Binary files differ
diff --git a/test_utils/managers/upk/marketplace_question.csv.gz b/test_utils/managers/upk/marketplace_question.csv.gz
new file mode 100644
index 0000000..bcfc3ad
--- /dev/null
+++ b/test_utils/managers/upk/marketplace_question.csv.gz
Binary files differ
diff --git a/test_utils/models/__init__.py b/test_utils/models/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/models/__init__.py
diff --git a/test_utils/models/conftest.py b/test_utils/models/conftest.py
new file mode 100644
index 0000000..ecfd82b
--- /dev/null
+++ b/test_utils/models/conftest.py
@@ -0,0 +1,608 @@
+from datetime import datetime, timezone, timedelta
+from decimal import Decimal
+from random import randint, choice as randchoice
+from typing import Callable, TYPE_CHECKING, Optional, List, Dict
+from uuid import uuid4
+
+import pytest
+from pydantic import AwareDatetime, PositiveInt
+
+from generalresearch.models import Source
+from generalresearch.models.thl.definitions import (
+ WALL_ALLOWED_STATUS_STATUS_CODE,
+ Status,
+)
+from test_utils.managers.conftest import (
+ product_manager,
+ user_manager,
+ wall_manager,
+ session_manager,
+ gr_um,
+ membership_manager,
+ team_manager,
+ business_manager,
+ business_address_manager,
+)
+from generalresearch.models.thl.survey.model import Survey, Buyer
+
+if TYPE_CHECKING:
+ from generalresearch.models.thl.userhealth import AuditLog, AuditLogLevel
+ from generalresearch.models.thl.payout import UserPayoutEvent
+ from generalresearch.models.gr.authentication import GRUser, GRToken
+ from generalresearch.models.gr.team import Team, Membership
+ from generalresearch.models.gr.business import (
+ Business,
+ BusinessAddress,
+ BusinessBankAccount,
+ )
+ from generalresearch.models.thl.user import User
+ from generalresearch.models.thl.product import Product
+ from generalresearch.models.thl.session import Session, Wall
+ from generalresearch.currency import USDCent
+ from generalresearch.models.thl.product import (
+ PayoutConfig,
+ PayoutTransformation,
+ PayoutTransformationPercentArgs,
+ )
+ from generalresearch.models.thl.user_iphistory import IPRecord
+ from generalresearch.models.thl.ipinfo import IPGeoname, IPInformation
+
+
+# === THL ===
+
+
@pytest.fixture(scope="function")
def user(request, product_manager, user_manager, thl_web_rr) -> "User":
    """A dummy user, created on a dummy product by default."""
    # NOTE(review): pytest exposes indirect parameters on request.param, not
    # a "product" attribute -- as written this getattr always falls back to
    # None; confirm the intended override mechanism.
    product = getattr(request, "product", None)

    if product is None:
        product = product_manager.create_dummy()

    u = user_manager.create_dummy(product_id=product.id)
    # Attach the Product so later access doesn't lazily hit the DB.
    u.prefetch_product(pg_config=thl_web_rr)

    return u
+
+
@pytest.fixture
def user_with_wallet(
    request, user_factory, product_user_wallet_yes: "Product"
) -> "User":
    """A user on a wallet-enabled product."""
    # A user on a product with user wallet enabled, but they have no money
    return user_factory(product=product_user_wallet_yes)
+
+
@pytest.fixture
def user_with_wallet_amt(request, user_factory, product_amt_true: "Product") -> "User":
    """A user on a wallet-enabled AMT product."""
    # A user on a product with user wallet enabled, on AMT, but they have no money
    return user_factory(product=product_amt_true)
+
+
@pytest.fixture(scope="function")
def user_factory(user_manager, thl_web_rr) -> Callable:
    """Factory producing dummy users with their product pre-fetched."""

    def _create_user(product: "Product", created: Optional[datetime] = None):
        new_user = user_manager.create_dummy(product=product, created=created)
        # Attach the Product so tests don't trigger a lazy DB fetch later.
        new_user.prefetch_product(pg_config=thl_web_rr)
        return new_user

    return _create_user
+
+
@pytest.fixture(scope="function")
def wall_factory(wall_manager) -> Callable:
    """Factory that appends one finished Wall to ``session``.

    Mirrors the per-wall logic of ``session_factory``: each new wall starts
    1 ms after the previous one (or after the session start for the first
    wall) and is finished with a random 2-10 minute duration.
    """

    def _create_wall(
        session: "Session", wall_status: "Status", req_cpi: Optional[Decimal] = None
    ):

        assert session.started <= datetime.now(
            tz=timezone.utc
        ), "Session can't start in the future"

        if session.wall_events:
            # Subsequent Wall events
            last_wall = session.wall_events[-1]
            # BUG FIX: the guard was inverted (`assert not wall.finished`),
            # contradicting both its message and session_factory's identical
            # check; and `wall_started` was never assigned in this branch
            # (the assignment was commented out), so any session that
            # already had wall events raised NameError below.
            assert last_wall.finished, "Can't add new Walls until prior finishes"
            wall_started = last_wall.started + timedelta(milliseconds=1)
        else:
            # First Wall Event in a session
            wall_started = session.started + timedelta(milliseconds=1)

        wall = wall_manager.create_dummy(
            session_id=session.id,
            user_id=session.user_id,
            started=wall_started,
            req_cpi=req_cpi,
        )
        session.append_wall_event(w=wall)

        # Pick a status code consistent with the requested wall status.
        options = list(WALL_ALLOWED_STATUS_STATUS_CODE.get(wall_status, {}))
        wall.finish(
            finished=wall.started + timedelta(seconds=randint(a=60 * 2, b=60 * 10)),
            status=wall_status,
            status_code_1=randchoice(options),
        )

        return wall

    return _create_wall
+
+
@pytest.fixture(scope="function")
def wall(session, user, wall_manager) -> Optional["Wall"]:
    """A single COMPLETE wall tied to ``session`` (not appended to it)."""
    from generalresearch.models.thl.task_status import StatusCode1

    # NOTE(review): sibling fixtures read ``session.user_id``; this one reads
    # ``user.user_id`` -- confirm User exposes that attribute.
    wall = wall_manager.create_dummy(session_id=session.id, user_id=user.user_id)
    # thl_session.append_wall_event(wall)
    # Finish with a random 2-10 minute duration, matching session_factory.
    wall.finish(
        finished=wall.started + timedelta(seconds=randint(a=60 * 2, b=60 * 10)),
        status=Status.COMPLETE,
        status_code_1=StatusCode1.COMPLETE,
    )
    return wall
+
+
@pytest.fixture(scope="function")
def session_factory(
    wall_factory, session_manager, wall_manager, utc_hour_ago
) -> Callable:
    """Factory building a dummy session with ``wall_count`` finished walls.

    Walls start 1 ms apart; each is finished with a random 2-10 minute
    duration. Unless ``wall_statuses`` overrides them, all walls FAIL except
    the last, which gets ``final_status``.
    """
    from generalresearch.models.thl.session import Source

    def _create_session(
        user: "User",
        # Wall details
        wall_count: int = 5,
        wall_req_cpi: Decimal = Decimal(".50"),
        wall_req_cpis: Optional[List[Decimal]] = None,
        wall_statuses: Optional[List[Status]] = None,
        wall_source: Source = Source.TESTING,
        # Session details
        final_status: Status = Status.COMPLETE,
        started: datetime = utc_hour_ago,
    ) -> "Session":
        # Per-wall override lists must cover every wall.
        if wall_req_cpis:
            assert len(wall_req_cpis) == wall_count
        if wall_statuses:
            assert len(wall_statuses) == wall_count

        s = session_manager.create_dummy(started=started, user=user, country_iso="us")
        for idx in range(wall_count):
            if idx == 0:
                # First Wall Event in a session
                wall_started = s.started + timedelta(milliseconds=1)
            else:
                # Subsequent Wall events
                last_wall = s.wall_events[-1]
                assert last_wall.finished, "Can't add new Walls until prior finishes"
                wall_started = last_wall.started + timedelta(milliseconds=1)

            w = wall_manager.create_dummy(
                session_id=s.id,
                source=wall_source,
                user_id=s.user_id,
                started=wall_started,
                req_cpi=wall_req_cpis[idx] if wall_req_cpis else wall_req_cpi,
            )
            s.append_wall_event(w=w)

            # If it's the last wall in the session, respect the final_status
            # value for the Session
            if wall_statuses:
                _final_status = wall_statuses[idx]
            else:
                _final_status = final_status if idx == wall_count - 1 else Status.FAIL

            # Pick a status code consistent with the chosen status.
            options = list(WALL_ALLOWED_STATUS_STATUS_CODE.get(_final_status, {}))
            wall_manager.finish(
                wall=w,
                status=_final_status,
                status_code_1=randchoice(options),
                finished=w.started + timedelta(seconds=randint(a=60 * 2, b=60 * 10)),
            )

        return s

    return _create_session
+
+
@pytest.fixture(scope="function")
def finished_session_factory(
    session_factory, session_manager, utc_hour_ago
) -> Callable:
    """Like ``session_factory`` but also closes the session: derives its
    status and payouts from the walls and persists them via
    ``session_manager.finish_with_status``."""
    from generalresearch.models.thl.session import Source

    def _create_finished_session(
        user: "User",
        # Wall details
        wall_count: int = 5,
        wall_req_cpi: Decimal = Decimal(".50"),
        wall_req_cpis: Optional[List[Decimal]] = None,
        wall_statuses: Optional[List[Status]] = None,
        wall_source: Source = Source.TESTING,
        # Session details
        final_status: Status = Status.COMPLETE,
        started: datetime = utc_hour_ago,
    ) -> "Session":
        s: Session = session_factory(
            user=user,
            wall_count=wall_count,
            wall_req_cpi=wall_req_cpi,
            wall_req_cpis=wall_req_cpis,
            wall_statuses=wall_statuses,
            wall_source=wall_source,
            final_status=final_status,
            started=started,
        )
        # Derive the terminal status and the payout split from the walls.
        status, status_code_1 = s.determine_session_status()
        thl_net, commission_amount, bp_pay, user_pay = s.determine_payments()
        session_manager.finish_with_status(
            s,
            # The session ends when its last wall ends.
            finished=s.wall_events[-1].finished,
            payout=bp_pay,
            user_payout=user_pay,
            status=status,
            status_code_1=status_code_1,
        )
        return s

    return _create_finished_session
+
+
@pytest.fixture(scope="function")
def session(user, session_manager, wall_manager) -> "Session":
    """A dummy US Session with one (unfinished) Wall event attached."""
    from generalresearch.models.thl.session import Wall, Session

    sess: Session = session_manager.create_dummy(user=user, country_iso="us")
    first_wall: Wall = wall_manager.create_dummy(
        session_id=sess.id,
        user_id=sess.user_id,
        started=sess.started,
    )
    sess.append_wall_event(w=first_wall)
    return sess
+
+
@pytest.fixture
def product(request, product_manager) -> "Product":
    """A dummy Product, optionally attached to a team and/or business.

    NOTE(review): a pytest ``FixtureRequest`` does not normally carry
    ``team``/``business`` attributes, so both ``getattr`` lookups presumably
    always fall back to ``None`` — confirm how callers are meant to inject
    these (e.g. via ``request.param`` or markers).
    """
    from generalresearch.managers.thl.product import ProductManager

    team = getattr(request, "team", None)
    business = getattr(request, "business", None)

    product_manager: ProductManager
    return product_manager.create_dummy(
        team_id=team.uuid if team else None,
        business_id=business.uuid if business else None,
    )
+
+
@pytest.fixture
def product_factory(product_manager) -> Callable:
    """Factory fixture: each call creates a dummy Product, optionally tied to
    a Team and/or Business, with a configurable commission percentage."""

    def _create_product(
        team: Optional["Team"] = None,
        business: Optional["Business"] = None,
        commission_pct: Decimal = Decimal("0.05"),
    ):
        # Resolve optional owners to their UUIDs before delegating.
        team_uuid = team.uuid if team else None
        business_uuid = business.uuid if business else None
        return product_manager.create_dummy(
            team_id=team_uuid,
            business_id=business_uuid,
            commission_pct=commission_pct,
        )

    return _create_product
+
+
@pytest.fixture(scope="function")
def payout_config(request) -> "PayoutConfig":
    """A PayoutConfig for tests.

    Supports indirect parametrization: when the fixture is parametrized
    (``request.param`` is set) that value is returned as-is; otherwise a
    default config is built (40% transformation, cents-to-dollars format).

    Bug fix: the guard previously checked ``hasattr(request, "payout_config")``,
    which is never true on a FixtureRequest, so the ``request.param`` branch
    was unreachable and indirect parametrization was silently ignored.
    """
    from generalresearch.models.thl.product import (
        PayoutConfig,
        PayoutTransformation,
        PayoutTransformationPercentArgs,
    )

    # ``request.param`` only exists when the fixture is parametrized.
    if hasattr(request, "param"):
        return request.param

    return PayoutConfig(
        payout_format="${payout/100:.2f}",
        payout_transformation=PayoutTransformation(
            f="payout_transformation_percent",
            kwargs=PayoutTransformationPercentArgs(pct=0.40),
        ),
    )
+
+
@pytest.fixture(scope="function")
def product_user_wallet_yes(payout_config, product_manager) -> "Product":
    """A dummy Product with the user wallet enabled."""
    from generalresearch.models.thl.product import UserWalletConfig
    from generalresearch.managers.thl.product import ProductManager

    product_manager: ProductManager
    wallet_cfg = UserWalletConfig(enabled=True)
    return product_manager.create_dummy(
        payout_config=payout_config,
        user_wallet_config=wallet_cfg,
    )
+
+
@pytest.fixture(scope="function")
def product_user_wallet_no(product_manager) -> "Product":
    """A dummy Product with the user wallet disabled."""
    from generalresearch.models.thl.product import UserWalletConfig
    from generalresearch.managers.thl.product import ProductManager

    product_manager: ProductManager
    wallet_cfg = UserWalletConfig(enabled=False)
    return product_manager.create_dummy(user_wallet_config=wallet_cfg)
+
+
@pytest.fixture(scope="function")
def product_amt_true(product_manager, payout_config) -> "Product":
    """A dummy Product with AMT enabled on an enabled user wallet."""
    from generalresearch.models.thl.product import UserWalletConfig

    wallet_cfg = UserWalletConfig(amt=True, enabled=True)
    return product_manager.create_dummy(
        user_wallet_config=wallet_cfg,
        payout_config=payout_config,
    )
+
+
@pytest.fixture(scope="function")
def bp_payout_factory(
    thl_lm, product_manager, business_payout_event_manager
) -> Callable:
    """Factory fixture creating business-partner payout events.

    Defaults: a fresh dummy Product and a random amount between 1 cent and
    $99.99 when the caller does not supply them.
    """

    def _create_bp_payout(
        product: Optional["Product"] = None,
        amount: Optional["USDCent"] = None,
        ext_ref_id: Optional[str] = None,
        created: Optional[AwareDatetime] = None,
        skip_wallet_balance_check: bool = False,
        skip_one_per_day_check: bool = False,
    ) -> "UserPayoutEvent":
        from generalresearch.currency import USDCent

        event_kwargs = dict(
            thl_ledger_manager=thl_lm,
            product=product or product_manager.create_dummy(),
            amount=amount or USDCent(randint(1, 99_99)),
            ext_ref_id=ext_ref_id,
            created=created,
            skip_wallet_balance_check=skip_wallet_balance_check,
            skip_one_per_day_check=skip_one_per_day_check,
        )
        return business_payout_event_manager.create_bp_payout_event(**event_kwargs)

    return _create_bp_payout
+
+
+# === GR ===
+
+
@pytest.fixture(scope="function")
def business(request, business_manager) -> "Business":
    """A dummy Business.  (``request`` is unused but kept for signature
    stability with the sibling fixtures.)"""
    from generalresearch.managers.gr.business import BusinessManager

    business_manager: BusinessManager
    return business_manager.create_dummy()
+
+
@pytest.fixture(scope="function")
def business_address(request, business, business_address_manager) -> "BusinessAddress":
    """A dummy BusinessAddress attached to the ``business`` fixture."""
    from generalresearch.managers.gr.business import BusinessAddressManager

    business_address_manager: BusinessAddressManager
    return business_address_manager.create_dummy(business_id=business.id)
+
+
@pytest.fixture(scope="function")
def business_bank_account(
    request, business, business_bank_account_manager
) -> "BusinessBankAccount":
    """A dummy BusinessBankAccount attached to the ``business`` fixture."""
    from generalresearch.managers.gr.business import BusinessBankAccountManager

    business_bank_account_manager: BusinessBankAccountManager
    return business_bank_account_manager.create_dummy(business_id=business.id)
+
+
@pytest.fixture(scope="function")
def team(request, team_manager) -> "Team":
    """A dummy Team."""
    from generalresearch.managers.gr.team import TeamManager

    team_manager: TeamManager
    return team_manager.create_dummy()
+
+
@pytest.fixture(scope="function")
def gr_user(gr_um) -> "GRUser":
    """A dummy GRUser."""
    from generalresearch.managers.gr.authentication import GRUserManager

    gr_um: GRUserManager
    return gr_um.create_dummy()
+
+
@pytest.fixture(scope="function")
def gr_user_cache(gr_user, gr_db, thl_web_rr, gr_redis_config):
    """The ``gr_user`` fixture with its cache backends (pg, web, redis) wired up."""
    gr_user.set_cache(
        pg_config=gr_db,
        thl_web_rr=thl_web_rr,
        redis_config=gr_redis_config,
    )
    return gr_user
+
+
@pytest.fixture(scope="function")
def gr_user_factory(gr_um) -> Callable:
    """Factory fixture: each call persists and returns a fresh dummy GRUser."""

    def _create_gr_user():
        return gr_um.create_dummy()

    return _create_gr_user
+
+
@pytest.fixture()
def gr_user_token(gr_user, gr_tm, gr_db) -> "GRToken":
    """Create a token for ``gr_user`` and return it after prefetching from the db."""
    gr_tm.create(user_id=gr_user.id)
    gr_user.prefetch_token(pg_config=gr_db)
    return gr_user.token
+
+
@pytest.fixture()
def gr_user_token_header(gr_user_token) -> Dict:
    """Ready-to-send auth header dict derived from ``gr_user_token``."""
    return gr_user_token.auth_header
+
+
@pytest.fixture(scope="function")
def membership(request, team, gr_user, team_manager) -> "Membership":
    """Membership linking the ``gr_user`` fixture to the ``team`` fixture.

    Both objects must already be persisted (have ids) before linking.
    """
    assert team.id, "Team must be saved"
    assert gr_user.id, "GRUser must be saved"
    return team_manager.add_user(team=team, gr_user=gr_user)
+
+
@pytest.fixture(scope="function")
def membership_factory(
    team: "Team", gr_user: "GRUser", membership_manager, team_manager, gr_um
) -> Callable:
    """Factory fixture creating Memberships.

    Accepts optional ``team=`` / ``gr_user=`` kwargs; when either is omitted,
    a fresh dummy Team / GRUser is created for the membership.

    Bug fix: previously the defaults were passed as
    ``kwargs.get(key, factory())`` — ``dict.get``'s default argument is
    evaluated eagerly, so a throwaway dummy Team/GRUser was created (and
    persisted) on *every* call, even when the caller supplied one.  Dummies
    are now created only when actually needed.
    """
    from generalresearch.managers.gr.team import MembershipManager

    membership_manager: MembershipManager

    def _create_membership(**kwargs):
        _team = kwargs.get("team")
        if _team is None:
            _team = team_manager.create_dummy()
        _gr_user = kwargs.get("gr_user")
        if _gr_user is None:
            _gr_user = gr_um.create_dummy()

        return membership_manager.create(team=_team, gr_user=_gr_user)

    return _create_membership
+
+
@pytest.fixture(scope="function")
def audit_log(audit_log_manager, user) -> "AuditLog":
    """A dummy AuditLog row for the ``user`` fixture."""
    from generalresearch.managers.thl.userhealth import AuditLogManager

    audit_log_manager: AuditLogManager
    return audit_log_manager.create_dummy(user_id=user.user_id)
+
+
@pytest.fixture(scope="function")
def audit_log_factory(audit_log_manager) -> Callable:
    """Factory fixture creating AuditLog rows with optional overrides."""
    from generalresearch.managers.thl.userhealth import AuditLogManager

    audit_log_manager: AuditLogManager

    def _create_audit_log(
        user_id: PositiveInt,
        level: Optional["AuditLogLevel"] = None,
        event_type: Optional[str] = None,
        event_msg: Optional[str] = None,
        event_value: Optional[float] = None,
    ):
        # Pass everything straight through; None values let the manager's
        # dummy defaults apply.
        return audit_log_manager.create_dummy(
            user_id=user_id,
            level=level,
            event_type=event_type,
            event_msg=event_msg,
            event_value=event_value,
        )

    return _create_audit_log
+
+
@pytest.fixture(scope="function")
def ip_geoname(ip_geoname_manager) -> "IPGeoname":
    """A dummy IPGeoname row."""
    from generalresearch.managers.thl.ipinfo import IPGeonameManager

    ip_geoname_manager: IPGeonameManager
    return ip_geoname_manager.create_dummy()
+
+
@pytest.fixture(scope="function")
def ip_information(ip_information_manager, ip_geoname) -> "IPInformation":
    """A dummy IPInformation row tied to the ``ip_geoname`` fixture."""
    from generalresearch.managers.thl.ipinfo import IPInformationManager

    ip_information_manager: IPInformationManager
    return ip_information_manager.create_dummy(
        geoname_id=ip_geoname.geoname_id,
        country_iso=ip_geoname.country_iso,
    )
+
+
@pytest.fixture(scope="function")
def ip_information_factory(ip_information_manager) -> Callable:
    """Factory fixture creating IPInformation rows for a given IP + geoname."""
    from generalresearch.managers.thl.ipinfo import IPInformationManager

    ip_information_manager: IPInformationManager

    def _create_ip_info(ip: str, geoname: "IPGeoname", **kwargs):
        # geoname supplies both the geoname_id and the country; extra kwargs
        # pass straight through to the manager.
        return ip_information_manager.create_dummy(
            ip=ip,
            geoname_id=geoname.geoname_id,
            country_iso=geoname.country_iso,
            **kwargs,
        )

    return _create_ip_info
+
+
@pytest.fixture(scope="function")
def ip_record(ip_record_manager, ip_geoname, user) -> "IPRecord":
    """A dummy IPRecord for the ``user`` fixture.

    ``ip_geoname`` is requested only to ensure a geoname row exists first.
    """
    from generalresearch.managers.thl.userhealth import IPRecordManager

    ip_record_manager: IPRecordManager
    return ip_record_manager.create_dummy(user_id=user.user_id)
+
+
@pytest.fixture(scope="function")
def ip_record_factory(ip_record_manager, user) -> Callable:
    """Factory fixture creating IPRecord rows for an arbitrary user id."""
    from generalresearch.managers.thl.userhealth import IPRecordManager

    ip_record_manager: IPRecordManager

    def _create_ip_record(user_id: PositiveInt, ip: Optional[str] = None):
        return ip_record_manager.create_dummy(user_id=user_id, ip=ip)

    return _create_ip_record
+
+
@pytest.fixture(scope="session")
def buyer(buyer_manager) -> Buyer:
    """A persisted test Buyer with a unique code and a readable label."""
    buyer_code = uuid4().hex
    # Ensure the row exists first, then push the label via update.
    buyer_manager.bulk_get_or_create(source=Source.TESTING, codes=[buyer_code])
    # NOTE(review): this freshly-constructed Buyer does not carry the db
    # primary key from bulk_get_or_create above — presumably ``update``
    # matches on (source, code); confirm against BuyerManager.update.
    b = Buyer(
        source=Source.TESTING, code=buyer_code, label=f"test-buyer-{buyer_code[:8]}"
    )
    buyer_manager.update(b)
    return b
+
+
@pytest.fixture(scope="session")
def buyer_factory(buyer_manager) -> Callable:
    """Factory fixture: each call persists and returns one new Buyer."""

    def inner():
        created = buyer_manager.bulk_get_or_create(
            source=Source.TESTING, codes=[uuid4().hex]
        )
        return created[0]

    return inner
+
+
@pytest.fixture(scope="session")
def survey(survey_manager, buyer) -> Survey:
    """A persisted test Survey belonging to the session-scoped ``buyer``."""
    new_survey = Survey(
        source=Source.TESTING, survey_id=uuid4().hex, buyer_code=buyer.code
    )
    survey_manager.create_bulk([new_survey])
    return new_survey
+
+
@pytest.fixture(scope="session")
def survey_factory(survey_manager, buyer_factory) -> Callable:
    """Factory fixture creating persisted Surveys.

    Uses the supplied Buyer when given, otherwise creates a fresh one.
    """

    def inner(buyer: Optional[Buyer] = None) -> Survey:
        owner = buyer or buyer_factory()
        new_survey = Survey(
            source=Source.TESTING,
            survey_id=uuid4().hex,
            buyer_code=owner.code,
            buyer_id=owner.id,
        )
        survey_manager.create_bulk([new_survey])
        return new_survey

    return inner
diff --git a/test_utils/spectrum/__init__.py b/test_utils/spectrum/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test_utils/spectrum/__init__.py
diff --git a/test_utils/spectrum/conftest.py b/test_utils/spectrum/conftest.py
new file mode 100644
index 0000000..b7887f6
--- /dev/null
+++ b/test_utils/spectrum/conftest.py
@@ -0,0 +1,79 @@
+import logging
+
+import time
+
+import pytest
+from datetime import datetime, timezone
+from generalresearch.managers.spectrum.survey import (
+ SpectrumSurveyManager,
+ SpectrumCriteriaManager,
+)
+from generalresearch.models.spectrum.survey import SpectrumSurvey
+from generalresearch.sql_helper import SqlHelper
+
+from .surveys_json import SURVEYS_JSON, CONDITIONS
+
+
@pytest.fixture(scope="session")
def spectrum_rw(settings) -> SqlHelper:
    """Session-scoped SqlHelper bound to the spectrum unittest database.

    Refuses to run against anything but a ``/unittest-`` database, then
    returns a helper with short read/write/connect timeouts.

    Fix: dropped the leftover debug ``print`` of the DSN — the ``logging``
    call already records it in captured test logs.
    """
    logging.info(f"{settings.spectrum_rw_db=}")
    assert "/unittest-" in settings.spectrum_rw_db.path
    return SqlHelper(
        dsn=settings.spectrum_rw_db,
        read_timeout=2,
        write_timeout=1,
        connect_timeout=2,
    )
+
+
@pytest.fixture(scope="session")
def spectrum_criteria_manager(spectrum_rw) -> SpectrumCriteriaManager:
    """Criteria manager bound to the unittest spectrum db (re-checked here)."""
    assert "/unittest-" in spectrum_rw.dsn.path
    return SpectrumCriteriaManager(spectrum_rw)
+
+
@pytest.fixture(scope="session")
def spectrum_survey_manager(spectrum_rw) -> SpectrumSurveyManager:
    """Survey manager bound to the unittest spectrum db (re-checked here)."""
    assert "/unittest-" in spectrum_rw.dsn.path
    return SpectrumSurveyManager(spectrum_rw)
+
+
@pytest.fixture(scope="session")
def setup_spectrum_surveys(
    spectrum_rw, spectrum_survey_manager, spectrum_criteria_manager
):
    """Seed the unittest spectrum db with the SURVEYS_JSON example surveys.

    Upserts the surveys and their criteria, ensures supplier '687' exists,
    and inserts an allocation (surveysupplier) row per survey.

    Fix: the raw DB-API connection from ``make_connection()`` (and its
    cursor) were never closed — now released in a ``finally`` block.
    """
    now = datetime.now(timezone.utc)
    # make sure these example surveys exist in db
    surveys = [SpectrumSurvey.model_validate_json(x) for x in SURVEYS_JSON]
    for s in surveys:
        s.modified_api = datetime.now(tz=timezone.utc)
    spectrum_survey_manager.create_or_update(surveys)
    spectrum_criteria_manager.update(CONDITIONS)

    # and make sure they have allocation for 687
    spectrum_rw.execute_sql_query(
        f"""
        INSERT IGNORE INTO `{spectrum_rw.db}`.spectrum_supplier
        (supplier_id, name, api_key, secret_key, username, password)
        VALUES (%s, %s, %s, %s, %s, %s)""",
        ["687", "GRL", "x", "x", "x", "x"],
        commit=True,
    )
    supplier687_pk = spectrum_rw.execute_sql_query(
        f"""
        select id from `{spectrum_rw.db}`.spectrum_supplier where supplier_id = '687'"""
    )[0]["id"]
    conn = spectrum_rw.make_connection()
    try:
        c = conn.cursor()
        try:
            c.executemany(
                f"""
                INSERT IGNORE INTO `{spectrum_rw.db}`.spectrum_surveysupplier
                (created, surveySig, supplier_id, survey_id)
                VALUES (%s, %s, %s, %s)
                """,
                [[now, "xxx", supplier687_pk, s.survey_id] for s in surveys],
            )
            conn.commit()
        finally:
            c.close()
    finally:
        conn.close()
    # Wait a second to make sure the spectrum-grpc pulls these from the db into global-vars
    time.sleep(1)
diff --git a/test_utils/spectrum/surveys_json.py b/test_utils/spectrum/surveys_json.py
new file mode 100644
index 0000000..eb747a5
--- /dev/null
+++ b/test_utils/spectrum/surveys_json.py
@@ -0,0 +1,140 @@
+from generalresearch.models import LogicalOperator
+from generalresearch.models.spectrum.survey import (
+ SpectrumCondition,
+ SpectrumSurvey,
+)
+from generalresearch.models.thl.survey.condition import ConditionValueType
+
# Raw JSON payloads (as the spectrum API would deliver them) used to seed the
# unittest db; each is validated into a SpectrumSurvey by the conftest.
SURVEYS_JSON = [
    # survey_id 111111 — us/eng, no include/exclude psids
    '{"cpi":"3.90","country_isos":["us"],"language_isos":["eng"],"buyer_id":"215","bid_loi":780,"source":"s",'
    '"used_question_ids":["1235","212"],"survey_id":"111111","survey_name":"Exciting New Survey #14472374",'
    '"status":22,"field_end_date":"2023-03-02T07:05:36.261000Z","category_code":"232","calculation_type":"COMPLETES",'
    '"requires_pii":false,"survey_exclusions":"13947261,14126487,14361592,14376811,14385771,14387789,14472374",'
    '"exclusion_period":30,"bid_ir":0.2,"overall_loi":null,"overall_ir":null,"last_block_loi":null,'
    '"last_block_ir":null,"project_last_complete_date":null,"country_iso":"us","language_iso":"eng",'
    '"include_psids":null,"exclude_psids":null'
    ',"qualifications":["ee5e842","e6e0b0b"],"quotas":[{"remaining_count":100,'
    '"condition_hashes":["32cbf31"]}],"conditions":null,"created_api":"2023-02-28T07:05:36.698000Z",'
    '"modified_api":"2024-03-10T09:43:40.030000Z","updated":"2024-05-30T21:52:46.431612Z","is_live":true'
    "}",
    # survey_id 14472374 — same shape but with exclude_psids populated
    '{"cpi":"3.90","country_isos":["us"],"language_isos":["eng"],"buyer_id":"215","bid_loi":780,"source":"s",'
    '"used_question_ids":["1235","212"],"survey_id":"14472374","survey_name":"Exciting New Survey #14472374",'
    '"status":22,"field_end_date":"2023-03-02T07:05:36.261000Z","category_code":"232","calculation_type":"COMPLETES",'
    '"requires_pii":false,"survey_exclusions":"13947261,14126487,14361592,14376811,14385771,14387789,14472374",'
    '"exclusion_period":30,"bid_ir":0.2,"overall_loi":null,"overall_ir":null,"last_block_loi":null,'
    '"last_block_ir":null,"project_last_complete_date":null,"country_iso":"us","language_iso":"eng",'
    '"include_psids":null,"exclude_psids":"0408319875e9dbffdc09e86671ad5636,23c4c66ecbc465906d0b0fd798740e64,'
    '861df4603df3b7f754b8d4b89cbdb313","qualifications":["ee5e842","e6e0b0b"],"quotas":[{"remaining_count":100,'
    '"condition_hashes":["32cbf31"]}],"conditions":null,"created_api":"2023-02-28T07:05:36.698000Z",'
    '"modified_api":"2024-03-10T09:43:40.030000Z","updated":"2024-05-30T21:52:46.431612Z","is_live":true'
    "}",
    # survey_id 12345 — with include_psids populated
    '{"cpi":"3.90","country_isos":["us"],"language_isos":["eng"],"buyer_id":"215","bid_loi":780,"source":"s",'
    '"used_question_ids":["1235","212"],"survey_id":"12345","survey_name":"Exciting New Survey #14472374",'
    '"status":22,"field_end_date":"2023-03-02T07:05:36.261000Z","category_code":"232","calculation_type":"COMPLETES",'
    '"requires_pii":false,"survey_exclusions":"13947261,14126487,14361592,14376811,14385771,14387789,14472374",'
    '"exclusion_period":30,"bid_ir":0.2,"overall_loi":null,"overall_ir":null,"last_block_loi":null,'
    '"last_block_ir":null,"project_last_complete_date":null,"country_iso":"us","language_iso":"eng",'
    '"include_psids":"7d043991b1494dbbb57786b11c88239c","exclude_psids":null'
    ',"qualifications":["ee5e842","e6e0b0b"],"quotas":[{"remaining_count":100,'
    '"condition_hashes":["32cbf31"]}],"conditions":null,"created_api":"2023-02-28T07:05:36.698000Z",'
    '"modified_api":"2024-03-10T09:43:40.030000Z","updated":"2024-05-30T21:52:46.431612Z","is_live":true'
    "}",
    # survey_id 14970164 — large example: many quotas, long exclude_psids list,
    # and an explicit all_hashes field
    '{"cpi":"1.40","country_isos":["us"],"language_isos":["eng"],"buyer_id":"233","bid_loi":null,"source":"s",'
    '"used_question_ids":["245","244","212","211","225"],"survey_id":"14970164","survey_name":"Exciting New Survey '
    '#14970164","status":22,"field_end_date":"2024-05-07T16:18:33.000000Z","category_code":"232",'
    '"calculation_type":"COMPLETES","requires_pii":false,"survey_exclusions":"14970164,29690277",'
    '"exclusion_period":30,"bid_ir":null,"overall_loi":900,"overall_ir":0.56,"last_block_loi":600,'
    '"last_block_ir":0.01,"project_last_complete_date":"2024-05-28T04:12:56.297000Z","country_iso":"us",'
    '"language_iso":"eng","include_psids":null,"exclude_psids":"01c7156fd9639737effbbdebd7fd66f6,'
    "0508b88f4991bac8b10e9de74ce80194,0a51c627d77cef41f802e51a00126697,15b888176ac4781c2c978a9a05c396f8,"
    "17bc146b4f7fb05c7058d25da70c6a44,29935289c1f86a4144aab2e12652f305,2fe9d1d451efca10eba4fa4e5e2b74c9,"
    "c3527b7ef570a1571ea19870f3c25600,cdf2771d57cda9f1bf334382b2b7afd8,cebf3ec50395d973310ea526457dd5a0,"
    "cf3877cfc15e2e6ef2a56a7a7a37f3d3,dfa691e6d060e3643d5731df30be9f69,e0cb49537182660826aa351e1187809f,"
    'edb6d280113ca49561f25fdcb500fde6,fbfba66cfad602f1c26e61e6174eb1f7,fd4307b16fd15e8534a4551c9b6872fc",'
    '"qualifications":["1ab337d","a01aa68","437774f","dc6065b","82b6ad6"],"quotas":[{"remaining_count":242,'
    '"condition_hashes":["c23c0b9"]},{"remaining_count":0,"condition_hashes":["5b8c6cf"]},{"remaining_count":126,'
    '"condition_hashes":["ac35a6e"]},{"remaining_count":110,"condition_hashes":["5e7e5aa"]},{"remaining_count":108,'
    '"condition_hashes":["9a7aef3"]},{"remaining_count":127,"condition_hashes":["4f75127"]},{"remaining_count":0,'
    '"condition_hashes":["95437ed"]},{"remaining_count":17,"condition_hashes":["b4b7b95"]},{"remaining_count":16,'
    '"condition_hashes":["0ab0ae6"]},{"remaining_count":8,"condition_hashes":["6e86fb5"]},{"remaining_count":12,'
    '"condition_hashes":["24de31e"]},{"remaining_count":69,"condition_hashes":["6bdf350"]},{"remaining_count":411,'
    '"condition_hashes":["c94d422"]}],"conditions":null,"created_api":"2023-03-30T22:47:36.324000Z",'
    '"modified_api":"2024-05-30T13:07:16.489000Z","updated":"2024-05-30T21:52:37.493282Z","is_live":true,'
    '"all_hashes":["c94d422","b4b7b95","6bdf350","6e86fb5","82b6ad6","24de31e","1ab337d","c23c0b9","9a7aef3",'
    '"ac35a6e","95437ed","5b8c6cf","437774f","a01aa68","5e7e5aa","4f75127","0ab0ae6","dc6065b"]}',
    # survey_id 69420 — au/eng, no qualifications or condition hashes
    '{"cpi":"1.23","country_isos":["au"],"language_isos":["eng"],"buyer_id":"215","bid_loi":780,"source":"s",'
    '"used_question_ids":[],"survey_id":"69420","survey_name":"Everyone is eligible AU",'
    '"status":22,"field_end_date":"2023-03-02T07:05:36.261000Z","category_code":"232","calculation_type":"COMPLETES",'
    '"requires_pii":false,"survey_exclusions":"13947261,14126487,14361592,14376811,14385771,14387789,14472374",'
    '"exclusion_period":30,"bid_ir":0.2,"overall_loi":null,"overall_ir":null,"last_block_loi":null,'
    '"last_block_ir":null,"project_last_complete_date":null,"country_iso":"au","language_iso":"eng",'
    '"include_psids":null,"exclude_psids":null'
    ',"qualifications":[],"quotas":[{"remaining_count":100,'
    '"condition_hashes":[]}],"conditions":null,"created_api":"2023-02-28T07:05:36.698000Z",'
    '"modified_api":"2024-03-10T09:43:40.030000Z","updated":"2024-05-30T21:52:46.431612Z","is_live":true'
    "}",
    # survey_id 69421 — us/eng, no qualifications or condition hashes
    '{"cpi":"1.23","country_isos":["us"],"language_isos":["eng"],"buyer_id":"215","bid_loi":780,"source":"s",'
    '"used_question_ids":[],"survey_id":"69421","survey_name":"Everyone is eligible US",'
    '"status":22,"field_end_date":"2023-03-02T07:05:36.261000Z","category_code":"232","calculation_type":"COMPLETES",'
    '"requires_pii":false,"survey_exclusions":"13947261,14126487,14361592,14376811,14385771,14387789,14472374",'
    '"exclusion_period":30,"bid_ir":0.2,"overall_loi":null,"overall_ir":null,"last_block_loi":null,'
    '"last_block_ir":null,"project_last_complete_date":null,"country_iso":"us","language_iso":"eng",'
    '"include_psids":null,"exclude_psids":null'
    ',"qualifications":[],"quotas":[{"remaining_count":100,'
    '"condition_hashes":[]}],"conditions":null,"created_api":"2023-02-28T07:05:36.698000Z",'
    '"modified_api":"2024-03-10T09:43:40.030000Z","updated":"2024-05-30T21:52:46.431612Z","is_live":true'
    "}",
    # For partial eligibility
    # survey_id 999000 — "Pet owners": has qualifications + used_question_ids
    '{"cpi":"1.23","country_isos":["us"],"language_isos":["eng"],"buyer_id":"215","bid_loi":780,"source":"s",'
    '"used_question_ids":["1031", "212"],"survey_id":"999000","survey_name":"Pet owners",'
    '"status":22,"field_end_date":"2023-03-02T07:05:36.261000Z","category_code":"232","calculation_type":"COMPLETES",'
    '"requires_pii":false,"survey_exclusions":"13947261",'
    '"exclusion_period":30,"bid_ir":0.2,"overall_loi":null,"overall_ir":null,"last_block_loi":null,'
    '"last_block_ir":null,"project_last_complete_date":null,"country_iso":"us","language_iso":"eng",'
    '"include_psids":null,"exclude_psids":null'
    ',"qualifications":["0039b0c", "00f60a8"],"quotas":[{"remaining_count":100,'
    '"condition_hashes":[]}],"conditions":null,"created_api":"2023-02-28T07:05:36.698000Z",'
    '"modified_api":"2024-03-10T09:43:40.030000Z","updated":"2024-05-30T21:52:46.431612Z","is_live":true'
    "}",
]
+
# make sure hashes for 111111 are in db
# Example SpectrumConditions whose criterion hashes the JSON surveys above
# reference (qualifications / quota condition_hashes).

# c1/c2: LIST conditions on the same question id, differing only in values.
c1 = SpectrumCondition(
    question_id="1001",
    value_type=ConditionValueType.LIST,
    values=["a", "b", "c"],
    negate=False,
    logical_operator=LogicalOperator.OR,
)
c2 = SpectrumCondition(
    question_id="1001",
    value_type=ConditionValueType.LIST,
    values=["a"],
    negate=False,
    logical_operator=LogicalOperator.OR,
)
# c3: RANGE condition (e.g. age bands).
c3 = SpectrumCondition(
    question_id="1002",
    value_type=ConditionValueType.RANGE,
    values=["18-24", "30-32"],
    negate=False,
    logical_operator=LogicalOperator.OR,
)
c4 = SpectrumCondition(
    question_id="212",
    value_type=ConditionValueType.LIST,
    values=["23", "24"],
    negate=False,
    logical_operator=LogicalOperator.OR,
)
c5 = SpectrumCondition(
    question_id="1031",
    value_type=ConditionValueType.LIST,
    values=["113", "114", "121"],
    negate=False,
    logical_operator=LogicalOperator.OR,
)
CONDITIONS = [c1, c2, c3, c4, c5]
# Import-time sanity check: the first survey (id 111111) must list c1's and
# c3's criterion hashes among its qualifications.
# NOTE: these asserts are skipped when Python runs with -O.
survey = SpectrumSurvey.model_validate_json(SURVEYS_JSON[0])
assert c1.criterion_hash in survey.qualifications
assert c3.criterion_hash in survey.qualifications