diff --git a/prediction_market_agent_tooling/deploy/agent.py b/prediction_market_agent_tooling/deploy/agent.py
index f0a2b697..ae413968 100644
--- a/prediction_market_agent_tooling/deploy/agent.py
+++ b/prediction_market_agent_tooling/deploy/agent.py
@@ -153,10 +153,10 @@ def langfuse_update_current_trace(
             input=input,
             output=output,
             user_id=user_id or getpass.getuser(),
-            session_id=session_id
-            or self.session_id,  # All traces within a single run execution will be grouped under a single session.
-            version=version
-            or APIKeys().LANGFUSE_DEPLOYMENT_VERSION,  # Optionally, mark the current deployment with version (e.g. add git commit hash during docker building).
+            session_id=session_id or self.session_id,
+            # All traces within a single run execution will be grouped under a single session.
+            version=version or APIKeys().LANGFUSE_DEPLOYMENT_VERSION,
+            # Optionally, mark the current deployment with version (e.g. add git commit hash during docker building).
             release=release,
             metadata=metadata,
             tags=tags,
@@ -342,6 +342,10 @@ def update_langfuse_trace_by_processed_market(
             ]
         )
 
+    @property
+    def agent_name(self) -> str:
+        return self.__class__.__name__
+
     def check_min_required_balance_to_operate(self, market_type: MarketType) -> None:
         api_keys = APIKeys()
@@ -444,7 +448,9 @@ def after_process_market(
     ) -> None:
         keys = APIKeys()
         if self.store_prediction:
-            market.store_prediction(processed_market=processed_market, keys=keys)
+            market.store_prediction(
+                processed_market=processed_market, keys=keys, agent_name=self.agent_name
+            )
         else:
             logger.info(
                 f"Prediction {processed_market} not stored because {self.store_prediction=}."
@@ -613,10 +619,14 @@ def after_process_market(
         processed_market: ProcessedMarket | None,
     ) -> None:
         api_keys = APIKeys()
-        super().after_process_market(market_type, market, processed_market)
+        super().after_process_market(
+            market_type,
+            market,
+            processed_market,
+        )
         if isinstance(processed_market, ProcessedTradedMarket):
             if self.store_trades:
-                market.store_trades(processed_market, api_keys)
+                market.store_trades(processed_market, api_keys, self.agent_name)
             else:
                 logger.info(
                     f"Trades {processed_market.trades} not stored because {self.store_trades=}."
diff --git a/prediction_market_agent_tooling/markets/agent_market.py b/prediction_market_agent_tooling/markets/agent_market.py
index 8344e5e9..40afe29c 100644
--- a/prediction_market_agent_tooling/markets/agent_market.py
+++ b/prediction_market_agent_tooling/markets/agent_market.py
@@ -231,7 +231,10 @@ def verify_operational_balance(api_keys: APIKeys) -> bool:
         raise NotImplementedError("Subclasses must implement this method")
 
     def store_prediction(
-        self, processed_market: ProcessedMarket | None, keys: APIKeys
+        self,
+        processed_market: ProcessedMarket | None,
+        keys: APIKeys,
+        agent_name: str,
     ) -> None:
         """
         If market allows to upload predictions somewhere, implement it in this method.
@@ -239,7 +242,10 @@ def store_prediction(
         raise NotImplementedError("Subclasses must implement this method")
 
     def store_trades(
-        self, traded_market: ProcessedTradedMarket | None, keys: APIKeys
+        self,
+        traded_market: ProcessedTradedMarket | None,
+        keys: APIKeys,
+        agent_name: str,
     ) -> None:
         """
         If market allows to upload trades somewhere, implement it in this method.
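The `AgentMarket` hunks above only widen the abstract signatures, so any concrete market implementation has to accept the new `agent_name` argument as well; the remaining files in this diff update the bundled markets, and callers change only where `after_process_market` already passes `agent_name=self.agent_name`. A rough sketch of a conforming subclass outside this patch (the `ExampleMarket` class, its bodies, and the import paths are assumptions, not part of the change):

# Illustrative sketch, not part of the patch. Import paths are assumed from the
# file paths shown in the hunks above; adjust to the real package layout.
from prediction_market_agent_tooling.config import APIKeys
from prediction_market_agent_tooling.markets.agent_market import AgentMarket
from prediction_market_agent_tooling.markets.data_models import (
    ProcessedMarket,
    ProcessedTradedMarket,
)


class ExampleMarket(AgentMarket):
    def store_prediction(
        self,
        processed_market: ProcessedMarket | None,
        keys: APIKeys,
        agent_name: str,
    ) -> None:
        # The extra `agent_name` argument must now be accepted even if this
        # market has nowhere to record it.
        print(f"{agent_name} predicted {processed_market} on market {self.id}")

    def store_trades(
        self,
        traded_market: ProcessedTradedMarket | None,
        keys: APIKeys,
        agent_name: str,
    ) -> None:
        print(f"{agent_name} traded {traded_market} on market {self.id}")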
diff --git a/prediction_market_agent_tooling/markets/metaculus/metaculus.py b/prediction_market_agent_tooling/markets/metaculus/metaculus.py
index 64efc75e..5a9daf97 100644
--- a/prediction_market_agent_tooling/markets/metaculus/metaculus.py
+++ b/prediction_market_agent_tooling/markets/metaculus/metaculus.py
@@ -107,7 +107,7 @@ def get_binary_markets(
         return [MetaculusAgentMarket.from_data_model(q) for q in all_questions[:limit]]
 
     def store_prediction(
-        self, processed_market: ProcessedMarket | None, keys: APIKeys
+        self, processed_market: ProcessedMarket | None, keys: APIKeys, agent_name: str
     ) -> None:
         if processed_market is not None:
             make_prediction(self.id, processed_market.answer.p_yes)
diff --git a/prediction_market_agent_tooling/markets/omen/data_models.py b/prediction_market_agent_tooling/markets/omen/data_models.py
index 1e1b5457..95becd6c 100644
--- a/prediction_market_agent_tooling/markets/omen/data_models.py
+++ b/prediction_market_agent_tooling/markets/omen/data_models.py
@@ -807,7 +807,7 @@ def from_tuple(values: tuple[t.Any]) -> "ContractPrediction":
 
 class IPFSAgentResult(BaseModel):
     reasoning: str
-
+    agent_name: str
     model_config = ConfigDict(
         extra="forbid",
     )
diff --git a/prediction_market_agent_tooling/markets/omen/omen.py b/prediction_market_agent_tooling/markets/omen/omen.py
index a1d54644..049a2089 100644
--- a/prediction_market_agent_tooling/markets/omen/omen.py
+++ b/prediction_market_agent_tooling/markets/omen/omen.py
@@ -414,17 +414,21 @@ def get_trade_balance(api_keys: APIKeys, web3: Web3 | None = None) -> xDai:
     @staticmethod
     def verify_operational_balance(api_keys: APIKeys) -> bool:
         return get_total_balance(
-            api_keys.public_key,  # Use `public_key`, not `bet_from_address` because transaction costs are paid from the EOA wallet.
+            api_keys.public_key,
+            # Use `public_key`, not `bet_from_address` because transaction costs are paid from the EOA wallet.
             sum_wxdai=False,
         ) > xdai_type(0.001)
 
     def store_prediction(
-        self, processed_market: ProcessedMarket | None, keys: APIKeys
+        self, processed_market: ProcessedMarket | None, keys: APIKeys, agent_name: str
     ) -> None:
         """On Omen, we have to store predictions along with trades, see `store_trades`."""
 
     def store_trades(
-        self, traded_market: ProcessedTradedMarket | None, keys: APIKeys
+        self,
+        traded_market: ProcessedTradedMarket | None,
+        keys: APIKeys,
+        agent_name: str,
     ) -> None:
         if traded_market is None:
             logger.warning(f"No prediction for market {self.id}, not storing anything.")
@@ -438,7 +442,7 @@ def store_trades(
         if keys.enable_ipfs_upload:
             logger.info("Storing prediction on IPFS.")
             ipfs_hash = IPFSHandler(keys).store_agent_result(
-                IPFSAgentResult(reasoning=reasoning)
+                IPFSAgentResult(reasoning=reasoning, agent_name=agent_name)
             )
 
             ipfs_hash_decoded = ipfscidv0_to_byte32(ipfs_hash)
@@ -1238,12 +1242,12 @@ def redeem_from_all_user_positions(
 
         if not conditional_token_contract.is_condition_resolved(condition_id):
             logger.info(
-                f"[{index+1} / {len(user_positions)}] Skipping redeem, {user_position.id=} isn't resolved yet."
+                f"[{index + 1} / {len(user_positions)}] Skipping redeem, {user_position.id=} isn't resolved yet."
             )
             continue
 
         logger.info(
-            f"[{index+1} / {len(user_positions)}] Processing redeem from {user_position.id=}."
+            f"[{index + 1} / {len(user_positions)}] Processing redeem from {user_position.id=}."
         )
         original_balances = get_balances(public_key, web3)
@@ -1264,9 +1268,11 @@ def redeem_from_all_user_positions(
 def get_binary_market_p_yes_history(market: OmenAgentMarket) -> list[Probability]:
     history: list[Probability] = []
     trades = sorted(
-        OmenSubgraphHandler().get_trades(  # We need to look at price both after buying or selling, so get trades, not bets.
+        OmenSubgraphHandler().get_trades(
+            # We need to look at price both after buying or selling, so get trades, not bets.
             market_id=market.market_maker_contract_address_checksummed,
-            end_time=market.close_time,  # Even after market is closed, there can be many `Sell` trades which will converge the probability to the true one.
+            end_time=market.close_time,
+            # Even after market is closed, there can be many `Sell` trades which will converge the probability to the true one.
         ),
         key=lambda x: x.creation_datetime,
     )
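Taken together, the patch threads the agent's class name (the new `agent_name` property) from `after_process_market` into `store_prediction` / `store_trades` and, on Omen, into the IPFS payload. Because `IPFSAgentResult` declares `extra="forbid"` and the new field has no default, payloads without `agent_name` no longer validate. A minimal sketch of that behaviour under these assumptions: the values are illustrative, and only the module path shown in the diff is taken as given.

# Illustrative sketch, not part of the patch.
from pydantic import ValidationError

from prediction_market_agent_tooling.markets.omen.data_models import IPFSAgentResult

# At runtime the name comes from `self.__class__.__name__` of the deployed agent;
# "ExampleTraderAgent" is just a stand-in value here.
result = IPFSAgentResult(
    reasoning="Bought YES because recent news strongly favours the outcome.",
    agent_name="ExampleTraderAgent",
)
print(result.model_dump_json())  # both fields are serialized into the stored payload

try:
    IPFSAgentResult(reasoning="missing the new required field")
except ValidationError as err:
    # Payloads that lack `agent_name` (or carry unknown keys, due to extra="forbid")
    # now fail validation against this model.
    print(err)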