From 14e4a44091ddb8f535713b1d6d36c58499dffc97 Mon Sep 17 00:00:00 2001 From: Carson Johnson Date: Wed, 5 Nov 2025 16:27:42 -0500 Subject: [PATCH] Fix JSON parsing in json_utils.py to handle LLM responses - Enhanced regex patterns to extract JSON objects and arrays from code blocks - Support for various language tags (json, python, py) and no tags - Improved manual extraction for both {} and [] structures - Added robust error handling with debug messages - Fixes LLM code capture issues in mission control and audit agents Also includes: - Control primitives for KSP operations - Flight agent improvements - Skills database updates - Documentation updates --- README.md | 117 +++- kosmos/__init__.py | 1 + kosmos/agents/flight.py | 2 +- kosmos/control_primitives/__init__.py | 17 + kosmos/control_primitives/dock_with_target.py | 80 +++ .../execute_maneuver_node.py | 64 ++ .../explore_until_body_found.py | 32 + kosmos/control_primitives/land_vessel.py | 82 +++ kosmos/control_primitives/launch_vessel.py | 83 +++ kosmos/control_primitives/should_stage.py | 15 + .../target_vessel_by_name.py | 12 + kosmos/control_primitives/transfer_fuel.py | 40 ++ kosmos/prompts/__init__.py | 10 + kosmos/prompts/flight_response_format.txt | 7 + kosmos/skill/chroma_db/chroma.sqlite3 | Bin 0 -> 163840 bytes kosmos/utils/debug_utils.py | 76 +++ kosmos/utils/file_utils.py | 568 ++++++++++++++++++ kosmos/utils/json_utils.py | 131 ++-- kosmos/utils/record_utils.py | 227 +++++++ krpc_readme_llm.md | 10 + main.py | 83 ++- poetry.lock | 77 ++- pyproject.toml | 4 +- 23 files changed, 1666 insertions(+), 72 deletions(-) create mode 100644 kosmos/__init__.py create mode 100644 kosmos/control_primitives/__init__.py create mode 100644 kosmos/control_primitives/dock_with_target.py create mode 100644 kosmos/control_primitives/execute_maneuver_node.py create mode 100644 kosmos/control_primitives/explore_until_body_found.py create mode 100644 kosmos/control_primitives/land_vessel.py create mode 100644 kosmos/control_primitives/launch_vessel.py create mode 100644 kosmos/control_primitives/should_stage.py create mode 100644 kosmos/control_primitives/target_vessel_by_name.py create mode 100644 kosmos/control_primitives/transfer_fuel.py create mode 100644 kosmos/skill/chroma_db/chroma.sqlite3 create mode 100644 kosmos/utils/debug_utils.py diff --git a/README.md b/README.md index 7c573ed..7dad10c 100644 --- a/README.md +++ b/README.md @@ -5,8 +5,9 @@ AI Agents controlling Kerbal Space Program. - **Kerbal Space Program** installed - **Python 3.8+** installed - **Poetry** for dependency management +- **OpenAI API key** for the language models -## Setup +## Installation ### 1. Install Poetry (if not already installed) ```bash @@ -32,31 +33,117 @@ cd KOSMOS poetry install ``` -### 4. Usage +## Getting Started +KOSMOS uses OpenAI's GPT-4 as the language model. You need to have an OpenAI API key to use KOSMOS. You can get one from [here](https://platform.openai.com/api-keys). + +After the installation process, you can run KOSMOS by: + +```python +from kosmos import Kosmos + +openai_api_key = "YOUR_API_KEY" + +kosmos = Kosmos( + openai_api_key=openai_api_key, +) + +# Start mission execution +kosmos.learn() +``` + +If you are running KOSMOS for the first time, make sure to: 1. **Launch Kerbal Space Program** 2. **Load a scenario or flight** (make sure you have an active vessel) 3. **Start the kRPC server** in KSP (usually via the kRPC menu) -4. **Run the Python script:** +4. 
**Run the Python script** + +## Usage Examples + +### Basic Mission Execution +```python +from kosmos import Kosmos + +kosmos = Kosmos( + openai_api_key="YOUR_API_KEY", +) + +# Start lifelong learning and mission execution +kosmos.learn() +``` + +### Resume from Checkpoint +If you stop the learning process and want to resume from a checkpoint later: + +```python +from kosmos import Kosmos + +kosmos = Kosmos( + openai_api_key="YOUR_API_KEY", + checkpoint_dir="YOUR_CKPT_DIR", + resume=True, +) + +# Resume mission from checkpoint +kosmos.learn() +``` + +### Execute Specific Task +If you want to run KOSMOS for a specific task: + +```python +from kosmos import Kosmos + +# First instantiate KOSMOS with maneuver library +kosmos = Kosmos( + openai_api_key="YOUR_API_KEY", + maneuver_library_dir="./maneuver_library/trial1", # Load learned maneuvers + checkpoint_dir="YOUR_CKPT_DIR", + resume=False, +) + +# Run task decomposition +task = "Launch to orbit" # e.g. "Launch to orbit", "Land on Mun", "Dock with space station" +sub_goals = kosmos.decompose_task(task=task) + +# Execute the sub-goals +kosmos.execute_task(sub_goals=sub_goals) +``` + +## Quick Start + +1. **Set your OpenAI API key:** + ```bash + export OPENAI_API_KEY="your-api-key-here" + ``` + +2. **Launch KSP and start kRPC server** + +3. **Run KOSMOS:** ```bash poetry run python main.py ``` ## Project Structure -- `main.py` - Basic vessel position streaming example -- `pyproject.toml` - Poetry configuration and dependencies -- `poetry.lock` - Locked dependency versions +- `main.py` - Main entry point +- `kosmos/` - Core KOSMOS package + - `agents/` - AI agents (Flight, Mission Control, Maneuver, Audit) + - `env/` - KSP environment interface + - `utils/` - Utility functions + - `control_primitives/` - Executable maneuver implementations + - `control_primitives_context/` - Context examples for AI agents +- `examples/` - Usage examples +- `checkpoint/` - Checkpoint directory (created automatically) -## Example Output -The basic script will stream your vessel's position coordinates in real-time: -``` -0.5.4 -(2688577.068673832, -7.589481473858227, 465412.3802019775) -(2688577.068673832, -7.589481473858227, 465412.3802019775) -... -``` +## Available Agents + +- **FlightAgent** ✅ - Executes flight maneuvers and controls vessels +- **MissionControlAgent** ⚠️ - Plans and coordinates mission phases +- **ManeuverAgent** ⚠️ - Manages specific maneuver execution +- **AuditAgent** ⚠️ - Monitors and validates mission progress ## Development + To add new dependencies: ```bash poetry add package-name @@ -74,4 +161,4 @@ poetry run python main.py 4. Submit a pull request ## License -See [LICENSE](LICENSE) file for details. +See [LICENSE](LICENSE) file for details. 
\ No newline at end of file diff --git a/kosmos/__init__.py b/kosmos/__init__.py new file mode 100644 index 0000000..1d42eeb --- /dev/null +++ b/kosmos/__init__.py @@ -0,0 +1 @@ +from .kosmos import Kosmos diff --git a/kosmos/agents/flight.py b/kosmos/agents/flight.py index b625c3a..1321d92 100644 --- a/kosmos/agents/flight.py +++ b/kosmos/agents/flight.py @@ -57,7 +57,7 @@ def get_vessel_telemetry(self, env=None): def construct_system_message(self, maneuvers=[]): print(f"🔍 DEBUG: FlightAgent constructing system message with {len(maneuvers)} maneuvers") - system_template = load_prompt("new_flight_template") + system_template = load_prompt("flight_template") # Load MechJeb documentation mechjeb_docs = load_prompt("mechjeb_readmellm") # Load kRPC documentation diff --git a/kosmos/control_primitives/__init__.py b/kosmos/control_primitives/__init__.py new file mode 100644 index 0000000..25ebd5f --- /dev/null +++ b/kosmos/control_primitives/__init__.py @@ -0,0 +1,17 @@ +import importlib.util +import os +import kosmos.utils as U + +def load_control_primitives(primitive_names=None): + package_path = importlib.util.find_spec("kosmos").submodule_search_locations[0] + if primitive_names is None: + primitive_names = [ + primitive[:-3] + for primitive in os.listdir(f"{package_path}/control_primitives") + if primitive.endswith(".py") and primitive != "__init__.py" + ] + primitives = [ + U.load_text(f"{package_path}/control_primitives/{primitive_name}.py") + for primitive_name in primitive_names + ] + return primitives \ No newline at end of file diff --git a/kosmos/control_primitives/dock_with_target.py b/kosmos/control_primitives/dock_with_target.py new file mode 100644 index 0000000..58fc35e --- /dev/null +++ b/kosmos/control_primitives/dock_with_target.py @@ -0,0 +1,80 @@ +async def dock_with_target(conn, approach_distance=50, final_approach_speed=0.5): + """ + Dock with target vessel (similar to complex multi-step operations) + """ + global _dock_fail_count + + vessel = conn.space_center.active_vessel + control = vessel.control + space_center = conn.space_center + + if not space_center.target_vessel: + print("No target vessel selected for docking") + return False + + target = space_center.target_vessel + + try: + # Phase 1: Close approach + control.rcs = True + control.sas = True + control.sas_mode = space_center.SASMode.target + + # Get close to target + while True: + target_distance = vessel.orbital_reference_frame.position(target).magnitude + + if target_distance < approach_distance: + break + + # Thrust toward target + if target_distance > 1000: + control.throttle = 0.1 + elif target_distance > 100: + control.throttle = 0.05 + else: + control.throttle = 0.01 + + time.sleep(0.5) + + control.throttle = 0.0 + print(f"Close approach achieved: {target_distance:.1f}m") + + # Phase 2: Final docking approach + control.sas_mode = space_center.SASMode.stability_assist + + # Find docking ports + my_port = find_docking_port(vessel) + target_port = find_docking_port(target) + + if not my_port or not target_port: + print("Could not find docking ports on vessels") + return False + + # Approach target port + while not vessel.parts.docking_ports[0].state.name == 'docked': + # Simple approach logic + time.sleep(1) + + # In real implementation, this would use precise navigation + # This is simplified for the primitive example + + print("Docking successful!") + control.rcs = False + return True + + except Exception as err: + control.throttle = 0.0 + control.rcs = False + print(f"Docking failed: {err}") + 
_dock_fail_count += 1 + if _dock_fail_count > 3: + raise Exception("Docking failed too many times") + return False + +def find_docking_port(vessel): + """Find available docking port on vessel""" + for port in vessel.parts.docking_ports: + if port.state.name == 'ready': + return port + return None \ No newline at end of file diff --git a/kosmos/control_primitives/execute_maneuver_node.py b/kosmos/control_primitives/execute_maneuver_node.py new file mode 100644 index 0000000..07accf0 --- /dev/null +++ b/kosmos/control_primitives/execute_maneuver_node.py @@ -0,0 +1,64 @@ +async def execute_maneuver_node(conn, node=None, tolerance=1.0): + """ + Execute a maneuver node (similar to smeltItem - complex process with validation) + """ + global _burn_fail_count + + vessel = conn.space_center.active_vessel + control = vessel.control + + if node is None: + if not control.nodes: + print("No maneuver nodes to execute") + return False + node = control.nodes[0] + + try: + # Calculate burn time + isp = vessel.specific_impulse * 9.82 # Convert to m/s + mass = vessel.mass + delta_v = node.delta_v + burn_time = mass * isp * (1 - math.exp(-delta_v / isp)) / vessel.available_thrust + + # Wait until burn time + burn_ut = node.ut - (burn_time / 2) + lead_time = 5 # Start orienting 5 seconds early + + # Warp to maneuver time + conn.space_center.warp_to(burn_ut - lead_time) + + # Orient to maneuver + control.sas = True + control.sas_mode = conn.space_center.SASMode.maneuver + + # Wait for orientation + time.sleep(3) + + # Execute burn + remaining_delta_v = node.remaining_delta_v + control.throttle = 1.0 + + while remaining_delta_v > tolerance: + remaining_delta_v = node.remaining_delta_v + + # Throttle down as we approach target + if remaining_delta_v < 10: + control.throttle = 0.1 + elif remaining_delta_v < 50: + control.throttle = 0.5 + + time.sleep(0.1) + + control.throttle = 0.0 + node.remove() + + print(f"Maneuver executed successfully, {remaining_delta_v:.1f} m/s remaining") + return True + + except Exception as err: + control.throttle = 0.0 + print(f"Maneuver execution failed: {err}") + _burn_fail_count += 1 + if _burn_fail_count > 5: + raise Exception("Maneuver execution failed too many times") + return False \ No newline at end of file diff --git a/kosmos/control_primitives/explore_until_body_found.py b/kosmos/control_primitives/explore_until_body_found.py new file mode 100644 index 0000000..d7751ee --- /dev/null +++ b/kosmos/control_primitives/explore_until_body_found.py @@ -0,0 +1,32 @@ +async def explore_until_body_found(conn, target_body_name, max_time=3600): + """ + Explore space until target body is found (similar to exploreUntil) + """ + space_center = conn.space_center + vessel = space_center.active_vessel + + # Check if we can already see the target + if target_body_name in space_center.bodies: + target_body = space_center.bodies[target_body_name] + print(f"Found {target_body_name}") + return target_body + + start_time = time.time() + + print(f"Exploring space to find {target_body_name}...") + + # Simple exploration - change orbital plane and look around + while time.time() - start_time < max_time: + # In real implementation, this would use proper orbital mechanics + # to search for celestial bodies + + # Check if we found the target + if target_body_name in space_center.bodies: + target_body = space_center.bodies[target_body_name] + print(f"Found {target_body_name} after {time.time() - start_time:.1f} seconds") + return target_body + + time.sleep(10) # Search interval + + print(f"Could not find 
{target_body_name} within {max_time} seconds") + return None \ No newline at end of file diff --git a/kosmos/control_primitives/land_vessel.py b/kosmos/control_primitives/land_vessel.py new file mode 100644 index 0000000..fcfc8b5 --- /dev/null +++ b/kosmos/control_primitives/land_vessel.py @@ -0,0 +1,82 @@ +async def land_vessel(conn, target_body=None, deploy_gear=True, target_speed=5.0): + """ + Land vessel on surface (similar to placeItem - precise positioning) + """ + global _land_fail_count + + vessel = conn.space_center.active_vessel + control = vessel.control + + if target_body is None: + target_body = vessel.orbit.body + + try: + # Pre-landing checks + if vessel.situation.name == 'landed': + print("Vessel is already landed") + return True + + # Deorbit burn (simplified) + if vessel.orbit.periapsis_altitude > 0: + print("Executing deorbit burn...") + control.sas = True + control.sas_mode = conn.space_center.SASMode.retrograde + control.throttle = 1.0 + + # Burn until periapsis is negative + while vessel.orbit.periapsis_altitude > -10000: + time.sleep(0.1) + + control.throttle = 0.0 + + # Descent phase + print("Beginning descent...") + altitude_stream = conn.add_stream(getattr, vessel.flight(), 'surface_altitude') + speed_stream = conn.add_stream(getattr, vessel.flight(), 'speed') + vertical_speed_stream = conn.add_stream(getattr, vessel.flight(), 'vertical_speed') + + # Deploy parachutes if available and in atmosphere + if target_body.has_atmosphere and vessel.parts.parachutes: + for parachute in vessel.parts.parachutes: + if parachute.can_deploy: + parachute.deploy() + print("Parachutes deployed") + + # Powered landing phase + gear_deployed = False + + while vessel.situation.name != 'landed': + altitude = altitude_stream() + speed = speed_stream() + vertical_speed = vertical_speed_stream() + + # Deploy landing gear + if altitude < 1000 and not gear_deployed and deploy_gear: + control.gear = True + gear_deployed = True + print("Landing gear deployed") + + # Suicide burn calculation (simplified) + if altitude < 500 and vertical_speed < -10: + control.sas = True + control.sas_mode = conn.space_center.SASMode.retrograde + + # Calculate throttle needed + if vertical_speed < -target_speed: + control.throttle = min(1.0, abs(vertical_speed) / 20) + else: + control.throttle = 0.0 + + time.sleep(0.1) + + control.throttle = 0.0 + print(f"Landing successful! 
Touchdown speed: {speed:.1f} m/s") + return True + + except Exception as err: + control.throttle = 0.0 + print(f"Landing failed: {err}") + _land_fail_count += 1 + if _land_fail_count > 3: + raise Exception("Landing failed too many times") + return False \ No newline at end of file diff --git a/kosmos/control_primitives/launch_vessel.py b/kosmos/control_primitives/launch_vessel.py new file mode 100644 index 0000000..0b120af --- /dev/null +++ b/kosmos/control_primitives/launch_vessel.py @@ -0,0 +1,83 @@ +async def launch_vessel(conn, target_apoapsis=100000, target_inclination=0): + """ + Launch vessel to specified orbit + Similar to craftItem - executes a complex sequence with error handling + """ + global _stage_fail_count + + if target_apoapsis < 70000: + raise ValueError("target_apoapsis must be at least 70000m for stable orbit") + + vessel = conn.space_center.active_vessel + control = vessel.control + + try: + # Pre-launch checks + if vessel.situation.name != 'pre_launch': + raise ValueError("Vessel must be on launchpad to launch") + + # Initial staging and launch + conn.space_center.physics_warp_factor = 1 + control.sas = True + control.rcs = False + control.throttle = 1.0 + + # Launch! + control.activate_next_stage() + + # Monitor ascent + altitude_stream = conn.add_stream(getattr, vessel.flight(), 'mean_altitude') + apoapsis_stream = conn.add_stream(getattr, vessel.orbit, 'apoapsis_altitude') + + # Simple gravity turn + vessel.auto_pilot.engage() + vessel.auto_pilot.target_pitch_and_heading(90, 90) + + turn_start_altitude = 250 + turn_end_altitude = 45000 + + while apoapsis_stream() < target_apoapsis: + altitude = altitude_stream() + + # Execute gravity turn + if turn_start_altitude < altitude < turn_end_altitude: + turn_progress = (altitude - turn_start_altitude) / (turn_end_altitude - turn_start_altitude) + target_pitch = 90 - (turn_progress * 90) + vessel.auto_pilot.target_pitch_and_heading(target_pitch, 90) + + # Check for staging opportunities + if should_stage(vessel): + control.activate_next_stage() + time.sleep(1) + + time.sleep(0.1) + + control.throttle = 0.0 + print(f"Target apoapsis of {target_apoapsis}m reached") + + # Coast to apoapsis for circularization + while vessel.orbit.time_to_apoapsis > 60: + time.sleep(1) + + # Circularization burn + control.throttle = 1.0 + periapsis_stream = conn.add_stream(getattr, vessel.orbit, 'periapsis_altitude') + + while periapsis_stream() < target_apoapsis * 0.9: + time.sleep(0.1) + + control.throttle = 0.0 + vessel.auto_pilot.disengage() + + print(f"Successfully reached orbit: {vessel.orbit.apoapsis_altitude:.0f} x {vessel.orbit.periapsis_altitude:.0f}m") + return True + + except Exception as err: + control.throttle = 0.0 + if vessel.auto_pilot.engaged: + vessel.auto_pilot.disengage() + print(f"Launch failed: {err}") + _stage_fail_count += 1 + if _stage_fail_count > 3: + raise Exception("Launch failed too many times, check vessel design") + return False \ No newline at end of file diff --git a/kosmos/control_primitives/should_stage.py b/kosmos/control_primitives/should_stage.py new file mode 100644 index 0000000..6e69700 --- /dev/null +++ b/kosmos/control_primitives/should_stage.py @@ -0,0 +1,15 @@ +def should_stage(vessel): + """Check if vessel should stage (similar to recipe validation in craftItem)""" + # Check if current stage engines are out of fuel + resources = vessel.resources_in_decouple_stage(vessel.control.current_stage, cumulative=False) + + # If we have SRBs and they're empty, stage + if resources.max('SolidFuel') > 0 and 
resources.amount('SolidFuel') < 0.1: + return True + + # If liquid fuel engines are empty, stage + if (resources.max('LiquidFuel') > 0 and resources.amount('LiquidFuel') < 0.1 and + resources.max('Oxidizer') > 0 and resources.amount('Oxidizer') < 0.1): + return True + + return False \ No newline at end of file diff --git a/kosmos/control_primitives/target_vessel_by_name.py b/kosmos/control_primitives/target_vessel_by_name.py new file mode 100644 index 0000000..e3f3120 --- /dev/null +++ b/kosmos/control_primitives/target_vessel_by_name.py @@ -0,0 +1,12 @@ +async def target_vessel_by_name(conn, vessel_name): + """Target a vessel by name (similar to item finding)""" + space_center = conn.space_center + + for vessel in space_center.vessels: + if vessel.name == vessel_name: + space_center.target_vessel = vessel + print(f"Targeted vessel: {vessel_name}") + return vessel + + print(f"No vessel named '{vessel_name}' found") + return None \ No newline at end of file diff --git a/kosmos/control_primitives/transfer_fuel.py b/kosmos/control_primitives/transfer_fuel.py new file mode 100644 index 0000000..be05e70 --- /dev/null +++ b/kosmos/control_primitives/transfer_fuel.py @@ -0,0 +1,40 @@ +async def transfer_fuel(conn, from_tank, to_tank, fuel_type='LiquidFuel', amount=None): + """ + Transfer fuel between tanks (similar to chest operations) + """ + vessel = conn.space_center.active_vessel + + try: + # Find source and destination tanks + source_tank = None + dest_tank = None + + for part in vessel.parts.all: + if part.name == from_tank and part.resources.amount(fuel_type) > 0: + source_tank = part + elif part.name == to_tank and part.resources.max(fuel_type) > 0: + dest_tank = part + + if not source_tank: + print(f"No source tank '{from_tank}' with {fuel_type} found") + return False + + if not dest_tank: + print(f"No destination tank '{to_tank}' found") + return False + + # Calculate transfer amount + if amount is None: + available = source_tank.resources.amount(fuel_type) + capacity = dest_tank.resources.max(fuel_type) - dest_tank.resources.amount(fuel_type) + amount = min(available, capacity) + + # Perform transfer (simplified - real implementation would use proper API) + print(f"Transferring {amount:.1f} units of {fuel_type} from {from_tank} to {to_tank}") + + # In real kRPC, this would use the resource transfer API + return True + + except Exception as err: + print(f"Fuel transfer failed: {err}") + return False \ No newline at end of file diff --git a/kosmos/prompts/__init__.py b/kosmos/prompts/__init__.py index fc98192..15e0dc2 100644 --- a/kosmos/prompts/__init__.py +++ b/kosmos/prompts/__init__.py @@ -3,5 +3,15 @@ import kosmos.utils as U def load_prompt(prompt_name): + # Use __file__ to get the actual prompts directory location + # This ensures we use the correct prompts directory even if Python + # finds the package from a different location + current_dir = os.path.dirname(__file__) + prompt_path = os.path.join(current_dir, f"{prompt_name}.txt") + + if os.path.exists(prompt_path): + return U.load_text(prompt_path) + + # Fallback to package path (for installed packages) package_path = importlib.util.find_spec("kosmos").submodule_search_locations[0] return U.load_text(f"{package_path}/prompts/{prompt_name}.txt") \ No newline at end of file diff --git a/kosmos/prompts/flight_response_format.txt b/kosmos/prompts/flight_response_format.txt index 0d8d36f..d8affba 100644 --- a/kosmos/prompts/flight_response_format.txt +++ b/kosmos/prompts/flight_response_format.txt @@ -10,4 +10,11 @@ Code: ... 
# main function after the helper functions def your_main_function_name(conn, vessel): + # Use ONLY MechJeb autopilots and modules + # Example: mj = conn.mech_jeb + # Example: mj.ascent_guidance.enabled = True + # Example: mj.landing_autopilot.enabled = True + # Example: mj.docking_autopilot.enabled = True + # Example: mj.maneuver_planner.operation_transfer.make_nodes() + # Example: mj.node_executor.execute_all_nodes() # ... \ No newline at end of file diff --git a/kosmos/skill/chroma_db/chroma.sqlite3 b/kosmos/skill/chroma_db/chroma.sqlite3 new file mode 100644 index 0000000000000000000000000000000000000000..7dd146f66438df0f6e404bc4428418db9424182a GIT binary patch literal 163840 zcmeI53vgRkdYAz|01+f`Wy>-w%UT`L4-ytZ;ac0MS?i&Okn(K$G{V~#yi+j#J_dnnNpXa^jaQ)g+Ri~uVY}I9*Bz?zye!uV2 zg!p{EFTww5_-}n?;b6r20pEVtalgYa`DS0=9%Ye*_gI{>!g~|nnAjcv%=oF$*F!%w zRvbHb?0dnl2KRz52EG!|N8cU&iIJNl&kkQ6_7A-?RP=w>|B~-(zAw9z4@?I5&(5iH z!Dv43?=0(bxkhQdOjWho*eq4rwVF!qrCyZ7107S;;$#R4hLKfhEUQ@tZiO>1}BAukqQaRELYVDaE?xSxH#v@bcnzQtW~ECW_405 zyj=7!F(a^>axi3NwMbSrmX<=Xg~tNX>ta7;ot)RHE;H^W@!+{fI@)Ol+Q0um7ijCM zgV#hoxj7ha=E`N{wZi4XT480Ouukmg8qkl;@a)~7yGF*Xycwt6mf_*X=_-)o*0Oxa zI9;hVWh(&6$dYl^<@JP{8}1bg#gdN>va_S33z;^)cj6Q`^!`mRD$R)>>WxM_h{UCs zncFTW@UpOPVLOR8+;B^;hZyN9Eh_6&EkPYwTwE@!7w4C++D=3+6)w+jEEUPZ#u^x? zShAxS7jLXAUfU=TR$-VPI^$RnQ~{J19tlJj&h+y_^Fpm8T1O9e^3h=Q%o+b)fn&7J zTi-zs!b$7&q=T?`8PRvQX-_Xwn!N**X01jQz1nOTm${tO31Uua(+b5dJ{*W%JKry} zbs2nj6~U0vwaI@NT! zzRk*qtvj&C(|bmdzt9E?4dpB!C>_9$NhKC za|*5?*Wn3I?k8O#gFJ!6Ru@~mo7v?V&W{A>kRH~b$&uYy^392|( z&1RK0rtOTakx|UV2LjQ;*?uOnZ(5SKIe8(9qf~O7tzLCuQh3K`GYB zmaLIxqqYlz+V&u~s`?hh*{tr+22UUqi$?;{`P2QZW2#mX%>&`Ja4>rMw7(;8%H~<~ zE8-z+o*cl}lQUi;KQ|GGuE+Z+V^*jot9`d?X8TNd@3HYZw-{AUnU(x-0!lQ9_ zsHwVj+Un!Di;NjI(02X%2xe!1wF*WtJ!}Vol$*_3udalvZT2>OM(3e$ED(Jq*^jSN zQ`taEuIkS|Ikp!M2BXQO|Bjzi?r~j@Q^!24dqgmCyFx7Dt~$yIy*HUV`azMOAqMu& zQ^UdN)Rcd3ob!^Eo9$|gmTtA6lkWL3;$h?oGU>2RxAk##Q(bm9Jjsf~zS+T6kV zYgVgZH?F)4T9ISPp+TL?dUexigrQI591rhZ@bk{)ofyxZ6XF~VdGh6)?~y6BYP2i1 zh6nSrLtE|^Za~aRwFcGUfds*$>UgKE}TPn&fvB8_H)U&Q)^v)&f~YBXTQ8bD{aA zB1`}nGa1%dBN-Tq%rCtTC$C zIRB;6^tp zsSlv14;e9yYO3zO6Tu4;1VEl4fPNp() zN|MD~IU|>|6;a9O@-T_EPXnx&zWt=3dw}VlaqDiCSgZ5Xl?!7=QEft2pdb^iy}jLR zL4T0X&Jksc!t9jM`-5q^mSN@@0-Zz|jH|V4I{Yx=%0_JFCe_AHv$jJ`A8Bkt`4LgrAJE!9F-URg|L zvPw?MlyjA`oUY_E6)9V$6;%<{l%z^uI6cA&;ulS49$}eG#mxah+^ViLS57_sgT>3d zNiV#-xL#a02Q;pEsNRz()(DA@BcP@qkfN=?fL7^;V(d2wbaqr|3m^!2t2<()>cf=w{gC zU^XB3()`i}OeIfGnF&FsfeqKEVsnfks{y<20L>>f#@*3L6Y9j4(XwxBEMC&Aj!pN? 
zE#;v2u;GZ2KjWTka>mhz6;bwy_Ygb)#MF$Vh?TJWoShfDpVh6-0he+q#cDU0I)fA<#`p<#JFx(p*IWW71SY zN-9}VOwd#+nM~)!PH;F7^lxkynkf~>gng+77SH}m@1MrHt1VDTOHm3wWm>>-|w`xQ~r%X zfkV}(6nahruxaj4th**Nb#$8T-jA#9fvFsh$~7M9HElB&lJa6wfgWQzkxal)BA;e9 zOr>&(WR~XgqO^Y=#^7Uxx7H0yGDC_kpXzH%2k~7S*nZ<^$7)>zad)NK!$i)lyRGC3 zu?CDiP5)8P039@yM^kIIXD*b?CDW3cmC~{*Dq<#)O;uziB_+y|1cUm?u@K1PLd++8 zT8If>5Z)BF1x0vC_#eW55WXw?Q{kJ!*Mwgaep&dr4`v1cQ$Yeq00|%gB!C2v01`j~ zNB{{S0VHsb6L@$e?)Td-kZL7(JFZQKj-MQfj~zHIOpKiJ`}yOs@@T~MVY-zeV_0ijQPJW{72#63;$O5j__^ap9$X*epmR1!f(S2z;6n_F8pobZwS96{1xFZ z2|p|RwD6O{zVPG1uAmD|;j;o2eoS~3W(}?hOTrc5dEugv6OzI#%qTn|oE07wP723` zaUmf1BmX<{KO_Go@}DAq6#2u*cO(BQ^3NmxIP#AozZ3aJG!WcY0O zRQN@zsg1O#JG^uT1>X#4k+z z#fhJpcn6^32MHhnB!C2v01`j~NB{{Sfo=lBou`L=W5b;om(yHMarqRNPjdMLmydHf z$>ljN372QN{1lgGxO|Mu(_B8vmtig^ zxE$v)#N`;5$G8k~8Q^l1%MmVzxg6rs&!vya&olAkOuWg&=a_hdiPxFfW#Tpyx0u*r zqRoWPgvLaRiJMGpGtp$C!9<;j8WT5|_$(7uCbpQ^WTL_ZWkO{_VWP~0%*2l|QDWlH zGV!BKyvD?a_8i}$z79s zCU;Ekm)tG6S8}K1KFM8@dn9*A?vLCZxi@lW|ns2_OL^fCP{L52`e;ohc#|Fl%Apsv+N11p9)qSN(FKIDcurIG@g?GHDgkP+_0#c&Z|%;$=n5#S?i72~oBcpV;yHM>ndc zHp)u6EYfl%UQVWHJSCnWT?^L}6`^T@Y9YzOy|Nnk;Y0MK6Kmter2_OL^fCP{L501`j~NB{{S0VIF~kN^@u0!RP}Ac6aV06zb}AFPOZKmter2_OL^ zfCP{L5-qoSh!~0lkN^@u0!RP}AOR$R1dsp{Kmter34CG+u;>4N z;d?&#haV(>1dsp{Kmter2_OL^fCP{L5gkX+>_=^pZ{+azkg&|8e|( zUl}Ws-dAN^F3TFVe!HLlf7>T~`(Dc- z>VpK301`j~NB{{S0VIF~kN^@u0!RP}+>OB0@Wz-m1JE@wz<*j{#{2)@@d@9#TZ$Nt z1dsp{Kmter2_OL^fCP{L5*-u7lUC(STrJDVc&Z|%;$=n5#S?j& zl@zLonS`h`Ra#Q1rnIWtdbQcmtjFhr9<9GIxkDAb*(y~VD!o1V%o~$i4eb{D*0yDZ zPChePlO|^-X{Dqj1WwILGn3kGLn-N75VX~-sr=f+n{PsL^)gk}YGbohX*bOKG}^V=o5u5heEm7~{~rkaAOR$R1dsp{Kmter2_OL^ zfCP{L5>!F_- zD~_Ez_PyX&gL}aj178W~qwkLX#K_H&XNNBj`-k2cD*C_cf64bX-^=CK z5+3!YrZu`*Rc8piDp8*y+nbH%47s|txIDl13VE^ciaR(KBi9#;FRX49$=d4m#Y>%0 z5{%Bx`8((OLkg=a*KWkLhQF zQBgeBIoU^zgBqCkera*7xG}$ETH#*Tynb(wK5i3iU; z($P*c(Ej}gx_;_*MNR(hG*{v-8C|9<;^(l zwhRw9PFH~xx0dBY#_39}DO&+h4wsCxF0Uut+;Fd0D3*M5kewYJUC6ZYy%VRnq4#fk zQE5*6P;WHaK_o84%-nW4ftQ7S3)@M&;f7m!J;X>?X;E3HYDv~fadEk@UYuXPYC93R zRJc69u~Z}r8*5;oV#$tTT)eTecx|IVScPGF=!|1QPz6w4cq9;AIMdGy%?q`XXdOM= z$w!0HGiUsJ1&+}=Z+!fq%qAQTo zFiKc=b)i@5RMX}9HY*>t?#!M5!gz_!%PAB~KNN^AKHe`|4h^j&xqh5XpE&GgqU*AU z_M``c(Z?V6-#O1IxPn}VCp@{IbcGD^1QL&vcB6W;P041hS!S;o@*1Wg*Snp=6&0`$ zRza3!jq>O!AZ%#NGt6enC=FJe17(o7e8-B!>o>Bx0!3`80KI~R`SpeQONAMt@Yfkj zpVgX;5>#=nn$0S0Oxqb-Bcqs!4+Nrxv;9nD-?SvzhYyO!4|j=ZU-H0SS_nqZp7r0! 
zbJN%X_Q7$FS?tpT3a5+n`!oqJN;)@)aX65>5v3n>o{GiSPHl!T)G?-Fp`o=~l;~Sf zPs-47f>NxLEm7IOioRH`~<~E!}ECC*AX7#KXuFWYS@sZtLUfrn>BIc#;)| zeY2qn)Yr1!)~r^+Zd`d8v?9lnLxVb(_3Eb42t%L9IUe4-;OCvoJ29R+C&W1#^5n}o z-y>6M)o5324G-pLhql};+<=&sY7MHx0}ZGr(=78aUh;M<4&D?h)UCV4aan;(Lq=wZ zscOt~$5=S{nlI}AzEAjF;b$ZNEAsX5{|kR>;@ye;z}ol=#~um&Lhy~z-y9tosSN*v z;U~tvKla?Qcl_TU|D)k2oN+iFeWD2LC1!cEbms&wBKD9-D?xv+?M&5}zt4G!OLuI$ zS>hw>Ub7M0+kT+Ylhfa}()SM9^yYR|=~W!8D%%@<2SzK+79ZmoC5twcCR--s>Oxwr zIzJVPbsl><7+qfWcZ{yK4Z)c%(x}{0wn|2)^lstP9;AGRt80G8libww4bR|&>~v2y zkytRgy6W#-@LKeMdTLYAs;@gH?iXR|unL+9_MeK05mlZVv`;{HI>i;q4Pj4m!7!s0L< z>(7!6Jf1kL$}Dn!imo}wy`VW4W{iHE+gqjT;F(XzT-f2dx=vp!^!vWE(dVA?cg}eH zDsaJ^B!BeDAP-*f1aEd1ONW;|dv3U#&$Qj$CTn53l~qsfIa~LvAk1(#Z_ozR3>T+h z8W%cN^{H6XoQ^d+Quds!Gwu2TO~;y*daRR-2cs{F{?08vP3&aaavL_xx96A-1``v; z;>xAM%dY9Qo|m&3g&s*vLm#VZ1D;z8@cRd${7Xgqa37%hOtw4cWI$nT)O*29^;knL*+YHP=`bhS^oH9gW} zgz=yD-TuTrMu3jws~_xRguv5gUDJ8wg7HM^inp>cyLsJR7w>8B6Div*yphbl;GmIU zuSq+6j9~Ro@i78EMt}#BAMIlVeE)ANXgVj0K~9`qt@XZC6vvV+r`=s&+{%?7`StqGm=E+Mv-MwQ^j;%(|7|cxUybYE3#EEt9E`21=nG%<3f=)7Xr++29yt z9J6j0H(rJFl3a&{3EoL4@iALrxT_OvsOB>58Jr~K& zg+wI_#xIxUWGW-4Bw5UrGjcgw5tV!{4}UXcp9WYlefvp6_W;v9`q~<{)DYST85ct2rLqn!MIwxro#^-u583+ zZc=URG;2H5^buICA(IBZW%`KRP$93|yKEJQzGW;jB~7T}X33(?JqV7;jcToC-k2xP zl9l;8-BfN6zVaP7w(E^qsA?#Yx3_EX_f_~iX?%Gc^FEfk;g=0c!Rg@?tZ_Ehyc+j4 zl-21btf<@VXC-_6qT5;r7gF{t-G{z5MX8)kWpepsx?GXcYATaR$%zV8rDP@}ikVJ% z6zqIu@1kMnQD*0KyhZCkF3wlgcn!P8Lkt9b&5c2eR9ekCKf7S>1pu}LldK#29hfCq z3-xtbUa^@z%WvE>;~MD^uPi1rStTcB%DGBePFM1oij*zWimHfeN>ZgSoE~8X@r$N2 zkFZRp;^ut{=^aE0~6&TPe z{ZOp^9*oIVzm@cXP59AaS*GAC*7`0g`!F{(;CHs1r`Prc014`>1-=&vQE5en%FGo_7ml%0zerW@?l9`?|6M{|yTdqG9n_~=F4cK)D zXg;Aa?v75HP$#yGmVIMm@sehBY`Sl5DF?-eEs-(uXWWx*&fw_7iYWWUdk7u?Vrs@w z#7fwG&d!V7&+1m^fJ?cQVznD=9bxTX(Y1>NTcvAu(FR>JW$8LFA4VU~rHrSW=Q;20 z>A3=UfpGeIt`O*^q;ff^9%-(kfH7$*AtjZpC?;qsl}x7dVkbBpi2ASZ8ZE#u(>UpF z>1_nA`mv81?v~8H%v$3;7h-j>q$bkYl$wN^Sq1pBQd-F;WqJR~5Y*Gpy)783 z3^7$CYi!WBdbT>e18PsBIKSU*ZJhFNEE0978Y@kEZUexIRVdb7lbJd?&35nYs_ucU z95^b^!f{J?XHDB&NXm;z1$vC>L^1(GiF}&bFqO(Bl3AL|i_-piSc5-Scx&CTBr~Mw z@~OVIbP(UQf$g`B-m#j!K+7@OO+Y9>!V%ZY9*Y&TapIW>i~n%w2^R#|81dbbv0 z#Tq)8P=!`1kh5$K zRbVYDv=BGrRW%Ni4KAqeCdyf`>TaRffvv1XGz)(>qIa1z*LF}!nN=do88K(`k z3WG#8r?Ck`Nf@lcisv#L{xx8P2;=|c_kG6DuTYKih1=#yU zxxt2}usrh?OkdS6$QNK91||;jx!KUh)k|#eOP9{;g(6GTQuZQu3A6WMG$ha)nXWcq zaqRSzC?!+r49rH%KfeHb(m{tvYnNHb7QGF-&#_G!VlyNzk{KexmX*&wOGv`p59D+2 zHxbE|Nb0DGBwdN*j+#iyNko*6nn>D}Nam=CWSm5#BZJMl5;-#1oGXzdgUvgMB##VM zOxVtrJTh2Ov=d1k8LTKdi5wZMm~;|3GFUNXCz3uo*tC<#(ZOcyL^38=qkq(=3U$xG z7!`_ls>W4wnL`qDTE|s~@d;^G7s2?1=2e=?WYf89Ml2_@Nrh%|QeH|^RZ)^sW`8XV z760nnC(Mf9oW8Lp9--QYIZ>Dlb)FM=7SwxZZ|wHh?&Ou2R>m%ncYdT;mo{6Tkj#k213I^|w4|9&-aZ>TVEy}mo5rGxvNR|#meGnoW5+-XWBMNPoy zK%tc+t>iM$TcWwWixXhf^?hm@)f{%R%`ISJ?zRqdkH&Ts2hI9n>~t|_O<3?JF^-dT zZMH$CTTSQ=bZ-w~HpM-{jFG*Qp5?Tj0lpJ#-EJ;a$!BF1y5=dF%5ts(W93Q$o--uM znN+R}Vf$yt8Nat;Ccp0faooC-iyzGBtmC{q6y#p>@{SQKa_+9;!D;!BD9Pz;qLLLQ zsI=sqLbK_FEXi3VnNejaQF&`R6g)My{?0|yLS6m;UIywjb~(gw#Tldb+62qVn>Bv5 zwi_@8J7y=C?H1lOX8wUT?Xm{kj&cXKJno(+aWu89V03Ve$98<5bJy70j?RbPMrQhN zZ)tBFjG0l1pdU_c)cB!C2v01`j~NB{{S0VIF~ zkN^@u0`~)fP-xiqsF{&r|FCZY_y4;e%!+wH0!RP}AOR$R1dsp{Kmter2_OL^FqnX~ z|KH$P7>Wdt01`j~NB{{S0VIF~kN^@u0!RP}e3A*^`2Uk^J=7fuAOR$R1dsp{Kmter z2_OL^fCP}h;Rvwz|NZdC|M2hs9gY{qLIOwt2_OL^fCP{L5 bool: + """Check if message should be logged based on debug level""" + return self.levels.get(level, 1) >= self.levels.get(self.debug_level, 1) + + def _format_message(self, level: str, message: str, **kwargs) -> str: + """Format debug message with timestamp and agent info""" + timestamp = 
datetime.now().strftime("%H:%M:%S.%f")[:-3] + elapsed = f"{time.time() - self.start_time:.3f}s" + + # Color codes for different levels + colors = { + "DEBUG": "\033[36m", # Cyan + "INFO": "\033[32m", # Green + "WARNING": "\033[33m", # Yellow + "ERROR": "\033[31m", # Red + } + reset = "\033[0m" + + color = colors.get(level, "") + formatted = f"{color}🔍 {level}: {self.agent_name} [{timestamp}|{elapsed}] {message}{reset}" + + # Add any additional context + if kwargs: + context = ", ".join([f"{k}={v}" for k, v in kwargs.items()]) + formatted += f" ({context})" + + return formatted + + def debug(self, message: str, **kwargs): + """Log debug message""" + if self._should_log("DEBUG"): + print(self._format_message("DEBUG", message, **kwargs)) + + def info(self, message: str, **kwargs): + """Log info message""" + if self._should_log("INFO"): + print(self._format_message("INFO", message, **kwargs)) + + def warning(self, message: str, **kwargs): + """Log warning message""" + if self._should_log("WARNING"): + print(self._format_message("WARNING", message, **kwargs)) + + def error(self, message: str, **kwargs): + """Log error message""" + if self._should_log("ERROR"): + print(self._format_message("ERROR", message, **kwargs)) + + +def create_debug_logger(agent_name: str, debug_level: str = None) -> DebugLogger: + """Create a debug logger for an agent""" + if debug_level is None: + debug_level = os.getenv("KOSMOS_DEBUG_LEVEL", "INFO") + return DebugLogger(agent_name, debug_level) \ No newline at end of file diff --git a/kosmos/utils/file_utils.py b/kosmos/utils/file_utils.py index e69de29..6c3bfea 100644 --- a/kosmos/utils/file_utils.py +++ b/kosmos/utils/file_utils.py @@ -0,0 +1,568 @@ +""" +File system utils. +""" +import collections +import os +import pickle +import sys +import errno +import shutil +import glob + +# import pwd +import codecs +import hashlib +import tarfile +import fnmatch +import tempfile +from datetime import datetime +from socket import gethostname +import logging + + +f_ext = os.path.splitext + +f_size = os.path.getsize + +is_file = os.path.isfile + +is_dir = os.path.isdir + +get_dir = os.path.dirname + + +def host_name(): + "Get host name, alias with ``socket.gethostname()``" + return gethostname() + + +def host_id(): + """ + Returns: first part of hostname up to '.' + """ + return host_name().split(".")[0] + + +def utf_open(fname, mode): + """ + Wrapper for codecs.open + """ + return codecs.open(fname, mode=mode, encoding="utf-8") + + +def is_sequence(obj): + """ + Returns: + True if the sequence is a collections.Sequence and not a string. 
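+        e.g. is_sequence([1, 2]) is True, while is_sequence("ab") is False.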
+ """ + return isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str) + + +def pack_varargs(args): + """ + Pack *args or a single list arg as list + + def f(*args): + arg_list = pack_varargs(args) + # arg_list is now packed as a list + """ + assert isinstance(args, tuple), "please input the tuple `args` as in *args" + if len(args) == 1 and is_sequence(args[0]): + return args[0] + else: + return args + + +def f_not_empty(*fpaths): + """ + Returns: + True if and only if the file exists and file size > 0 + if fpath is a dir, if and only if dir exists and has at least 1 file + """ + fpath = f_join(*fpaths) + if not os.path.exists(fpath): + return False + + if os.path.isdir(fpath): + return len(os.listdir(fpath)) > 0 + else: + return os.path.getsize(fpath) > 0 + + +def f_expand(fpath): + return os.path.expandvars(os.path.expanduser(fpath)) + + +def f_exists(*fpaths): + return os.path.exists(f_join(*fpaths)) + + +def f_join(*fpaths): + """ + join file paths and expand special symbols like `~` for home dir + """ + fpaths = pack_varargs(fpaths) + fpath = f_expand(os.path.join(*fpaths)) + if isinstance(fpath, str): + fpath = fpath.strip() + return fpath + + +def f_listdir( + *fpaths, + filter_ext=None, + filter=None, + sort=True, + full_path=False, + nonexist_ok=True, + recursive=False, +): + """ + Args: + full_path: True to return full paths to the dir contents + filter: function that takes in file name and returns True to include + nonexist_ok: True to return [] if the dir is non-existent, False to raise + sort: sort the file names by alphabetical + recursive: True to use os.walk to recursively list files. Note that `filter` + will be applied to the relative path string to the root dir. + e.g. filter will take "a/data1.txt" and "a/b/data3.txt" as input, instead of + just the base file names "data1.txt" and "data3.txt". + if False, will simply call os.listdir() + """ + assert not (filter_ext and filter), "filter_ext and filter are mutually exclusive" + dir_path = f_join(*fpaths) + if not os.path.exists(dir_path) and nonexist_ok: + return [] + if recursive: + files = [ + os.path.join(os.path.relpath(root, dir_path), file) + for root, _, files in os.walk(dir_path) + for file in files + ] + else: + files = os.listdir(dir_path) + if filter is not None: + files = [f for f in files if filter(f)] + elif filter_ext is not None: + files = [f for f in files if f.endswith(filter_ext)] + if sort: + files.sort() + if full_path: + return [os.path.join(dir_path, f) for f in files] + else: + return files + + +def f_mkdir(*fpaths): + """ + Recursively creates all the subdirs + If exist, do nothing. + """ + fpath = f_join(*fpaths) + os.makedirs(fpath, exist_ok=True) + return fpath + + +def f_mkdir_in_path(*fpaths): + """ + fpath is a file, + recursively creates all the parent dirs that lead to the file + If exist, do nothing. 
+ """ + os.makedirs(get_dir(f_join(*fpaths)), exist_ok=True) + + +def last_part_in_path(fpath): + """ + https://stackoverflow.com/questions/3925096/how-to-get-only-the-last-part-of-a-path-in-python + """ + return os.path.basename(os.path.normpath(f_expand(fpath))) + + +def is_abs_path(*fpath): + return os.path.isabs(f_join(*fpath)) + + +def is_relative_path(*fpath): + return not is_abs_path(f_join(*fpath)) + + +def f_time(*fpath): + "File modification time" + return str(os.path.getctime(f_join(*fpath))) + + +def f_append_before_ext(fpath, suffix): + """ + Append a suffix to file name and retain its extension + """ + name, ext = f_ext(fpath) + return name + suffix + ext + + +def f_add_ext(fpath, ext): + """ + Append an extension if not already there + Args: + ext: will add a preceding `.` if doesn't exist + """ + if not ext.startswith("."): + ext = "." + ext + if fpath.endswith(ext): + return fpath + else: + return fpath + ext + + +def f_has_ext(fpath, ext): + "Test if file path is a text file" + _, actual_ext = f_ext(fpath) + return actual_ext == "." + ext.lstrip(".") + + +def f_glob(*fpath): + return glob.glob(f_join(*fpath), recursive=True) + + +def f_remove(*fpath, verbose=False, dry_run=False): + """ + If exist, remove. Supports both dir and file. Supports glob wildcard. + """ + assert isinstance(verbose, bool) + fpath = f_join(fpath) + if dry_run: + print("Dry run, delete:", fpath) + return + for f in glob.glob(fpath): + try: + shutil.rmtree(f) + except OSError as e: + if e.errno == errno.ENOTDIR: + try: + os.remove(f) + except: # final resort safeguard + pass + if verbose: + print(f'Deleted "{fpath}"') + + +def f_copy(fsrc, fdst, ignore=None, include=None, exists_ok=True, verbose=False): + """ + Supports both dir and file. Supports glob wildcard. + """ + fsrc, fdst = f_expand(fsrc), f_expand(fdst) + for f in glob.glob(fsrc): + try: + f_copytree(f, fdst, ignore=ignore, include=include, exist_ok=exists_ok) + except OSError as e: + if e.errno == errno.ENOTDIR: + shutil.copy(f, fdst) + else: + raise + if verbose: + print(f'Copied "{fsrc}" to "{fdst}"') + + +def _f_copytree( + src, + dst, + symlinks=False, + ignore=None, + exist_ok=True, + copy_function=shutil.copy2, + ignore_dangling_symlinks=False, +): + """Copied from python standard lib shutil.copytree + except that we allow exist_ok + Use f_copytree as entry + """ + names = os.listdir(src) + if ignore is not None: + ignored_names = ignore(src, names) + else: + ignored_names = set() + + os.makedirs(dst, exist_ok=exist_ok) + errors = [] + for name in names: + if name in ignored_names: + continue + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if os.path.islink(srcname): + linkto = os.readlink(srcname) + if symlinks: + # We can't just leave it to `copy_function` because legacy + # code with a custom `copy_function` may rely on copytree + # doing the right thing. + os.symlink(linkto, dstname) + shutil.copystat(srcname, dstname, follow_symlinks=not symlinks) + else: + # ignore dangling symlink if the flag is on + if not os.path.exists(linkto) and ignore_dangling_symlinks: + continue + # otherwise let the copy occurs. 
copy2 will raise an error + if os.path.isdir(srcname): + _f_copytree( + srcname, dstname, symlinks, ignore, exist_ok, copy_function + ) + else: + copy_function(srcname, dstname) + elif os.path.isdir(srcname): + _f_copytree(srcname, dstname, symlinks, ignore, exist_ok, copy_function) + else: + # Will raise a SpecialFileError for unsupported file types + copy_function(srcname, dstname) + # catch the Error from the recursive copytree so that we can + # continue with other files + except shutil.Error as err: + errors.extend(err.args[0]) + except OSError as why: + errors.append((srcname, dstname, str(why))) + try: + shutil.copystat(src, dst) + except OSError as why: + # Copying file access times may fail on Windows + if getattr(why, "winerror", None) is None: + errors.append((src, dst, str(why))) + if errors: + raise shutil.Error(errors) + return dst + + +def _include_patterns(*patterns): + """Factory function that can be used with copytree() ignore parameter. + + Arguments define a sequence of glob-style patterns + that are used to specify what files to NOT ignore. + Creates and returns a function that determines this for each directory + in the file hierarchy rooted at the source directory when used with + shutil.copytree(). + """ + + def _ignore_patterns(path, names): + keep = set( + name for pattern in patterns for name in fnmatch.filter(names, pattern) + ) + ignore = set( + name + for name in names + if name not in keep and not os.path.isdir(os.path.join(path, name)) + ) + return ignore + + return _ignore_patterns + + +def f_copytree(fsrc, fdst, symlinks=False, ignore=None, include=None, exist_ok=True): + fsrc, fdst = f_expand(fsrc), f_expand(fdst) + assert (ignore is None) or ( + include is None + ), "ignore= and include= are mutually exclusive" + if ignore: + ignore = shutil.ignore_patterns(*ignore) + elif include: + ignore = _include_patterns(*include) + _f_copytree(fsrc, fdst, ignore=ignore, symlinks=symlinks, exist_ok=exist_ok) + + +def f_move(fsrc, fdst): + fsrc, fdst = f_expand(fsrc), f_expand(fdst) + for f in glob.glob(fsrc): + shutil.move(f, fdst) + + +def f_split_path(fpath, normpath=True): + """ + Splits path into a list of its component folders + + Args: + normpath: call os.path.normpath to remove redundant '/' and + up-level references like ".." 
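+            e.g. "a/b/../c" splits to ["a", "c"] with normpath, or ["a", "b", "..", "c"] without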
+ """ + if normpath: + fpath = os.path.normpath(fpath) + allparts = [] + while 1: + parts = os.path.split(fpath) + if parts[0] == fpath: # sentinel for absolute paths + allparts.insert(0, parts[0]) + break + elif parts[1] == fpath: # sentinel for relative paths + allparts.insert(0, parts[1]) + break + else: + fpath = parts[0] + allparts.insert(0, parts[1]) + return allparts + + +def get_script_dir(): + """ + Returns: the dir of current script + """ + return os.path.dirname(os.path.realpath(sys.argv[0])) + + +def get_script_file_name(): + """ + Returns: the dir of current script + """ + return os.path.basename(sys.argv[0]) + + +def get_script_self_path(): + """ + Returns: the dir of current script + """ + return os.path.realpath(sys.argv[0]) + + +def get_parent_dir(location, abspath=False): + """ + Args: + location: current directory or file + + Returns: + parent directory absolute or relative path + """ + _path = os.path.abspath if abspath else os.path.relpath + return _path(f_join(location, os.pardir)) + + +def md5_checksum(*fpath): + """ + File md5 signature + """ + hash_md5 = hashlib.md5() + with open(f_join(*fpath), "rb") as f: + for chunk in iter(lambda: f.read(65536), b""): + hash_md5.update(chunk) + return hash_md5.hexdigest() + + +def create_tar(fsrc, output_tarball, include=None, ignore=None, compress_mode="gz"): + """ + Args: + fsrc: source file or folder + output_tarball: output tar file name + compress_mode: "gz", "bz2", "xz" or "" (empty for uncompressed write) + include: include pattern, will trigger copy to temp directory + ignore: ignore pattern, will trigger copy to temp directory + """ + fsrc, output_tarball = f_expand(fsrc), f_expand(output_tarball) + assert compress_mode in ["gz", "bz2", "xz", ""] + src_base = os.path.basename(fsrc) + + tempdir = None + if include or ignore: + tempdir = tempfile.mkdtemp() + tempdest = f_join(tempdir, src_base) + f_copy(fsrc, tempdest, include=include, ignore=ignore) + fsrc = tempdest + + with tarfile.open(output_tarball, "w:" + compress_mode) as tar: + tar.add(fsrc, arcname=src_base) + + if tempdir: + f_remove(tempdir) + + +def extract_tar(source_tarball, output_dir=".", members=None): + """ + Args: + source_tarball: extract members from archive + output_dir: default to current working dir + members: must be a subset of the list returned by getmembers() + """ + source_tarball, output_dir = f_expand(source_tarball), f_expand(output_dir) + with tarfile.open(source_tarball, "r:*") as tar: + tar.extractall(output_dir, members=members) + + +def move_with_backup(*fpath, suffix=".bak"): + """ + Ensures that a path is not occupied. If there is a file, rename it by + adding @suffix. Resursively backs up everything. + + Args: + fpath: file path to clear + suffix: Add to backed up files (default: {'.bak'}) + """ + fpath = str(f_join(*fpath)) + if os.path.exists(fpath): + move_with_backup(fpath + suffix) + shutil.move(fpath, fpath + suffix) + + +def insert_before_ext(name, insert): + """ + log.txt -> log.ep50.txt + """ + name, ext = os.path.splitext(name) + return name + insert + ext + + +def timestamp_file_name(fname): + timestr = datetime.now().strftime("_%H-%M-%S_%m-%d-%y") + return insert_before_ext(fname, timestr) + + +def get_file_lock(*fpath, timeout: int = 15, logging_level="critical"): + """ + NFS-safe filesystem-backed lock. 
`pip install flufl.lock` + https://flufllock.readthedocs.io/en/stable/apiref.html + + Args: + fpath: should be a path on NFS so that every process can see it + timeout: seconds + """ + from flufl.lock import Lock + + logging.getLogger("flufl.lock").setLevel(logging_level.upper()) + return Lock(f_join(*fpath), lifetime=timeout) + + +def load_pickle(*fpaths): + with open(f_join(*fpaths), "rb") as fp: + return pickle.load(fp) + + +def dump_pickle(data, *fpaths): + with open(f_join(*fpaths), "wb") as fp: + pickle.dump(data, fp) + + +def load_text(*fpaths, by_lines=False): + with open(f_join(*fpaths), "r") as fp: + if by_lines: + return fp.readlines() + else: + return fp.read() + + +def load_text_lines(*fpaths): + return load_text(*fpaths, by_lines=True) + + +def dump_text(s, *fpaths): + with open(f_join(*fpaths), "w") as fp: + fp.write(s) + + +def dump_text_lines(lines: list[str], *fpaths, add_newline=True): + with open(f_join(*fpaths), "w") as fp: + for line in lines: + print(line, file=fp, end="\n" if add_newline else "") + + +# aliases to be consistent with other load_* and dump_* +pickle_load = load_pickle +pickle_dump = dump_pickle +text_load = load_text +read_text = load_text +read_text_lines = load_text_lines +write_text = dump_text +write_text_lines = dump_text_lines +text_dump = dump_text \ No newline at end of file diff --git a/kosmos/utils/json_utils.py b/kosmos/utils/json_utils.py index f48a26a..aa2a6ec 100644 --- a/kosmos/utils/json_utils.py +++ b/kosmos/utils/json_utils.py @@ -156,11 +156,27 @@ def fix_and_parse_json( raise json.JSONDecodeError("Empty or None JSON string", "", 0) # First, try to extract JSON from markdown code blocks - # Use greedy match to capture the entire JSON object, not just the first part - json_pattern = re.compile(r"```(?:json)?\s*(\{.*?\})\s*```", re.DOTALL) - match = json_pattern.search(json_str) - if match: - json_str = match.group(1) + # Handle both JSON objects {...} and arrays [...], with various language tags + json_patterns = [ + # JSON objects in code blocks (with various language tags or no tag) - use greedy matching + re.compile(r"```(?:json|python|py)?\s*(\{.*\})\s*```", re.DOTALL), + # JSON arrays in code blocks (with various language tags or no tag) - use greedy matching + re.compile(r"```(?:json|python|py)?\s*(\[.*\])\s*```", re.DOTALL), + # Try without any language tag specification - greedy matching + re.compile(r"```\s*(\{.*\})\s*```", re.DOTALL), + re.compile(r"```\s*(\[.*\])\s*```", re.DOTALL), + # Fallback: non-greedy matching for edge cases + re.compile(r"```(?:json|python|py)?\s*(\{.*?\})\s*```", re.DOTALL), + re.compile(r"```(?:json|python|py)?\s*(\[.*?\])\s*```", re.DOTALL), + ] + + for pattern in json_patterns: + match = pattern.search(json_str) + if match: + extracted = match.group(1) + print(f"DEBUG: Extracted JSON from code block: {extracted[:200]}...") + json_str = extracted + break # Clean up whitespace and normalize json_str = json_str.strip() @@ -215,52 +231,75 @@ def fix_and_parse_json( except json.JSONDecodeError: pass - # Manual extraction: find the first { and find matching } by counting braces - # This handles cases where there's text before/after the JSON object - try: - # Find first opening brace - brace_start = json_str.index("{") - # Count braces to find the matching closing brace - brace_count = 0 - brace_end = -1 - in_string = False - escape_next = False - - for i in range(brace_start, len(json_str)): - char = json_str[i] + # Manual extraction: find JSON objects {...} or arrays [...] 
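+    # by counting matching braces/brackets while skipping delimiters inside quoted strings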
+ # This handles cases where there's text before/after the JSON + def extract_json_structure(json_str, start_char, end_char): + try: + # Find first opening character + start_pos = json_str.index(start_char) + # Count characters to find the matching closing character + char_count = 0 + end_pos = -1 + in_string = False + escape_next = False - # Handle string content (don't count braces inside strings) - if escape_next: - escape_next = False - continue - if char == '\\': - escape_next = True - continue - if char == '"': - in_string = not in_string - continue + for i in range(start_pos, len(json_str)): + char = json_str[i] + + # Handle string content (don't count brackets/braces inside strings) + if escape_next: + escape_next = False + continue + if char == '\\': + escape_next = True + continue + if char == '"': + in_string = not in_string + continue + + # Count brackets/braces only outside strings + if not in_string: + if char == start_char: + char_count += 1 + elif char == end_char: + char_count -= 1 + if char_count == 0: + end_pos = i + break - # Count braces only outside strings - if not in_string: - if char == '{': - brace_count += 1 - elif char == '}': - brace_count -= 1 - if brace_count == 0: - brace_end = i - break - - if brace_end > brace_start: - extracted_json = json_str[brace_start:brace_end + 1] - print(f"DEBUG: Extracted JSON from position {brace_start} to {brace_end}") + if end_pos > start_pos: + extracted_json = json_str[start_pos:end_pos + 1] + print(f"DEBUG: Extracted {start_char}...{end_char} from position {start_pos} to {end_pos}") + return extracted_json + except ValueError: + return None + return None + + # Try extracting JSON object first + extracted = extract_json_structure(json_str, '{', '}') + if extracted: + try: + return json.loads(extracted) + except json.JSONDecodeError: + try: + # Try fixing the extracted JSON + fixed = correct_json(extracted) + return json.loads(fixed) + except json.JSONDecodeError as e: + print(f"DEBUG: Object extraction and fix failed: {e}") + + # Try extracting JSON array + extracted = extract_json_structure(json_str, '[', ']') + if extracted: + try: + return json.loads(extracted) + except json.JSONDecodeError: try: - return json.loads(extracted_json) - except json.JSONDecodeError: # Try fixing the extracted JSON - fixed = correct_json(extracted_json) + fixed = correct_json(extracted) return json.loads(fixed) - except (ValueError, json.JSONDecodeError) as e: - print(f"DEBUG: Manual extraction failed: {e}") + except json.JSONDecodeError as e: + print(f"DEBUG: Array extraction and fix failed: {e}") # If all else fails, raise an error with useful debugging info preview = json_str[:500] if len(json_str) > 500 else json_str diff --git a/kosmos/utils/record_utils.py b/kosmos/utils/record_utils.py index e69de29..97a19fe 100644 --- a/kosmos/utils/record_utils.py +++ b/kosmos/utils/record_utils.py @@ -0,0 +1,227 @@ +import time +import re +from .file_utils import * +from .json_utils import * + +class EventRecorder: + def __init__( + self, + checkpoint_dir="checkpoint", + resume=False, + init_position=None, + ): + self.checkpoint_dir = checkpoint_dir + self.resource_history = set() + self.resource_vs_time = {} + self.resource_vs_iterations = {} + self.celestial_body_history = set() + self.vessel_situation_history = set() + self.init_position = init_position + self.orbital_history = [] # Track orbital parameters over time + self.position_hostry = [[0, 0, 0]] # 3D positions for space + self.mission_time = 0.0 # Mission elapsed time in seconds + 
self.universal_time_start = None # KSP universal time at mission start + self.iteration = 0 + f_mkdir(self.checkpoint_dir, "events") + if resume: + self.resume() + + def record(self, telemetry, mission): + mission = re.sub(f'[\\/:"*<>| ]', "_", mission) + mission = mission.replace(" ", "_") + time.strftime( + "_%Y%m%d_%H%M%S", time.localtime() + ) + self.iteration += 1 + + if not self.init_position and telemetry: + # Initialize position from first telemetry data + first_event = telemetry[0][1] + if "position" in first_event: + self.init_position = [ + first_event["position"]["x"], + first_event["position"]["y"], + first_event["position"]["z"], + ] + + if not self.universal_time_start and telemetry: + # Set mission start time + first_event = telemetry[0][1] + if "universal_time" in first_event: + self.universal_time_start = first_event["universal_time"] + + for event_type, event in telemetry: + self.update_resource(event) + self.update_orbital_state(event) + if event_type == "observe": + self.update_mission_time(event) + + print( + f"Recorder message: {self.mission_time:.1f} seconds mission time\n" + f"Recorder message: {self.iteration} iteration passed" + ) + # Truncate mission name to avoid filename too long error + safe_mission_name = mission.replace('\n', '_').replace(' ', '_')[:100] + dump_json(telemetry, f_join(self.checkpoint_dir, "events", safe_mission_name)) + + def resume(self, cutoff=None): + self.resource_history = set() + self.resource_vs_time = {} + self.resource_vs_iterations = {} + self.mission_time = 0.0 + self.orbital_history = [] + self.position_history = [[0, 0, 0]] + self.celestial_body_history = set() + self.vessel_situation_history = set() + + def get_timestamp(string): + timestamp = "_".join(string.split("_")[-2:]) + return time.mktime(time.strptime(timestamp, "%Y%m%d_%H%M%S")) + + records = f_listdir(self.checkpoint_dir, "events") + sorted_records = sorted(records, key=get_timestamp) + + for record in sorted_records: + self.iteration += 1 + if cutoff and self.iteration > cutoff: + break + + telemetry = load_json(f_join(self.checkpoint_dir, "events", record)) + + if not self.init_position and telemetry: + first_event = telemetry[0][1] + if "position" in first_event: + self.init_position = [ + first_event["position"]["x"], + first_event["position"]["y"], + first_event["position"]["z"], + ] + + if not self.universal_time_start and telemetry: + first_event = telemetry[0][1] + if "position" in first_event: + self.init_position = [ + first_event["position"]["x"], + first_event["position"]["y"], + first_event["position"]["z"], + ] + + if not self.universal_time_start and telemetry: + first_event = telemetry[0][1] + if "universal_time" in first_event: + self.universal_time_start = first_event["universal_time"] + + for event_type, event in telemetry: + self.update_resource(event) + self.update_position(event) + self.update_orbital_state(event) + if event_type == "observe": + self.update_mission_time(event) + + def update_resource(self, event): + if "resources" not in event: + return + + resources = event["resources"] + mission_time = event.get("mission_time", self.mission_time) + current_body = event.get("current_body", "Unknown") + vessel_situation = event.get("vessel_situation", "Unknown") + + current_resources = set() + for resource_name, amount in resources.items(): + # Handle both dict and numeric amount values + if isinstance(amount, dict): + amount_value = amount.get('amount', 0) + else: + amount_value = amount + + if amount_value > 0.1: + 
current_resources.add(resource_name) + + new_resources = current_resources - self.resource_history + self.resource_history.update(current_resources) + self.celestial_body_history.add(current_body) + self.vessel_situation_history.add(vessel_situation) + + if new_resources: + mission_time_key = self.mission_time + mission_time + if mission_time_key not in self.resource_vs_time: + self.resource_vs_time[mission_time_key] = [] + self.resource_vs_time[mission_time_key].extend(new_resources) + + def update_mission_time(self, event): + if "mission_time" in event: + self.mission_time += event["mission_time"] + elif "universal_time" in event and self.universal_time_start: + self.mission_time = event["universal_time"] - self.uniersal_time_start + + def update_position(self, event): + if "position" not in event or not self.init_position: + return + + position = [ + event["position"]["x"] - self.init_position[0], + event["position"]["y"] - self.init_position[1], + event["position"]["z"] - self.init_position[2], + ] + + if self.position_history[-1] != position: + self.position_history.append(position) + + def update_orbital_state(self, event): + if "orbit_parameters" not in event: + return + + orbit_data = { + "mission_time": self.mission_time, + "apoapsis": event["orbit_parameters"].get("apoapsis_altitude", 0), + "periapsis": event["orbit_parameters"].get("periapsis_altitude", 0), + "inclination": event["orbit_parameters"].get("inclination", 0), + "eccentricity": event["orbit_parameters"].get("eccentricity", 0), + "current_body": event.get("current_body", "Unknown"), + "vessel_situation": event.get("vessel_situation", "Unknown"), + } + + if (not self.orbital_history or + abs(self.orbital_history[-1]["apoapsis"] - orbit_data["apoapsis"]) > 1000 or + abs(self.orbital_history[-1]["periapsis"] - orbit_data["periapsis"]) > 1000 or + self.orbital_history[-1]["current_body"] != orbit_data["current_body"]): + self.orbital_history.append(orbit_data) + + def get_exploration_summary(self): + return { + "resources_discovered": len(self.resource_history), + "celestial_bodies_visited": len(self.celestial_body_history), + "vessel_situations_experienced": len(self.vessel_situation_history), + "total_mission_time": self.mission_time, + "orbital_maneuvers": len(self.orbital_history), + "distance_traveled": self.calculate_total_distance(), + "iterations_completed": self.iteration, + } + + def calculate_total_distance(self): + if len(self.position_history) < 2: + return 0.0 + + total_distance = 0.0 + for i in range(1, len(self.position_history)): + prev_pos = self.position_history[i-1] + curr_pos = self.position_history[i] + + distance = ( + (curr_pos[0] - prev_pos[0]) ** 2 + + (curr_pos[1] - prev_pos[1]) ** 2 + + (curr_pos[2] - prev_pos[2]) ** 2 + ) ** 0.5 + + total_distance += distance + + return total_distance + + def get_resource_timeline(self): + return self.resource_vs_time + + def get_orbital_progression(self): + return self.orbital_history + + def get_visited_bodies(self): + return list(self.vessel_situation_history) \ No newline at end of file diff --git a/krpc_readme_llm.md b/krpc_readme_llm.md index 560a8c4..6faba2e 100644 --- a/krpc_readme_llm.md +++ b/krpc_readme_llm.md @@ -212,6 +212,13 @@ docking.speed_limit = 1.0 # m/s approach speed # Maneuver Planner maneuver_planner = mj.maneuver_planner + +# IMPORTANT: Enums are accessed from mj object, not from subsystems +# Examples of common MechJeb enums: +# mj.TimeReference.apoapsis, mj.TimeReference.periapsis, mj.TimeReference.computed +# 
diff --git a/krpc_readme_llm.md b/krpc_readme_llm.md
index 560a8c4..6faba2e 100644
--- a/krpc_readme_llm.md
+++ b/krpc_readme_llm.md
@@ -212,6 +212,13 @@ docking.speed_limit = 1.0  # m/s approach speed
 
 # Maneuver Planner
 maneuver_planner = mj.maneuver_planner
+
+# IMPORTANT: Enums are accessed from the mj object, not from subsystems
+# Examples of common MechJeb enums:
+# mj.TimeReference.apoapsis, mj.TimeReference.periapsis, mj.TimeReference.computed
+# mj.SmartASSAutopilotMode.prograde, mj.SmartASSAutopilotMode.retrograde
+# mj.AscentPathType.classic
+
 # Plan Hohmann transfer
 hohmann = maneuver_planner.operation_hohmann
 hohmann.make_nodes(target_body=conn.space_center.bodies['Mun'])
@@ -318,6 +325,9 @@ warp.warp_to_soi_change()
 
 print("\n3. MUN ORBIT INSERTION")
 # Plan circularization at Mun periapsis
 circularize = maneuver.operation_circularize
+# Set time reference to periapsis for efficient circularization
+# IMPORTANT: TimeReference is accessed from mj, NOT from maneuver_planner
+circularize.time_selector.time_reference = mj.TimeReference.periapsis
 circularize.make_nodes()
 node_exec.enabled = True
diff --git a/main.py b/main.py
index af11f6f..b238b5b 100755
--- a/main.py
+++ b/main.py
@@ -1,11 +1,80 @@
-import krpc
+#!/usr/bin/env python3
+"""
+KOSMOS - AI Agents controlling Kerbal Space Program
+Main entry point for the system.
+"""
 
-conn = krpc.connect()
-print(conn.krpc.get_status().version)
+import os
+import sys
+import argparse
 
-vessel = conn.space_center.active_vessel
-refframe = vessel.orbit.body.reference_frame
+# Ensure we use the local kosmos package from this directory
+# This prevents Python from importing a different copy installed elsewhere
+script_dir = os.path.dirname(os.path.abspath(__file__))
+if script_dir not in sys.path:
+    sys.path.insert(0, script_dir)
 
-with conn.stream(vessel.position, refframe) as position:
-    while True:
-        print(position())
\ No newline at end of file
+from kosmos import Kosmos
+
+def main():
+    parser = argparse.ArgumentParser(description='KOSMOS - Kerbal Space Program AI Agent System')
+    parser.add_argument('mission', nargs='?', default='',
+                        help='Mission overview to execute (e.g., "Asteroid Redirect Mission: Orbital Rendezvous")')
+    parser.add_argument('--interactive', action='store_true',
+                        help='Prompt for the mission overview if none is given')
+
+    args = parser.parse_args()
+
+    # Initialize KOSMOS with API keys from the environment
+    openai_api_key = os.getenv("OPENAI_API_KEY")
+    anthropic_api_key = os.getenv("ANTHROPIC_API_KEY")
+
+    if not openai_api_key:
+        print("❌ Error: OpenAI API key required.")
+        print("Please set the OPENAI_API_KEY environment variable in your .env file")
+        sys.exit(1)
+
+    if not anthropic_api_key:
+        print("❌ Error: Anthropic API key required.")
+        print("Please set the ANTHROPIC_API_KEY environment variable in your .env file")
+        sys.exit(1)
+
+    # Get mission overview
+    mission_overview = args.mission
+
+    if not mission_overview:
+        if args.interactive:
+            mission_overview = input("🎯 Enter mission overview: ").strip()
+        else:
+            print("❌ Error: Mission overview required.")
+            print("Usage: python main.py 'Mission Description'")
+            print("   or: python main.py --interactive")
+            print("   or: python main.py --help")
+            sys.exit(1)
+
+    if not mission_overview:
+        print("❌ Error: No mission overview provided.")
+        sys.exit(1)
+
+    print("🚀 KOSMOS - AI Agents for Kerbal Space Program")
+    print("=" * 60)
+    print(f"🎯 Mission: {mission_overview}")
+    print("=" * 60)
+
+    kosmos = Kosmos(
+        openai_api_key=openai_api_key,
+        anthropic_api_key=anthropic_api_key,
+        checkpoint_dir="checkpoint",
+        max_iterations=160,
+        initial_mission=mission_overview,
+        resume=False,
+    )
+
+    print("✅ KOSMOS initialized successfully!")
+    print("🎯 Starting mission execution...")
+
+    # Execute the mission
+    kosmos.learn()
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index 286b5b5..18d3f0c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -609,6 +609,18 @@ files = [
 
 [package.dependencies]
 colorama = {version = "*", markers = "platform_system == \"Windows\""}
 
+[[package]]
+name = "cloudpickle"
+version = "3.1.1" +description = "Pickler class to extend the standard pickle.Pickler functionality" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cloudpickle-3.1.1-py3-none-any.whl", hash = "sha256:c8c5a44295039331ee9dad40ba100a9c7297b6f988e50e87ccdf3765a668350e"}, + {file = "cloudpickle-3.1.1.tar.gz", hash = "sha256:b216fa8ae4019d5482a8ac3c95d8f6346115d8835911fd4aefd1a445e4242c64"}, +] + [[package]] name = "colorama" version = "0.4.6" @@ -843,6 +855,18 @@ files = [ [package.extras] tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] +[[package]] +name = "farama-notifications" +version = "0.0.4" +description = "Notifications for all Farama Foundation maintained libraries." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "Farama-Notifications-0.0.4.tar.gz", hash = "sha256:13fceff2d14314cf80703c8266462ebf3733c7d165336eee998fc58e545efd18"}, + {file = "Farama_Notifications-0.0.4-py3-none-any.whl", hash = "sha256:14de931035a41961f7c056361dc7f980762a143d05791ef5794a751a2caf05ae"}, +] + [[package]] name = "filelock" version = "3.19.1" @@ -1199,6 +1223,37 @@ files = [ [package.extras] protobuf = ["grpcio-tools (>=1.74.0)"] +[[package]] +name = "gymnasium" +version = "1.2.1" +description = "A standard API for reinforcement learning and a diverse set of reference environments (formerly Gym)." +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "gymnasium-1.2.1-py3-none-any.whl", hash = "sha256:85cd1c16351db0b89f73be54e952ddfece97b56d1e5400d2dcd59f58b7707963"}, + {file = "gymnasium-1.2.1.tar.gz", hash = "sha256:4e6480273528523a90b3db99befb6111b13f15fa0866de88c4b675770495b66c"}, +] + +[package.dependencies] +cloudpickle = ">=1.2.0" +farama-notifications = ">=0.0.1" +numpy = ">=1.21.0" +typing-extensions = ">=4.3.0" + +[package.extras] +all = ["ale_py (>=0.9)", "array-api-compat (>=1.11.0)", "array-api-compat (>=1.11.0)", "array-api-compat (>=1.11.0)", "box2d-py (==2.3.5)", "flax (>=0.5.0)", "imageio (>=2.14.1)", "jax (>=0.4.16)", "jaxlib (>=0.4.16)", "matplotlib (>=3.0)", "moviepy (>=1.0.0)", "mujoco (>=2.1.5)", "numpy (>=2.1)", "numpy (>=2.1)", "numpy (>=2.1)", "opencv-python (>=3.0)", "packaging (>=23.0)", "pygame (>=2.1.3)", "pygame (>=2.1.3)", "pygame (>=2.1.3)", "swig (==4.*)", "torch (>=1.13.0)"] +array-api = ["array-api-compat (>=1.11.0)", "numpy (>=2.1)", "packaging (>=23.0)"] +atari = ["ale_py (>=0.9)"] +box2d = ["box2d-py (==2.3.5)", "pygame (>=2.1.3)", "swig (==4.*)"] +classic-control = ["pygame (>=2.1.3)", "pygame (>=2.1.3)"] +jax = ["array-api-compat (>=1.11.0)", "flax (>=0.5.0)", "jax (>=0.4.16)", "jaxlib (>=0.4.16)", "numpy (>=2.1)"] +mujoco = ["imageio (>=2.14.1)", "mujoco (>=2.1.5)", "packaging (>=23.0)"] +other = ["matplotlib (>=3.0)", "moviepy (>=1.0.0)", "opencv-python (>=3.0)", "seaborn (>=0.13)"] +testing = ["array_api_extra (>=0.7.0)", "dill (>=0.3.7)", "pytest (>=7.1.3)", "scipy (>=1.7.3)"] +torch = ["array-api-compat (>=1.11.0)", "numpy (>=2.1)", "torch (>=1.13.0)"] +toy-text = ["pygame (>=2.1.3)", "pygame (>=2.1.3)"] + [[package]] name = "h11" version = "0.16.0" @@ -1876,6 +1931,26 @@ perplexity = ["langchain-perplexity"] together = ["langchain-together"] xai = ["langchain-xai"] +[[package]] +name = "langchain-chroma" +version = "0.2.6" +description = "An integration package connecting Chroma and LangChain." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "langchain_chroma-0.2.6-py3-none-any.whl", hash = "sha256:d7e10101b0942cd990eedb798c3d85ed3e8415a992c8a388843196f6ab97b41b"}, + {file = "langchain_chroma-0.2.6.tar.gz", hash = "sha256:ec5ca0f6f7692ac053741e076ea086c4be0cfcb5846c8693b1bcc3089c88b65e"}, +] + +[package.dependencies] +chromadb = ">=1.0.20" +langchain-core = ">=0.3.76" +numpy = [ + {version = ">=1.26.0", markers = "python_version < \"3.13\""}, + {version = ">=2.1.0", markers = "python_version >= \"3.13\""}, +] + [[package]] name = "langchain-core" version = "0.3.76" @@ -5247,4 +5322,4 @@ cffi = ["cffi (>=1.17,<2.0) ; platform_python_implementation != \"PyPy\" and pyt [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "86b7c29786bd85bbac5181ef27e1f407a2c4cf66aa26bed02f6cf76b93dbbfd9" +content-hash = "1bcf1a9c1e8d7053524b53b32ee776eb023e339151b80215cebea0f7421ff036" diff --git a/pyproject.toml b/pyproject.toml index 6aa8a5a..da20978 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,9 @@ dependencies = [ "openai (>=1.104.2,<2.0.0)", "anthropic (>=0.64.0,<0.65.0)", "langchain (>=0.3.27,<0.4.0)", - "langchain-openai (>=0.3.33,<0.4.0)" + "langchain-openai (>=0.3.33,<0.4.0)", + "gymnasium (>=1.2.1,<2.0.0)", + "langchain-chroma (>=0.2.6,<0.3.0)" ]
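The pyproject change above adds `gymnasium` and `langchain-chroma`; the latter presumably backs the Chroma store checked in at `kosmos/skill/chroma_db`. A minimal sketch of persisting and querying a skills collection against that directory, assuming OpenAI embeddings and a collection named `skills` (neither is confirmed by this patch):

```python
# Sketch only: the collection name, embedding model, and usage pattern are assumptions.
from langchain_openai import OpenAIEmbeddings  # existing project dependency
from langchain_chroma import Chroma            # added by this patch

skills = Chroma(
    collection_name="skills",                    # assumed name
    embedding_function=OpenAIEmbeddings(),       # reads OPENAI_API_KEY from the environment
    persist_directory="kosmos/skill/chroma_db",  # directory added by this patch
)

# Store a learned maneuver description and retrieve it later by similarity.
skills.add_texts(
    ["Circularize at apoapsis with a prograde burn via operation_circularize."],
    metadatas=[{"task": "Launch to orbit"}],
)
print(skills.similarity_search("how do I circularize my orbit?", k=1))
```

If the `kosmos/skill` package reads from this store, its collection name must of course match the one assumed here.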