diff --git a/README.md b/README.md index 7c573ed..7dad10c 100644 --- a/README.md +++ b/README.md @@ -5,8 +5,9 @@ AI Agents controlling Kerbal Space Program. - **Kerbal Space Program** installed - **Python 3.8+** installed - **Poetry** for dependency management +- **OpenAI API key** for the language models -## Setup +## Installation ### 1. Install Poetry (if not already installed) ```bash @@ -32,31 +33,117 @@ cd KOSMOS poetry install ``` -### 4. Usage +## Getting Started +KOSMOS uses OpenAI's GPT-4 as the language model. You need to have an OpenAI API key to use KOSMOS. You can get one from [here](https://platform.openai.com/api-keys). + +After the installation process, you can run KOSMOS by: + +```python +from kosmos import Kosmos + +openai_api_key = "YOUR_API_KEY" + +kosmos = Kosmos( + openai_api_key=openai_api_key, +) + +# Start mission execution +kosmos.learn() +``` + +If you are running KOSMOS for the first time, make sure to: 1. **Launch Kerbal Space Program** 2. **Load a scenario or flight** (make sure you have an active vessel) 3. **Start the kRPC server** in KSP (usually via the kRPC menu) -4. **Run the Python script:** +4. 
**Run the Python script** + +## Usage Examples + +### Basic Mission Execution +```python +from kosmos import Kosmos + +kosmos = Kosmos( + openai_api_key="YOUR_API_KEY", +) + +# Start lifelong learning and mission execution +kosmos.learn() +``` + +### Resume from Checkpoint +If you stop the learning process and want to resume from a checkpoint later: + +```python +from kosmos import Kosmos + +kosmos = Kosmos( + openai_api_key="YOUR_API_KEY", + checkpoint_dir="YOUR_CKPT_DIR", + resume=True, +) + +# Resume mission from checkpoint +kosmos.learn() +``` + +### Execute Specific Task +If you want to run KOSMOS for a specific task: + +```python +from kosmos import Kosmos + +# First instantiate KOSMOS with maneuver library +kosmos = Kosmos( + openai_api_key="YOUR_API_KEY", + maneuver_library_dir="./maneuver_library/trial1", # Load learned maneuvers + checkpoint_dir="YOUR_CKPT_DIR", + resume=False, +) + +# Run task decomposition +task = "Launch to orbit" # e.g. "Launch to orbit", "Land on Mun", "Dock with space station" +sub_goals = kosmos.decompose_task(task=task) + +# Execute the sub-goals +kosmos.execute_task(sub_goals=sub_goals) +``` + +## Quick Start + +1. **Set your OpenAI API key:** + ```bash + export OPENAI_API_KEY="your-api-key-here" + ``` + +2. **Launch KSP and start kRPC server** + +3. 
**Run KOSMOS:** ```bash poetry run python main.py ``` ## Project Structure -- `main.py` - Basic vessel position streaming example -- `pyproject.toml` - Poetry configuration and dependencies -- `poetry.lock` - Locked dependency versions +- `main.py` - Main entry point +- `kosmos/` - Core KOSMOS package + - `agents/` - AI agents (Flight, Mission Control, Maneuver, Audit) + - `env/` - KSP environment interface + - `utils/` - Utility functions + - `control_primitives/` - Executable maneuver implementations + - `control_primitives_context/` - Context examples for AI agents +- `examples/` - Usage examples +- `checkpoint/` - Checkpoint directory (created automatically) -## Example Output -The basic script will stream your vessel's position coordinates in real-time: -``` -0.5.4 -(2688577.068673832, -7.589481473858227, 465412.3802019775) -(2688577.068673832, -7.589481473858227, 465412.3802019775) -... -``` +## Available Agents + +- **FlightAgent** ✅ - Executes flight maneuvers and controls vessels +- **MissionControlAgent** ⚠️ - Plans and coordinates mission phases +- **ManeuverAgent** ⚠️ - Manages specific maneuver execution +- **AuditAgent** ⚠️ - Monitors and validates mission progress ## Development + To add new dependencies: ```bash poetry add package-name @@ -74,4 +161,4 @@ poetry run python main.py 4. Submit a pull request ## License -See [LICENSE](LICENSE) file for details. +See [LICENSE](LICENSE) file for details. 
# NOTE(review): reconstructed from a whitespace-mangled unified diff.  This
# span of the patch also added ``kosmos/__init__.py`` (a one-line
# ``from .kosmos import Kosmos`` re-export) and switched
# ``kosmos/agents/flight.py`` from the "new_flight_template" prompt to
# "flight_template"; those one-line changes are noted here rather than
# reproduced as code.

# --- kosmos/control_primitives/__init__.py ---
import importlib.util
import os

import kosmos.utils as U


def load_control_primitives(primitive_names=None):
    """Return the raw source text of each control-primitive module.

    Args:
        primitive_names: optional list of primitive module names (without the
            ``.py`` suffix).  When ``None``, every ``*.py`` file in the
            ``control_primitives`` package directory except ``__init__.py``
            is discovered automatically.

    Returns:
        list[str]: source text of each primitive, in the order requested
        (alphabetical when auto-discovered).
    """
    package_path = importlib.util.find_spec("kosmos").submodule_search_locations[0]
    primitives_dir = f"{package_path}/control_primitives"
    if primitive_names is None:
        # sorted() fixes a latent nondeterminism: os.listdir() returns files
        # in arbitrary, platform-dependent order, which made the primitive
        # ordering (and any prompt built from it) unstable across runs.
        primitive_names = sorted(
            name[:-3]
            for name in os.listdir(primitives_dir)
            if name.endswith(".py") and name != "__init__.py"
        )
    return [
        U.load_text(f"{primitives_dir}/{primitive_name}.py")
        for primitive_name in primitive_names
    ]


# --- kosmos/control_primitives/dock_with_target.py ---
# NOTE(review): like the sibling primitives, this file uses ``time`` and
# ``math`` without importing them -- it appears to be loaded as text and
# executed in a namespace that provides those names.  TODO confirm against
# the primitive loader.
async def dock_with_target(conn, approach_distance=50, final_approach_speed=0.5):
    """Rendezvous with and dock to the currently targeted vessel.

    Args:
        conn: active kRPC connection.
        approach_distance: separation (m) at which the close-approach phase ends.
        final_approach_speed: reserved for the final-approach controller
            (unused by the simplified approach logic below).

    Returns:
        True on successful dock; False when no target is set, no ready
        docking ports exist, or an exception aborts the approach.

    Raises:
        Exception: after more than 3 accumulated failures.
    """
    global _dock_fail_count

    vessel = conn.space_center.active_vessel
    control = vessel.control
    space_center = conn.space_center

    if not space_center.target_vessel:
        print("No target vessel selected for docking")
        return False

    target = space_center.target_vessel

    try:
        # Phase 1: close approach under SAS target-hold.
        control.rcs = True
        control.sas = True
        control.sas_mode = space_center.SASMode.target

        while True:
            # Fix: kRPC exposes positions as Vessel.position(reference_frame)
            # tuples; the original called ReferenceFrame.position(...), which
            # does not exist and raised on the first iteration.
            position = target.position(vessel.orbital_reference_frame)
            target_distance = math.sqrt(sum(c * c for c in position))

            if target_distance < approach_distance:
                break

            # Step the throttle down as the separation shrinks.
            if target_distance > 1000:
                control.throttle = 0.1
            elif target_distance > 100:
                control.throttle = 0.05
            else:
                control.throttle = 0.01

            time.sleep(0.5)

        control.throttle = 0.0
        print(f"Close approach achieved: {target_distance:.1f}m")

        # Phase 2: final docking approach.
        control.sas_mode = space_center.SASMode.stability_assist

        my_port = find_docking_port(vessel)
        target_port = find_docking_port(target)

        if not my_port or not target_port:
            print("Could not find docking ports on vessels")
            return False

        # Simplified: poll until KSP reports the first port as docked.  A
        # real implementation would navigate precisely port-to-port.
        while not vessel.parts.docking_ports[0].state.name == 'docked':
            time.sleep(1)

        print("Docking successful!")
        control.rcs = False
        return True

    except Exception as err:
        control.throttle = 0.0
        control.rcs = False
        print(f"Docking failed: {err}")
        # Fix: seed the failure counter on first use; the bare ``+= 1``
        # raised NameError when the executing namespace had not defined it.
        _dock_fail_count = globals().get("_dock_fail_count", 0) + 1
        if _dock_fail_count > 3:
            raise Exception("Docking failed too many times")
        return False


def find_docking_port(vessel):
    """Return the first docking port on *vessel* in the 'ready' state, or None."""
    for port in vessel.parts.docking_ports:
        if port.state.name == 'ready':
            return port
    return None
# --- kosmos/control_primitives/execute_maneuver_node.py ---
# NOTE(review): uses ``time`` and ``math`` without importing them, like the
# sibling primitives -- presumably provided by the executing namespace.
# TODO confirm against the primitive loader.
async def execute_maneuver_node(conn, node=None, tolerance=1.0):
    """Warp to, orient for, and execute a maneuver node.

    Args:
        conn: active kRPC connection.
        node: node to execute; defaults to the vessel's first queued node.
        tolerance: remaining delta-v (m/s) below which the burn is complete.

    Returns:
        True when the burn finishes and the node is removed; False when
        there is no node, no thrust, or the burn raises.

    Raises:
        Exception: after more than 5 accumulated failures.
    """
    global _burn_fail_count

    vessel = conn.space_center.active_vessel
    control = vessel.control

    if node is None:
        if not control.nodes:
            print("No maneuver nodes to execute")
            return False
        node = control.nodes[0]

    try:
        # Fix: guard against a ZeroDivisionError in the burn-time estimate
        # when no engine is active (available_thrust == 0).
        if vessel.available_thrust <= 0:
            print("No available thrust; cannot execute maneuver")
            return False

        # Rocket-equation burn-time estimate:
        # t = m0 * ve * (1 - e^(-dv/ve)) / F, with ve = Isp * g0.
        isp = vessel.specific_impulse * 9.82  # effective exhaust velocity (m/s)
        mass = vessel.mass
        delta_v = node.delta_v
        burn_time = mass * isp * (1 - math.exp(-delta_v / isp)) / vessel.available_thrust

        # Center the burn on the node: start half the burn early.
        burn_ut = node.ut - (burn_time / 2)
        lead_time = 5  # start orienting 5 seconds early
        conn.space_center.warp_to(burn_ut - lead_time)

        # Orient to the maneuver vector and give SAS time to settle.
        control.sas = True
        control.sas_mode = conn.space_center.SASMode.maneuver
        time.sleep(3)

        remaining_delta_v = node.remaining_delta_v
        control.throttle = 1.0

        while remaining_delta_v > tolerance:
            remaining_delta_v = node.remaining_delta_v

            # Throttle down as we approach the target delta-v.
            if remaining_delta_v < 10:
                control.throttle = 0.1
            elif remaining_delta_v < 50:
                control.throttle = 0.5

            time.sleep(0.1)

        control.throttle = 0.0
        node.remove()

        print(f"Maneuver executed successfully, {remaining_delta_v:.1f} m/s remaining")
        return True

    except Exception as err:
        control.throttle = 0.0
        print(f"Maneuver execution failed: {err}")
        # Fix: seed the failure counter on first use; the bare ``+= 1``
        # raised NameError when the counter had never been defined.
        _burn_fail_count = globals().get("_burn_fail_count", 0) + 1
        if _burn_fail_count > 5:
            raise Exception("Maneuver execution failed too many times")
        return False
# --- kosmos/control_primitives/explore_until_body_found.py ---
# NOTE(review): these primitives use ``time`` (and launch_vessel calls the
# sibling primitive ``should_stage``) without importing them; they appear to
# execute in a namespace that provides those names.  TODO confirm against
# the primitive loader.
async def explore_until_body_found(conn, target_body_name, max_time=3600):
    """Poll the known celestial bodies until ``target_body_name`` appears.

    Args:
        conn: active kRPC connection.
        target_body_name: name of the body to look for (e.g. "Mun").
        max_time: give up after this many seconds.

    Returns:
        The CelestialBody on success, None on timeout.
    """
    space_center = conn.space_center
    vessel = space_center.active_vessel

    # Check whether the target is already known.
    if target_body_name in space_center.bodies:
        target_body = space_center.bodies[target_body_name]
        print(f"Found {target_body_name}")
        return target_body

    start_time = time.time()
    print(f"Exploring space to find {target_body_name}...")

    # NOTE(review): space_center.bodies is effectively static in stock KSP,
    # so this polling loop only helps if a mod adds bodies at runtime.
    while time.time() - start_time < max_time:
        if target_body_name in space_center.bodies:
            target_body = space_center.bodies[target_body_name]
            print(f"Found {target_body_name} after {time.time() - start_time:.1f} seconds")
            return target_body

        time.sleep(10)  # search interval

    print(f"Could not find {target_body_name} within {max_time} seconds")
    return None


# --- kosmos/control_primitives/land_vessel.py ---
async def land_vessel(conn, target_body=None, deploy_gear=True, target_speed=5.0):
    """Deorbit and land the active vessel on a surface.

    Args:
        conn: active kRPC connection.
        target_body: body to land on; defaults to the body currently orbited.
        deploy_gear: deploy landing gear below 1000 m when True.
        target_speed: desired terminal descent speed (m/s).

    Returns:
        True on touchdown (or if already down), False on failure.

    Raises:
        Exception: after more than 3 accumulated failures.
    """
    global _land_fail_count

    vessel = conn.space_center.active_vessel
    control = vessel.control

    if target_body is None:
        target_body = vessel.orbit.body

    try:
        # Fix: also treat water landings as down -- kRPC reports 'splashed',
        # and the original == 'landed' checks spun forever after splashdown.
        if vessel.situation.name in ('landed', 'splashed'):
            print("Vessel is already landed")
            return True

        # Deorbit burn (simplified): retrograde at full throttle until the
        # periapsis is driven well below the surface.
        if vessel.orbit.periapsis_altitude > 0:
            print("Executing deorbit burn...")
            control.sas = True
            control.sas_mode = conn.space_center.SASMode.retrograde
            control.throttle = 1.0

            while vessel.orbit.periapsis_altitude > -10000:
                time.sleep(0.1)

            control.throttle = 0.0

        # Descent phase: stream telemetry instead of polling RPCs.
        print("Beginning descent...")
        altitude_stream = conn.add_stream(getattr, vessel.flight(), 'surface_altitude')
        speed_stream = conn.add_stream(getattr, vessel.flight(), 'speed')
        vertical_speed_stream = conn.add_stream(getattr, vessel.flight(), 'vertical_speed')

        # Deploy parachutes if available and in atmosphere.
        if target_body.has_atmosphere and vessel.parts.parachutes:
            for parachute in vessel.parts.parachutes:
                if parachute.can_deploy:
                    parachute.deploy()
                    print("Parachutes deployed")

        # Powered landing phase.
        gear_deployed = False
        # Fix: initialize before the loop; if the vessel touched down before
        # the first iteration the touchdown report read an unbound local.
        speed = speed_stream()

        while vessel.situation.name not in ('landed', 'splashed'):
            altitude = altitude_stream()
            speed = speed_stream()
            vertical_speed = vertical_speed_stream()

            # Deploy landing gear on final approach.
            if altitude < 1000 and not gear_deployed and deploy_gear:
                control.gear = True
                gear_deployed = True
                print("Landing gear deployed")

            # Simplified suicide burn: hold retrograde and throttle
            # proportionally to the excess descent rate.
            if altitude < 500 and vertical_speed < -10:
                control.sas = True
                control.sas_mode = conn.space_center.SASMode.retrograde

                if vertical_speed < -target_speed:
                    control.throttle = min(1.0, abs(vertical_speed) / 20)
                else:
                    control.throttle = 0.0

            time.sleep(0.1)

        control.throttle = 0.0
        print(f"Landing successful! Touchdown speed: {speed:.1f} m/s")
        return True

    except Exception as err:
        control.throttle = 0.0
        print(f"Landing failed: {err}")
        # Fix: seed the failure counter on first use (bare += raised NameError).
        _land_fail_count = globals().get("_land_fail_count", 0) + 1
        if _land_fail_count > 3:
            raise Exception("Landing failed too many times")
        return False


# --- kosmos/control_primitives/launch_vessel.py ---
async def launch_vessel(conn, target_apoapsis=100000, target_inclination=0):
    """Launch the active vessel into a roughly circular orbit.

    Args:
        conn: active kRPC connection.
        target_apoapsis: target apoapsis altitude in metres (>= 70000 for a
            stable Kerbin orbit).
        target_inclination: reserved; the simplified ascent always flies a
            90-degree (eastward) heading.

    Returns:
        True once the circularization burn completes, False on failure.

    Raises:
        ValueError: if target_apoapsis is below 70000 m.
        Exception: after more than 3 accumulated failures.
    """
    global _stage_fail_count

    if target_apoapsis < 70000:
        raise ValueError("target_apoapsis must be at least 70000m for stable orbit")

    vessel = conn.space_center.active_vessel
    control = vessel.control

    try:
        # Pre-launch checks.
        if vessel.situation.name != 'pre_launch':
            raise ValueError("Vessel must be on launchpad to launch")

        # Initial staging and launch.
        conn.space_center.physics_warp_factor = 1
        control.sas = True
        control.rcs = False
        control.throttle = 1.0

        # Launch!
        control.activate_next_stage()

        # Monitor ascent via streams.
        altitude_stream = conn.add_stream(getattr, vessel.flight(), 'mean_altitude')
        apoapsis_stream = conn.add_stream(getattr, vessel.orbit, 'apoapsis_altitude')

        # Simple gravity turn: pitch linearly from vertical to horizontal
        # between the two altitudes below.
        vessel.auto_pilot.engage()
        vessel.auto_pilot.target_pitch_and_heading(90, 90)

        turn_start_altitude = 250
        turn_end_altitude = 45000

        while apoapsis_stream() < target_apoapsis:
            altitude = altitude_stream()

            # Execute gravity turn.
            if turn_start_altitude < altitude < turn_end_altitude:
                turn_progress = (altitude - turn_start_altitude) / (turn_end_altitude - turn_start_altitude)
                target_pitch = 90 - (turn_progress * 90)
                vessel.auto_pilot.target_pitch_and_heading(target_pitch, 90)

            # Stage when the current stage's fuel is spent (see should_stage).
            if should_stage(vessel):
                control.activate_next_stage()
                time.sleep(1)

            time.sleep(0.1)

        control.throttle = 0.0
        print(f"Target apoapsis of {target_apoapsis}m reached")

        # Coast until close to apoapsis, then circularize.
        while vessel.orbit.time_to_apoapsis > 60:
            time.sleep(1)

        control.throttle = 1.0
        periapsis_stream = conn.add_stream(getattr, vessel.orbit, 'periapsis_altitude')

        while periapsis_stream() < target_apoapsis * 0.9:
            time.sleep(0.1)

        control.throttle = 0.0
        vessel.auto_pilot.disengage()

        print(f"Successfully reached orbit: {vessel.orbit.apoapsis_altitude:.0f} x {vessel.orbit.periapsis_altitude:.0f}m")
        return True

    except Exception as err:
        control.throttle = 0.0
        # NOTE(review): kRPC's AutoPilot may not expose an ``engaged``
        # attribute -- verify against the client version in use.
        if vessel.auto_pilot.engaged:
            vessel.auto_pilot.disengage()
        print(f"Launch failed: {err}")
        # Fix: seed the failure counter on first use (bare += raised NameError).
        _stage_fail_count = globals().get("_stage_fail_count", 0) + 1
        if _stage_fail_count > 3:
            raise Exception("Launch failed too many times, check vessel design")
        return False
# --- kosmos/control_primitives/should_stage.py ---
def should_stage(vessel):
    """Decide whether the current stage is spent and should be dropped.

    Returns True when the stage's solid boosters are empty, or when both its
    liquid fuel and oxidizer are exhausted.
    """
    stage_resources = vessel.resources_in_decouple_stage(
        vessel.control.current_stage, cumulative=False
    )

    def _spent(resource_name):
        # A resource counts as spent only if the stage carries it at all
        # and the remaining amount is (near) zero.
        return (
            stage_resources.max(resource_name) > 0
            and stage_resources.amount(resource_name) < 0.1
        )

    if _spent('SolidFuel'):
        return True
    return _spent('LiquidFuel') and _spent('Oxidizer')


# --- kosmos/control_primitives/target_vessel_by_name.py ---
async def target_vessel_by_name(conn, vessel_name):
    """Set the active target to the first vessel whose name matches."""
    space_center = conn.space_center

    match = next(
        (candidate for candidate in space_center.vessels if candidate.name == vessel_name),
        None,
    )
    if match is not None:
        space_center.target_vessel = match
        print(f"Targeted vessel: {vessel_name}")
        return match

    print(f"No vessel named '{vessel_name}' found")
    return None


# --- kosmos/control_primitives/transfer_fuel.py ---
async def transfer_fuel(conn, from_tank, to_tank, fuel_type='LiquidFuel', amount=None):
    """Transfer fuel between two named tanks on the active vessel.

    When ``amount`` is None, transfers as much as the destination can accept
    or the source can provide, whichever is smaller.  Returns True on
    success, False otherwise.
    """
    vessel = conn.space_center.active_vessel

    try:
        source_tank = None
        dest_tank = None

        # Linear scan over all parts; the last matching part wins.
        for candidate in vessel.parts.all:
            if candidate.name == from_tank and candidate.resources.amount(fuel_type) > 0:
                source_tank = candidate
            elif candidate.name == to_tank and candidate.resources.max(fuel_type) > 0:
                dest_tank = candidate

        if not source_tank:
            print(f"No source tank '{from_tank}' with {fuel_type} found")
            return False

        if not dest_tank:
            print(f"No destination tank '{to_tank}' found")
            return False

        if amount is None:
            available = source_tank.resources.amount(fuel_type)
            free_capacity = dest_tank.resources.max(fuel_type) - dest_tank.resources.amount(fuel_type)
            amount = min(available, free_capacity)

        # Simplified: a real implementation would call the kRPC resource
        # transfer API here.
        print(f"Transferring {amount:.1f} units of {fuel_type} from {from_tank} to {to_tank}")
        return True

    except Exception as err:
        print(f"Fuel transfer failed: {err}")
        return False


# --- kosmos/prompts/__init__.py ---
import importlib.util
import os

import kosmos.utils as U


def load_prompt(prompt_name):
    """Load a prompt text file, preferring the directory next to this module.

    Using ``__file__`` means the correct prompts directory is found even if
    Python resolved the package from a different location; falls back to the
    installed package path when no local copy exists.
    """
    local_path = os.path.join(os.path.dirname(__file__), f"{prompt_name}.txt")
    if os.path.exists(local_path):
        return U.load_text(local_path)

    package_root = importlib.util.find_spec("kosmos").submodule_search_locations[0]
    return U.load_text(f"{package_root}/prompts/{prompt_name}.txt")
"""
Debug utilities for KOSMOS agents
Provides consistent debugging and logging functionality across all agents
"""

import os
import time
from typing import Any, Dict, List, Optional
from datetime import datetime


class DebugLogger:
    """Centralized debug logging for KOSMOS agents"""

    def __init__(self, agent_name: str, debug_level: str = "INFO"):
        self.agent_name = agent_name
        self.debug_level = debug_level
        self.start_time = time.time()
        # Severity order: DEBUG < INFO < WARNING < ERROR.
        self.levels = {"DEBUG": 0, "INFO": 1, "WARNING": 2, "ERROR": 3}

    def _should_log(self, level: str) -> bool:
        """True when ``level`` is at or above the configured threshold."""
        # Unknown level names on either side default to INFO severity (1).
        threshold = self.levels.get(self.debug_level, 1)
        return self.levels.get(level, 1) >= threshold

    def _format_message(self, level: str, message: str, **kwargs) -> str:
        """Build the colored, timestamped log line for ``message``."""
        now = datetime.now().strftime("%H:%M:%S.%f")[:-3]  # trim to milliseconds
        since_start = f"{time.time() - self.start_time:.3f}s"

        # ANSI color per severity.
        palette = {
            "DEBUG": "\033[36m",    # Cyan
            "INFO": "\033[32m",     # Green
            "WARNING": "\033[33m",  # Yellow
            "ERROR": "\033[31m",    # Red
        }
        reset = "\033[0m"
        color = palette.get(level, "")

        line = f"{color}🔍 {level}: {self.agent_name} [{now}|{since_start}] {message}{reset}"

        # Append any extra context as key=value pairs.
        if kwargs:
            extras = ", ".join(f"{key}={value}" for key, value in kwargs.items())
            line = f"{line} ({extras})"

        return line

    def _emit(self, level: str, message: str, **kwargs):
        """Print ``message`` if ``level`` passes the configured threshold."""
        if self._should_log(level):
            print(self._format_message(level, message, **kwargs))

    def debug(self, message: str, **kwargs):
        """Log debug message"""
        self._emit("DEBUG", message, **kwargs)

    def info(self, message: str, **kwargs):
        """Log info message"""
        self._emit("INFO", message, **kwargs)

    def warning(self, message: str, **kwargs):
        """Log warning message"""
        self._emit("WARNING", message, **kwargs)

    def error(self, message: str, **kwargs):
        """Log error message"""
        self._emit("ERROR", message, **kwargs)


def create_debug_logger(agent_name: str, debug_level: str = None) -> DebugLogger:
    """Create a debug logger, reading KOSMOS_DEBUG_LEVEL when unspecified."""
    level = debug_level if debug_level is not None else os.getenv("KOSMOS_DEBUG_LEVEL", "INFO")
    return DebugLogger(agent_name, level)
"""
File system utils.
"""
import collections
# Fix: is_sequence uses collections.abc; importing the submodule explicitly
# avoids relying on CPython's accidental registration of collections.abc.
import collections.abc
import os
import pickle
import sys
import errno
import shutil
import glob

# import pwd
import codecs
import hashlib
import tarfile
import fnmatch
import tempfile
from datetime import datetime
from socket import gethostname
import logging


# Short aliases for common os.path helpers.
f_ext = os.path.splitext

f_size = os.path.getsize

is_file = os.path.isfile

is_dir = os.path.isdir

get_dir = os.path.dirname


def host_name():
    "Get host name, alias with ``socket.gethostname()``"
    return gethostname()


def host_id():
    """
    Returns: first part of hostname up to '.'
    """
    return host_name().split(".")[0]


def utf_open(fname, mode):
    """
    Wrapper for codecs.open with UTF-8 encoding.
    """
    return codecs.open(fname, mode=mode, encoding="utf-8")


def is_sequence(obj):
    """
    Returns:
        True if obj is a collections.abc.Sequence and not a string.
    """
    return isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str)


def pack_varargs(args):
    """
    Pack *args or a single list arg as list

    def f(*args):
        arg_list = pack_varargs(args)
        # arg_list is now packed as a list
    """
    assert isinstance(args, tuple), "please input the tuple `args` as in *args"
    if len(args) == 1 and is_sequence(args[0]):
        return args[0]
    else:
        return args


def f_not_empty(*fpaths):
    """
    Returns:
        True if and only if the file exists and file size > 0
        if fpath is a dir, if and only if dir exists and has at least 1 file
    """
    fpath = f_join(*fpaths)
    if not os.path.exists(fpath):
        return False

    if os.path.isdir(fpath):
        return len(os.listdir(fpath)) > 0
    else:
        return os.path.getsize(fpath) > 0


def f_expand(fpath):
    """Expand env vars ($VAR) and the user home (~) in a path."""
    return os.path.expandvars(os.path.expanduser(fpath))


def f_exists(*fpaths):
    """True if the joined path exists."""
    return os.path.exists(f_join(*fpaths))


def f_join(*fpaths):
    """
    join file paths and expand special symbols like `~` for home dir
    """
    fpaths = pack_varargs(fpaths)
    fpath = f_expand(os.path.join(*fpaths))
    if isinstance(fpath, str):
        fpath = fpath.strip()
    return fpath


def f_listdir(
    *fpaths,
    filter_ext=None,
    filter=None,
    sort=True,
    full_path=False,
    nonexist_ok=True,
    recursive=False,
):
    """
    Args:
        full_path: True to return full paths to the dir contents
        filter: function that takes in file name and returns True to include
        nonexist_ok: True to return [] if the dir is non-existent, False to raise
        sort: sort the file names by alphabetical
        recursive: True to use os.walk to recursively list files. Note that `filter`
            will be applied to the relative path string to the root dir.
            e.g. filter will take "a/data1.txt" and "a/b/data3.txt" as input,
            instead of just the base file names "data1.txt" and "data3.txt".
            if False, will simply call os.listdir()
    """
    assert not (filter_ext and filter), "filter_ext and filter are mutually exclusive"
    dir_path = f_join(*fpaths)
    if not os.path.exists(dir_path) and nonexist_ok:
        return []
    if recursive:
        files = [
            os.path.join(os.path.relpath(root, dir_path), file)
            for root, _, files in os.walk(dir_path)
            for file in files
        ]
    else:
        files = os.listdir(dir_path)
    if filter is not None:
        files = [f for f in files if filter(f)]
    elif filter_ext is not None:
        files = [f for f in files if f.endswith(filter_ext)]
    if sort:
        files.sort()
    if full_path:
        return [os.path.join(dir_path, f) for f in files]
    else:
        return files


def f_mkdir(*fpaths):
    """
    Recursively creates all the subdirs
    If exist, do nothing.
    """
    fpath = f_join(*fpaths)
    os.makedirs(fpath, exist_ok=True)
    return fpath


def f_mkdir_in_path(*fpaths):
    """
    fpath is a file,
    recursively creates all the parent dirs that lead to the file
    If exist, do nothing.
    """
    os.makedirs(get_dir(f_join(*fpaths)), exist_ok=True)


def last_part_in_path(fpath):
    """
    https://stackoverflow.com/questions/3925096/how-to-get-only-the-last-part-of-a-path-in-python
    """
    return os.path.basename(os.path.normpath(f_expand(fpath)))


def is_abs_path(*fpath):
    """True if the joined path is absolute."""
    return os.path.isabs(f_join(*fpath))


def is_relative_path(*fpath):
    """True if the joined path is relative."""
    return not is_abs_path(f_join(*fpath))


def f_time(*fpath):
    # Fix: the old docstring claimed "modification time", but getctime returns
    # the metadata-change time (Unix) / creation time (Windows).
    "File ctime: metadata-change time on Unix, creation time on Windows"
    return str(os.path.getctime(f_join(*fpath)))


def f_append_before_ext(fpath, suffix):
    """
    Append a suffix to file name and retain its extension
    """
    name, ext = f_ext(fpath)
    return name + suffix + ext


def f_add_ext(fpath, ext):
    """
    Append an extension if not already there
    Args:
        ext: will add a preceding `.` if doesn't exist
    """
    if not ext.startswith("."):
        ext = "." + ext
    if fpath.endswith(ext):
        return fpath
    else:
        return fpath + ext


def f_has_ext(fpath, ext):
    "Test if file path has the given extension"
    _, actual_ext = f_ext(fpath)
    return actual_ext == "." + ext.lstrip(".")


def f_glob(*fpath):
    """Recursive glob over the joined path."""
    return glob.glob(f_join(*fpath), recursive=True)


def f_remove(*fpath, verbose=False, dry_run=False):
    """
    If exist, remove. Supports both dir and file. Supports glob wildcard.
    """
    assert isinstance(verbose, bool)
    fpath = f_join(fpath)
    if dry_run:
        print("Dry run, delete:", fpath)
        return
    for f in glob.glob(fpath):
        try:
            shutil.rmtree(f)
        except OSError as e:
            if e.errno == errno.ENOTDIR:
                try:
                    os.remove(f)
                # Fix: narrowed from a bare ``except:`` which also swallowed
                # KeyboardInterrupt/SystemExit; still best-effort.
                except OSError:  # final resort safeguard
                    pass
    if verbose:
        print(f'Deleted "{fpath}"')


def f_copy(fsrc, fdst, ignore=None, include=None, exists_ok=True, verbose=False):
    """
    Supports both dir and file. Supports glob wildcard.
    """
    fsrc, fdst = f_expand(fsrc), f_expand(fdst)
    for f in glob.glob(fsrc):
        try:
            f_copytree(f, fdst, ignore=ignore, include=include, exist_ok=exists_ok)
        except OSError as e:
            if e.errno == errno.ENOTDIR:
                shutil.copy(f, fdst)
            else:
                raise
    if verbose:
        print(f'Copied "{fsrc}" to "{fdst}"')


def _f_copytree(
    src,
    dst,
    symlinks=False,
    ignore=None,
    exist_ok=True,
    copy_function=shutil.copy2,
    ignore_dangling_symlinks=False,
):
    """Copied from python standard lib shutil.copytree
    except that we allow exist_ok
    Use f_copytree as entry
    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst, exist_ok=exist_ok)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    # We can't just leave it to `copy_function` because legacy
                    # code with a custom `copy_function` may rely on copytree
                    # doing the right thing.
                    os.symlink(linkto, dstname)
                    shutil.copystat(srcname, dstname, follow_symlinks=not symlinks)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occur. copy2 will raise an error
                    if os.path.isdir(srcname):
                        _f_copytree(
                            srcname, dstname, symlinks, ignore, exist_ok, copy_function
                        )
                    else:
                        copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                _f_copytree(srcname, dstname, symlinks, ignore, exist_ok, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except shutil.Error as err:
            errors.extend(err.args[0])
        except OSError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        shutil.copystat(src, dst)
    except OSError as why:
        # Copying file access times may fail on Windows
        if getattr(why, "winerror", None) is None:
            errors.append((src, dst, str(why)))
    if errors:
        raise shutil.Error(errors)
    return dst


def _include_patterns(*patterns):
    """Factory function that can be used with copytree() ignore parameter.

    Arguments define a sequence of glob-style patterns
    that are used to specify what files to NOT ignore.
    Creates and returns a function that determines this for each directory
    in the file hierarchy rooted at the source directory when used with
    shutil.copytree().
    """

    def _ignore_patterns(path, names):
        keep = set(
            name for pattern in patterns for name in fnmatch.filter(names, pattern)
        )
        ignore = set(
            name
            for name in names
            if name not in keep and not os.path.isdir(os.path.join(path, name))
        )
        return ignore

    return _ignore_patterns


def f_copytree(fsrc, fdst, symlinks=False, ignore=None, include=None, exist_ok=True):
    """Recursive copy with either an ignore or an include pattern list."""
    fsrc, fdst = f_expand(fsrc), f_expand(fdst)
    assert (ignore is None) or (
        include is None
    ), "ignore= and include= are mutually exclusive"
    if ignore:
        ignore = shutil.ignore_patterns(*ignore)
    elif include:
        ignore = _include_patterns(*include)
    _f_copytree(fsrc, fdst, ignore=ignore, symlinks=symlinks, exist_ok=exist_ok)


def f_move(fsrc, fdst):
    """Move every glob match of fsrc into fdst."""
    fsrc, fdst = f_expand(fsrc), f_expand(fdst)
    for f in glob.glob(fsrc):
        shutil.move(f, fdst)


def f_split_path(fpath, normpath=True):
    """
    Splits path into a list of its component folders

    Args:
        normpath: call os.path.normpath to remove redundant '/' and
            up-level references like ".."
    """
    if normpath:
        fpath = os.path.normpath(fpath)
    allparts = []
    while 1:
        parts = os.path.split(fpath)
        if parts[0] == fpath:  # sentinel for absolute paths
            allparts.insert(0, parts[0])
            break
        elif parts[1] == fpath:  # sentinel for relative paths
            allparts.insert(0, parts[1])
            break
        else:
            fpath = parts[0]
            allparts.insert(0, parts[1])
    return allparts
+ """ + if normpath: + fpath = os.path.normpath(fpath) + allparts = [] + while 1: + parts = os.path.split(fpath) + if parts[0] == fpath: # sentinel for absolute paths + allparts.insert(0, parts[0]) + break + elif parts[1] == fpath: # sentinel for relative paths + allparts.insert(0, parts[1]) + break + else: + fpath = parts[0] + allparts.insert(0, parts[1]) + return allparts + + +def get_script_dir(): + """ + Returns: the dir of current script + """ + return os.path.dirname(os.path.realpath(sys.argv[0])) + + +def get_script_file_name(): + """ + Returns: the dir of current script + """ + return os.path.basename(sys.argv[0]) + + +def get_script_self_path(): + """ + Returns: the dir of current script + """ + return os.path.realpath(sys.argv[0]) + + +def get_parent_dir(location, abspath=False): + """ + Args: + location: current directory or file + + Returns: + parent directory absolute or relative path + """ + _path = os.path.abspath if abspath else os.path.relpath + return _path(f_join(location, os.pardir)) + + +def md5_checksum(*fpath): + """ + File md5 signature + """ + hash_md5 = hashlib.md5() + with open(f_join(*fpath), "rb") as f: + for chunk in iter(lambda: f.read(65536), b""): + hash_md5.update(chunk) + return hash_md5.hexdigest() + + +def create_tar(fsrc, output_tarball, include=None, ignore=None, compress_mode="gz"): + """ + Args: + fsrc: source file or folder + output_tarball: output tar file name + compress_mode: "gz", "bz2", "xz" or "" (empty for uncompressed write) + include: include pattern, will trigger copy to temp directory + ignore: ignore pattern, will trigger copy to temp directory + """ + fsrc, output_tarball = f_expand(fsrc), f_expand(output_tarball) + assert compress_mode in ["gz", "bz2", "xz", ""] + src_base = os.path.basename(fsrc) + + tempdir = None + if include or ignore: + tempdir = tempfile.mkdtemp() + tempdest = f_join(tempdir, src_base) + f_copy(fsrc, tempdest, include=include, ignore=ignore) + fsrc = tempdest + + with 
tarfile.open(output_tarball, "w:" + compress_mode) as tar: + tar.add(fsrc, arcname=src_base) + + if tempdir: + f_remove(tempdir) + + +def extract_tar(source_tarball, output_dir=".", members=None): + """ + Args: + source_tarball: extract members from archive + output_dir: default to current working dir + members: must be a subset of the list returned by getmembers() + """ + source_tarball, output_dir = f_expand(source_tarball), f_expand(output_dir) + with tarfile.open(source_tarball, "r:*") as tar: + tar.extractall(output_dir, members=members) + + +def move_with_backup(*fpath, suffix=".bak"): + """ + Ensures that a path is not occupied. If there is a file, rename it by + adding @suffix. Resursively backs up everything. + + Args: + fpath: file path to clear + suffix: Add to backed up files (default: {'.bak'}) + """ + fpath = str(f_join(*fpath)) + if os.path.exists(fpath): + move_with_backup(fpath + suffix) + shutil.move(fpath, fpath + suffix) + + +def insert_before_ext(name, insert): + """ + log.txt -> log.ep50.txt + """ + name, ext = os.path.splitext(name) + return name + insert + ext + + +def timestamp_file_name(fname): + timestr = datetime.now().strftime("_%H-%M-%S_%m-%d-%y") + return insert_before_ext(fname, timestr) + + +def get_file_lock(*fpath, timeout: int = 15, logging_level="critical"): + """ + NFS-safe filesystem-backed lock. 
`pip install flufl.lock` + https://flufllock.readthedocs.io/en/stable/apiref.html + + Args: + fpath: should be a path on NFS so that every process can see it + timeout: seconds + """ + from flufl.lock import Lock + + logging.getLogger("flufl.lock").setLevel(logging_level.upper()) + return Lock(f_join(*fpath), lifetime=timeout) + + +def load_pickle(*fpaths): + with open(f_join(*fpaths), "rb") as fp: + return pickle.load(fp) + + +def dump_pickle(data, *fpaths): + with open(f_join(*fpaths), "wb") as fp: + pickle.dump(data, fp) + + +def load_text(*fpaths, by_lines=False): + with open(f_join(*fpaths), "r") as fp: + if by_lines: + return fp.readlines() + else: + return fp.read() + + +def load_text_lines(*fpaths): + return load_text(*fpaths, by_lines=True) + + +def dump_text(s, *fpaths): + with open(f_join(*fpaths), "w") as fp: + fp.write(s) + + +def dump_text_lines(lines: list[str], *fpaths, add_newline=True): + with open(f_join(*fpaths), "w") as fp: + for line in lines: + print(line, file=fp, end="\n" if add_newline else "") + + +# aliases to be consistent with other load_* and dump_* +pickle_load = load_pickle +pickle_dump = dump_pickle +text_load = load_text +read_text = load_text +read_text_lines = load_text_lines +write_text = dump_text +write_text_lines = dump_text_lines +text_dump = dump_text \ No newline at end of file diff --git a/kosmos/utils/json_utils.py b/kosmos/utils/json_utils.py index f48a26a..aa2a6ec 100644 --- a/kosmos/utils/json_utils.py +++ b/kosmos/utils/json_utils.py @@ -156,11 +156,27 @@ def fix_and_parse_json( raise json.JSONDecodeError("Empty or None JSON string", "", 0) # First, try to extract JSON from markdown code blocks - # Use greedy match to capture the entire JSON object, not just the first part - json_pattern = re.compile(r"```(?:json)?\s*(\{.*?\})\s*```", re.DOTALL) - match = json_pattern.search(json_str) - if match: - json_str = match.group(1) + # Handle both JSON objects {...} and arrays [...], with various language tags + 
json_patterns = [ + # JSON objects in code blocks (with various language tags or no tag) - use greedy matching + re.compile(r"```(?:json|python|py)?\s*(\{.*\})\s*```", re.DOTALL), + # JSON arrays in code blocks (with various language tags or no tag) - use greedy matching + re.compile(r"```(?:json|python|py)?\s*(\[.*\])\s*```", re.DOTALL), + # Try without any language tag specification - greedy matching + re.compile(r"```\s*(\{.*\})\s*```", re.DOTALL), + re.compile(r"```\s*(\[.*\])\s*```", re.DOTALL), + # Fallback: non-greedy matching for edge cases + re.compile(r"```(?:json|python|py)?\s*(\{.*?\})\s*```", re.DOTALL), + re.compile(r"```(?:json|python|py)?\s*(\[.*?\])\s*```", re.DOTALL), + ] + + for pattern in json_patterns: + match = pattern.search(json_str) + if match: + extracted = match.group(1) + print(f"DEBUG: Extracted JSON from code block: {extracted[:200]}...") + json_str = extracted + break # Clean up whitespace and normalize json_str = json_str.strip() @@ -215,52 +231,75 @@ def fix_and_parse_json( except json.JSONDecodeError: pass - # Manual extraction: find the first { and find matching } by counting braces - # This handles cases where there's text before/after the JSON object - try: - # Find first opening brace - brace_start = json_str.index("{") - # Count braces to find the matching closing brace - brace_count = 0 - brace_end = -1 - in_string = False - escape_next = False - - for i in range(brace_start, len(json_str)): - char = json_str[i] + # Manual extraction: find JSON objects {...} or arrays [...] 
+ # This handles cases where there's text before/after the JSON + def extract_json_structure(json_str, start_char, end_char): + try: + # Find first opening character + start_pos = json_str.index(start_char) + # Count characters to find the matching closing character + char_count = 0 + end_pos = -1 + in_string = False + escape_next = False - # Handle string content (don't count braces inside strings) - if escape_next: - escape_next = False - continue - if char == '\\': - escape_next = True - continue - if char == '"': - in_string = not in_string - continue + for i in range(start_pos, len(json_str)): + char = json_str[i] + + # Handle string content (don't count brackets/braces inside strings) + if escape_next: + escape_next = False + continue + if char == '\\': + escape_next = True + continue + if char == '"': + in_string = not in_string + continue + + # Count brackets/braces only outside strings + if not in_string: + if char == start_char: + char_count += 1 + elif char == end_char: + char_count -= 1 + if char_count == 0: + end_pos = i + break - # Count braces only outside strings - if not in_string: - if char == '{': - brace_count += 1 - elif char == '}': - brace_count -= 1 - if brace_count == 0: - brace_end = i - break - - if brace_end > brace_start: - extracted_json = json_str[brace_start:brace_end + 1] - print(f"DEBUG: Extracted JSON from position {brace_start} to {brace_end}") + if end_pos > start_pos: + extracted_json = json_str[start_pos:end_pos + 1] + print(f"DEBUG: Extracted {start_char}...{end_char} from position {start_pos} to {end_pos}") + return extracted_json + except ValueError: + return None + return None + + # Try extracting JSON object first + extracted = extract_json_structure(json_str, '{', '}') + if extracted: + try: + return json.loads(extracted) + except json.JSONDecodeError: + try: + # Try fixing the extracted JSON + fixed = correct_json(extracted) + return json.loads(fixed) + except json.JSONDecodeError as e: + print(f"DEBUG: Object 
extraction and fix failed: {e}")
+    
+    # Try extracting JSON array
+    extracted = extract_json_structure(json_str, '[', ']')
+    if extracted:
+        try:
+            return json.loads(extracted)
+        except json.JSONDecodeError:
             try:
-                return json.loads(extracted_json)
-            except json.JSONDecodeError:
                 # Try fixing the extracted JSON
-                fixed = correct_json(extracted_json)
+                fixed = correct_json(extracted)
                 return json.loads(fixed)
-    except (ValueError, json.JSONDecodeError) as e:
-        print(f"DEBUG: Manual extraction failed: {e}")
+            except json.JSONDecodeError as e:
+                print(f"DEBUG: Array extraction and fix failed: {e}")
     
     # If all else fails, raise an error with useful debugging info
     preview = json_str[:500] if len(json_str) > 500 else json_str
diff --git a/kosmos/utils/record_utils.py b/kosmos/utils/record_utils.py
index e69de29..97a19fe 100644
--- a/kosmos/utils/record_utils.py
+++ b/kosmos/utils/record_utils.py
@@ -0,0 +1,227 @@
+import time
+import re
+from .file_utils import *
+from .json_utils import *
+
+class EventRecorder:
+    def __init__(
+        self,
+        checkpoint_dir="checkpoint",
+        resume=False,
+        init_position=None,
+    ):
+        self.checkpoint_dir = checkpoint_dir
+        self.resource_history = set()
+        self.resource_vs_time = {}
+        self.resource_vs_iterations = {}
+        self.celestial_body_history = set()
+        self.vessel_situation_history = set()
+        self.init_position = init_position
+        self.orbital_history = [] # Track orbital parameters over time
+        self.position_history = [[0, 0, 0]] # 3D positions for space
+        self.mission_time = 0.0 # Mission elapsed time in seconds
+        self.universal_time_start = None # KSP universal time at mission start
+        self.iteration = 0
+        f_mkdir(self.checkpoint_dir, "events")
+        if resume:
+            self.resume()
+
+    def record(self, telemetry, mission):
+        mission = re.sub(f'[\\/:"*<>| ]', "_", mission)
+        mission = mission.replace(" ", "_") + time.strftime(
+            "_%Y%m%d_%H%M%S", time.localtime()
+        )
+        self.iteration += 1
+
+        if not self.init_position and telemetry:
+            # Initialize position 
from first telemetry data + first_event = telemetry[0][1] + if "position" in first_event: + self.init_position = [ + first_event["position"]["x"], + first_event["position"]["y"], + first_event["position"]["z"], + ] + + if not self.universal_time_start and telemetry: + # Set mission start time + first_event = telemetry[0][1] + if "universal_time" in first_event: + self.universal_time_start = first_event["universal_time"] + + for event_type, event in telemetry: + self.update_resource(event) + self.update_orbital_state(event) + if event_type == "observe": + self.update_mission_time(event) + + print( + f"Recorder message: {self.mission_time:.1f} seconds mission time\n" + f"Recorder message: {self.iteration} iteration passed" + ) + # Truncate mission name to avoid filename too long error + safe_mission_name = mission.replace('\n', '_').replace(' ', '_')[:100] + dump_json(telemetry, f_join(self.checkpoint_dir, "events", safe_mission_name)) + + def resume(self, cutoff=None): + self.resource_history = set() + self.resource_vs_time = {} + self.resource_vs_iterations = {} + self.mission_time = 0.0 + self.orbital_history = [] + self.position_history = [[0, 0, 0]] + self.celestial_body_history = set() + self.vessel_situation_history = set() + + def get_timestamp(string): + timestamp = "_".join(string.split("_")[-2:]) + return time.mktime(time.strptime(timestamp, "%Y%m%d_%H%M%S")) + + records = f_listdir(self.checkpoint_dir, "events") + sorted_records = sorted(records, key=get_timestamp) + + for record in sorted_records: + self.iteration += 1 + if cutoff and self.iteration > cutoff: + break + + telemetry = load_json(f_join(self.checkpoint_dir, "events", record)) + + if not self.init_position and telemetry: + first_event = telemetry[0][1] + if "position" in first_event: + self.init_position = [ + first_event["position"]["x"], + first_event["position"]["y"], + first_event["position"]["z"], + ] + + if not self.universal_time_start and telemetry: + first_event = telemetry[0][1] + 
if "position" in first_event: + self.init_position = [ + first_event["position"]["x"], + first_event["position"]["y"], + first_event["position"]["z"], + ] + + if not self.universal_time_start and telemetry: + first_event = telemetry[0][1] + if "universal_time" in first_event: + self.universal_time_start = first_event["universal_time"] + + for event_type, event in telemetry: + self.update_resource(event) + self.update_position(event) + self.update_orbital_state(event) + if event_type == "observe": + self.update_mission_time(event) + + def update_resource(self, event): + if "resources" not in event: + return + + resources = event["resources"] + mission_time = event.get("mission_time", self.mission_time) + current_body = event.get("current_body", "Unknown") + vessel_situation = event.get("vessel_situation", "Unknown") + + current_resources = set() + for resource_name, amount in resources.items(): + # Handle both dict and numeric amount values + if isinstance(amount, dict): + amount_value = amount.get('amount', 0) + else: + amount_value = amount + + if amount_value > 0.1: + current_resources.add(resource_name) + + new_resources = current_resources - self.resource_history + self.resource_history.update(current_resources) + self.celestial_body_history.add(current_body) + self.vessel_situation_history.add(vessel_situation) + + if new_resources: + mission_time_key = self.mission_time + mission_time + if mission_time_key not in self.resource_vs_time: + self.resource_vs_time[mission_time_key] = [] + self.resource_vs_time[mission_time_key].extend(new_resources) + + def update_mission_time(self, event): + if "mission_time" in event: + self.mission_time += event["mission_time"] + elif "universal_time" in event and self.universal_time_start: + self.mission_time = event["universal_time"] - self.uniersal_time_start + + def update_position(self, event): + if "position" not in event or not self.init_position: + return + + position = [ + event["position"]["x"] - 
self.init_position[0], + event["position"]["y"] - self.init_position[1], + event["position"]["z"] - self.init_position[2], + ] + + if self.position_history[-1] != position: + self.position_history.append(position) + + def update_orbital_state(self, event): + if "orbit_parameters" not in event: + return + + orbit_data = { + "mission_time": self.mission_time, + "apoapsis": event["orbit_parameters"].get("apoapsis_altitude", 0), + "periapsis": event["orbit_parameters"].get("periapsis_altitude", 0), + "inclination": event["orbit_parameters"].get("inclination", 0), + "eccentricity": event["orbit_parameters"].get("eccentricity", 0), + "current_body": event.get("current_body", "Unknown"), + "vessel_situation": event.get("vessel_situation", "Unknown"), + } + + if (not self.orbital_history or + abs(self.orbital_history[-1]["apoapsis"] - orbit_data["apoapsis"]) > 1000 or + abs(self.orbital_history[-1]["periapsis"] - orbit_data["periapsis"]) > 1000 or + self.orbital_history[-1]["current_body"] != orbit_data["current_body"]): + self.orbital_history.append(orbit_data) + + def get_exploration_summary(self): + return { + "resources_discovered": len(self.resource_history), + "celestial_bodies_visited": len(self.celestial_body_history), + "vessel_situations_experienced": len(self.vessel_situation_history), + "total_mission_time": self.mission_time, + "orbital_maneuvers": len(self.orbital_history), + "distance_traveled": self.calculate_total_distance(), + "iterations_completed": self.iteration, + } + + def calculate_total_distance(self): + if len(self.position_history) < 2: + return 0.0 + + total_distance = 0.0 + for i in range(1, len(self.position_history)): + prev_pos = self.position_history[i-1] + curr_pos = self.position_history[i] + + distance = ( + (curr_pos[0] - prev_pos[0]) ** 2 + + (curr_pos[1] - prev_pos[1]) ** 2 + + (curr_pos[2] - prev_pos[2]) ** 2 + ) ** 0.5 + + total_distance += distance + + return total_distance + + def get_resource_timeline(self): + return 
self.resource_vs_time
+
+    def get_orbital_progression(self):
+        return self.orbital_history
+
+    def get_visited_bodies(self):
+        return list(self.celestial_body_history)
\ No newline at end of file
diff --git a/krpc_readme_llm.md b/krpc_readme_llm.md
index 560a8c4..6faba2e 100644
--- a/krpc_readme_llm.md
+++ b/krpc_readme_llm.md
@@ -212,6 +212,13 @@ docking.speed_limit = 1.0 # m/s approach speed
 # Maneuver Planner
 maneuver_planner = mj.maneuver_planner
+
+# IMPORTANT: Enums are accessed from mj object, not from subsystems
+# Examples of common MechJeb enums:
+# mj.TimeReference.apoapsis, mj.TimeReference.periapsis, mj.TimeReference.computed
+# mj.SmartASSAutopilotMode.prograde, mj.SmartASSAutopilotMode.retrograde
+# mj.AscentPathType.classic
+
 # Plan Hohmann transfer
 hohmann = maneuver_planner.operation_hohmann
 hohmann.make_nodes(target_body=conn.space_center.bodies['Mun'])
@@ -318,6 +325,9 @@ warp.warp_to_soi_change()
 print("\n3. MUN ORBIT INSERTION")
 # Plan circularization at Mun periapsis
 circularize = maneuver.operation_circularize
+# Set time reference to periapsis for efficient circularization
+# IMPORTANT: TimeReference is accessed from mj, NOT from maneuver_planner
+circularize.time_selector.time_reference = mj.TimeReference.periapsis
 circularize.make_nodes()
 
 node_exec.enabled = True
diff --git a/main.py b/main.py
index af11f6f..b238b5b 100755
--- a/main.py
+++ b/main.py
@@ -1,11 +1,78 @@
-import krpc
+#!/usr/bin/env python3
+"""
+KOSMOS - AI Agents controlling Kerbal Space Program
+Main entry point for the system.
+""" -conn = krpc.connect() -print(conn.krpc.get_status().version) +import os +import sys +import argparse -vessel = conn.space_center.active_vessel -refframe = vessel.orbit.body.reference_frame +# Ensure we use the local kosmos package from this directory +# This prevents Python from importing from other locations (like Desktop) +script_dir = os.path.dirname(os.path.abspath(__file__)) +if script_dir not in sys.path: + sys.path.insert(0, script_dir) -with conn.stream(vessel.position, refframe) as position: - while True: - print(position()) \ No newline at end of file +from kosmos import Kosmos + +def main(): + parser = argparse.ArgumentParser(description='KOSMOS - Kerbal Space Program AI Agent System') + parser.add_argument('mission', nargs='?', default='', + help='Mission overview to execute (e.g., "Asteroid Redirect Mission: Orbital Rendezvous")') + + args = parser.parse_args() + + # Initialize KOSMOS with API key from environment + openai_api_key = os.getenv("OPENAI") + anthropic_api_key = os.getenv("ANTHROPIC") + + if not openai_api_key: + print("❌ Error: OpenAI API key required.") + print("Please set the OPENAI_API_KEY environment variable in your .env file") + sys.exit(1) + + if not anthropic_api_key: + print("❌ Error: Anthropic API key required.") + print("Please set the ANTHROPIC_API_KEY environment variable in your .env file") + sys.exit(1) + + # Get mission overview + mission_overview = args.mission + + if not mission_overview: + if args.interactive: + mission_overview = input("🎯 Enter mission overview: ").strip() + else: + print("❌ Error: Mission overview required.") + print("Usage: python main.py 'Mission Description'") + print(" or: python main.py --interactive") + print(" or: python main.py --help") + sys.exit(1) + + if not mission_overview: + print("❌ Error: No mission overview provided.") + sys.exit(1) + + print("🚀 KOSMOS - AI Agents for Kerbal Space Program") + print("=" * 50) + print(f"🎯 Mission: {mission_overview}") + print("=" * 60) + + kosmos = 
Kosmos( + openai_api_key=openai_api_key, + anthropic_api_key=anthropic_api_key, + checkpoint_dir="checkpoint", + max_iterations=160, + initial_mission=mission_overview, + resume=False, + ) + + print("✅ KOSMOS initialized successfully!") + print("🎯 Starting mission execution...") + + # Execute the mission + kosmos.learn() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 286b5b5..18d3f0c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -609,6 +609,18 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "cloudpickle" +version = "3.1.1" +description = "Pickler class to extend the standard pickle.Pickler functionality" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cloudpickle-3.1.1-py3-none-any.whl", hash = "sha256:c8c5a44295039331ee9dad40ba100a9c7297b6f988e50e87ccdf3765a668350e"}, + {file = "cloudpickle-3.1.1.tar.gz", hash = "sha256:b216fa8ae4019d5482a8ac3c95d8f6346115d8835911fd4aefd1a445e4242c64"}, +] + [[package]] name = "colorama" version = "0.4.6" @@ -843,6 +855,18 @@ files = [ [package.extras] tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] +[[package]] +name = "farama-notifications" +version = "0.0.4" +description = "Notifications for all Farama Foundation maintained libraries." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "Farama-Notifications-0.0.4.tar.gz", hash = "sha256:13fceff2d14314cf80703c8266462ebf3733c7d165336eee998fc58e545efd18"}, + {file = "Farama_Notifications-0.0.4-py3-none-any.whl", hash = "sha256:14de931035a41961f7c056361dc7f980762a143d05791ef5794a751a2caf05ae"}, +] + [[package]] name = "filelock" version = "3.19.1" @@ -1199,6 +1223,37 @@ files = [ [package.extras] protobuf = ["grpcio-tools (>=1.74.0)"] +[[package]] +name = "gymnasium" +version = "1.2.1" +description = "A standard API for reinforcement learning and a diverse set of reference environments (formerly Gym)." +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "gymnasium-1.2.1-py3-none-any.whl", hash = "sha256:85cd1c16351db0b89f73be54e952ddfece97b56d1e5400d2dcd59f58b7707963"}, + {file = "gymnasium-1.2.1.tar.gz", hash = "sha256:4e6480273528523a90b3db99befb6111b13f15fa0866de88c4b675770495b66c"}, +] + +[package.dependencies] +cloudpickle = ">=1.2.0" +farama-notifications = ">=0.0.1" +numpy = ">=1.21.0" +typing-extensions = ">=4.3.0" + +[package.extras] +all = ["ale_py (>=0.9)", "array-api-compat (>=1.11.0)", "array-api-compat (>=1.11.0)", "array-api-compat (>=1.11.0)", "box2d-py (==2.3.5)", "flax (>=0.5.0)", "imageio (>=2.14.1)", "jax (>=0.4.16)", "jaxlib (>=0.4.16)", "matplotlib (>=3.0)", "moviepy (>=1.0.0)", "mujoco (>=2.1.5)", "numpy (>=2.1)", "numpy (>=2.1)", "numpy (>=2.1)", "opencv-python (>=3.0)", "packaging (>=23.0)", "pygame (>=2.1.3)", "pygame (>=2.1.3)", "pygame (>=2.1.3)", "swig (==4.*)", "torch (>=1.13.0)"] +array-api = ["array-api-compat (>=1.11.0)", "numpy (>=2.1)", "packaging (>=23.0)"] +atari = ["ale_py (>=0.9)"] +box2d = ["box2d-py (==2.3.5)", "pygame (>=2.1.3)", "swig (==4.*)"] +classic-control = ["pygame (>=2.1.3)", "pygame (>=2.1.3)"] +jax = ["array-api-compat (>=1.11.0)", "flax (>=0.5.0)", "jax (>=0.4.16)", "jaxlib (>=0.4.16)", "numpy (>=2.1)"] +mujoco = ["imageio 
(>=2.14.1)", "mujoco (>=2.1.5)", "packaging (>=23.0)"] +other = ["matplotlib (>=3.0)", "moviepy (>=1.0.0)", "opencv-python (>=3.0)", "seaborn (>=0.13)"] +testing = ["array_api_extra (>=0.7.0)", "dill (>=0.3.7)", "pytest (>=7.1.3)", "scipy (>=1.7.3)"] +torch = ["array-api-compat (>=1.11.0)", "numpy (>=2.1)", "torch (>=1.13.0)"] +toy-text = ["pygame (>=2.1.3)", "pygame (>=2.1.3)"] + [[package]] name = "h11" version = "0.16.0" @@ -1876,6 +1931,26 @@ perplexity = ["langchain-perplexity"] together = ["langchain-together"] xai = ["langchain-xai"] +[[package]] +name = "langchain-chroma" +version = "0.2.6" +description = "An integration package connecting Chroma and LangChain." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "langchain_chroma-0.2.6-py3-none-any.whl", hash = "sha256:d7e10101b0942cd990eedb798c3d85ed3e8415a992c8a388843196f6ab97b41b"}, + {file = "langchain_chroma-0.2.6.tar.gz", hash = "sha256:ec5ca0f6f7692ac053741e076ea086c4be0cfcb5846c8693b1bcc3089c88b65e"}, +] + +[package.dependencies] +chromadb = ">=1.0.20" +langchain-core = ">=0.3.76" +numpy = [ + {version = ">=1.26.0", markers = "python_version < \"3.13\""}, + {version = ">=2.1.0", markers = "python_version >= \"3.13\""}, +] + [[package]] name = "langchain-core" version = "0.3.76" @@ -5247,4 +5322,4 @@ cffi = ["cffi (>=1.17,<2.0) ; platform_python_implementation != \"PyPy\" and pyt [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.14" -content-hash = "86b7c29786bd85bbac5181ef27e1f407a2c4cf66aa26bed02f6cf76b93dbbfd9" +content-hash = "1bcf1a9c1e8d7053524b53b32ee776eb023e339151b80215cebea0f7421ff036" diff --git a/pyproject.toml b/pyproject.toml index 6aa8a5a..da20978 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,9 @@ dependencies = [ "openai (>=1.104.2,<2.0.0)", "anthropic (>=0.64.0,<0.65.0)", "langchain (>=0.3.27,<0.4.0)", - "langchain-openai (>=0.3.33,<0.4.0)" + "langchain-openai (>=0.3.33,<0.4.0)", + "gymnasium (>=1.2.1,<2.0.0)", + 
"langchain-chroma (>=0.2.6,<0.3.0)" ]