"""
This is the main file for the runtime client.

It is responsible for executing actions received from the OpenHands backend and producing observations.

NOTE: this will be executed inside the docker sandbox.
"""

import argparse
import asyncio
import base64
import io
import json
import mimetypes
import os
import re
import shutil
import tempfile
import time
import traceback
from contextlib import asynccontextmanager
from pathlib import Path
from zipfile import ZipFile

from fastapi import Depends, FastAPI, HTTPException, Request, UploadFile
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse, StreamingResponse
from fastapi.security import APIKeyHeader
from openhands_aci.utils.diff import get_diff
from pydantic import BaseModel
from starlette.exceptions import HTTPException as StarletteHTTPException
from uvicorn import run

from openhands.core.logger import openhands_logger as logger
from openhands.events.action import (
    Action,
    BrowseInteractiveAction,
    BrowseURLAction,
    CmdRunAction,
    FileReadAction,
    FileWriteAction,
    IPythonRunCellAction,
)
from openhands.events.event import FileEditSource, FileReadSource
from openhands.events.observation import (
    CmdOutputObservation,
    ErrorObservation,
    FileEditObservation,
    FileReadObservation,
    FileWriteObservation,
    IPythonRunCellObservation,
    Observation,
)
from openhands.events.serialization import event_from_dict, event_to_dict
from openhands.runtime.browser import browse
from openhands.runtime.browser.browser_env import BrowserEnv
from openhands.runtime.plugins import ALL_PLUGINS, JupyterPlugin, Plugin, VSCodePlugin
from openhands.runtime.utils.bash import BashSession
from openhands.runtime.utils.files import insert_lines, read_lines
from openhands.runtime.utils.runtime_init import init_user_and_working_directory
from openhands.runtime.utils.system_stats import get_system_stats
from openhands.utils.async_utils import call_sync_from_async, wait_all


class ActionRequest(BaseModel):
    action: dict


ROOT_GID = 0
INIT_COMMANDS = [
    'git config --global user.name "openhands" && git config --global user.email "[email protected]" && alias git="git --no-pager"',
]

SESSION_API_KEY = os.environ.get('SESSION_API_KEY')
api_key_header = APIKeyHeader(name='X-Session-API-Key', auto_error=False)


def verify_api_key(api_key: str = Depends(api_key_header)):
    if SESSION_API_KEY and api_key != SESSION_API_KEY:
        raise HTTPException(status_code=403, detail='Invalid API Key')
    return api_key


class ActionExecutor:
    """ActionExecutor runs inside the docker sandbox.

    It is responsible for executing actions received from the OpenHands backend
    and producing observations.
    """

    def __init__(
        self,
        plugins_to_load: list[Plugin],
        work_dir: str,
        username: str,
        user_id: int,
        browsergym_eval_env: str | None,
    ) -> None:
        self.plugins_to_load = plugins_to_load
        self._initial_cwd = work_dir
        self.username = username
        self.user_id = user_id
        _updated_user_id = init_user_and_working_directory(
            username=username, user_id=self.user_id, initial_cwd=work_dir
        )
        if _updated_user_id is not None:
            self.user_id = _updated_user_id

        self.bash_session: BashSession | None = None
        self.lock = asyncio.Lock()
        self.plugins: dict[str, Plugin] = {}
        self.browser = BrowserEnv(browsergym_eval_env)
        self.start_time = time.time()
        self.last_execution_time = self.start_time
        self._initialized = False

    @property
    def initial_cwd(self):
        return self._initial_cwd

    async def ainit(self):
        self.bash_session = BashSession(
            work_dir=self._initial_cwd,
            username=self.username,
            no_change_timeout_seconds=int(
                os.environ.get('NO_CHANGE_TIMEOUT_SECONDS', 30)
            ),
        )
        self.bash_session.initialize()
        await wait_all(
            (self._init_plugin(plugin) for plugin in self.plugins_to_load),
            timeout=30,
        )
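
        # Make the agent-skills helpers available inside the Jupyter kernel so
        # IPython cells can call them directly.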
        if 'agent_skills' in self.plugins and 'jupyter' in self.plugins:
            obs = await self.run_ipython(
                IPythonRunCellAction(
                    code='from openhands.runtime.plugins.agent_skills.agentskills import *\n'
                )
            )
            logger.debug(f'AgentSkills initialized: {obs}')

        await self._init_bash_commands()
        logger.debug('Runtime client initialized.')
        self._initialized = True

    @property
    def initialized(self) -> bool:
        return self._initialized

    async def _init_plugin(self, plugin: Plugin):
        assert self.bash_session is not None
        await plugin.initialize(self.username)
        self.plugins[plugin.name] = plugin
        logger.debug(f'Initialized plugin: {plugin.name}')

        if isinstance(plugin, JupyterPlugin):
            await self.run_ipython(
                IPythonRunCellAction(
                    code=f'import os; os.chdir("{self.bash_session.cwd}")'
                )
            )

    async def _init_bash_commands(self):
        logger.debug(f'Initializing by running {len(INIT_COMMANDS)} bash commands...')
        for command in INIT_COMMANDS:
            action = CmdRunAction(command=command)
            action.set_hard_timeout(300)
            logger.debug(f'Executing init command: {command}')
            obs = await self.run(action)
            assert isinstance(obs, CmdOutputObservation)
            logger.debug(
                f'Init command outputs (exit code: {obs.exit_code}): {obs.content}'
            )
            assert obs.exit_code == 0

        logger.debug('Bash init commands completed')

    async def run_action(self, action) -> Observation:
        async with self.lock:
            action_type = action.action
            logger.debug(f'Running action:\n{action}')
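            # Dispatch on the action type: the serialized action name maps to a
            # method on this executor (run, run_ipython, read, write, browse, ...).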
            observation = await getattr(self, action_type)(action)
            logger.debug(f'Action output:\n{observation}')
            return observation

    async def run(
        self, action: CmdRunAction
    ) -> CmdOutputObservation | ErrorObservation:
        assert self.bash_session is not None
        obs = await call_sync_from_async(self.bash_session.execute, action)
        return obs

    async def run_ipython(self, action: IPythonRunCellAction) -> Observation:
        assert self.bash_session is not None
        if 'jupyter' in self.plugins:
            _jupyter_plugin: JupyterPlugin = self.plugins['jupyter']

            jupyter_cwd = getattr(self, '_jupyter_cwd', None)
            if self.bash_session.cwd != jupyter_cwd:
                logger.debug(
                    f'{self.bash_session.cwd} != {jupyter_cwd} -> reset Jupyter PWD'
                )
                reset_jupyter_cwd_code = (
                    f'import os; os.chdir("{self.bash_session.cwd}")'
                )
                _aux_action = IPythonRunCellAction(code=reset_jupyter_cwd_code)
                _reset_obs: IPythonRunCellObservation = await _jupyter_plugin.run(
                    _aux_action
                )
                logger.debug(
                    f'Changed working directory in IPython to: {self.bash_session.cwd}. Output: {_reset_obs}'
                )
                self._jupyter_cwd = self.bash_session.cwd

            obs: IPythonRunCellObservation = await _jupyter_plugin.run(action)
            obs.content = obs.content.rstrip()
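
            # openhands-aci file tools wrap their structured output in
            # <oh_aci_output_...> markers; parse those blocks so file edits and
            # reads come back as proper observations instead of raw cell output.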
            matches = re.findall(
                r'<oh_aci_output_[0-9a-f]{32}>(.*?)</oh_aci_output_[0-9a-f]{32}>',
                obs.content,
                re.DOTALL,
            )
            if matches:
                results: list[str] = []
                if len(matches) == 1:
                    match = matches[0]
                    try:
                        result_dict = json.loads(match)
                        if result_dict.get('path'):
                            if result_dict['new_content'] is not None:
                                diff = get_diff(
                                    old_contents=result_dict['old_content'] or '',
                                    new_contents=result_dict['new_content'],
                                    filepath=result_dict['path'],
                                )
                                return FileEditObservation(
                                    content=diff,
                                    path=result_dict['path'],
                                    old_content=result_dict['old_content'],
                                    new_content=result_dict['new_content'],
                                    prev_exist=result_dict['prev_exist'],
                                    impl_source=FileEditSource.OH_ACI,
                                    formatted_output_and_error=result_dict[
                                        'formatted_output_and_error'
                                    ],
                                )
                            else:
                                return FileReadObservation(
                                    content=result_dict['formatted_output_and_error'],
                                    path=result_dict['path'],
                                    impl_source=FileReadSource.OH_ACI,
                                )
                        else:
                            results.append(result_dict['formatted_output_and_error'])
                    except json.JSONDecodeError:
                        results.append(
                            f"Invalid JSON in 'openhands-aci' output: {match}"
                        )
                else:
                    for match in matches:
                        try:
                            result_dict = json.loads(match)
                            results.append(result_dict['formatted_output_and_error'])
                        except json.JSONDecodeError:
                            results.append(
                                f"Invalid JSON in 'openhands-aci' output: {match}"
                            )

                obs.content = '\n'.join(str(result) for result in results)

            if action.include_extra:
                obs.content += (
                    f'\n[Jupyter current working directory: {self.bash_session.cwd}]'
                )
                obs.content += f'\n[Jupyter Python interpreter: {_jupyter_plugin.python_interpreter_path}]'
            return obs
        else:
            raise RuntimeError(
                'JupyterRequirement not found. Unable to run IPython action.'
            )

    def _resolve_path(self, path: str, working_dir: str) -> str:
        filepath = Path(path)
        if not filepath.is_absolute():
            return str(Path(working_dir) / filepath)
        return str(filepath)

    async def read(self, action: FileReadAction) -> Observation:
        assert self.bash_session is not None
        if action.impl_source == FileReadSource.OH_ACI:
            return await self.run_ipython(
                IPythonRunCellAction(
                    code=action.translated_ipython_code,
                    include_extra=False,
                )
            )

        working_dir = self.bash_session.cwd
        filepath = self._resolve_path(action.path, working_dir)
        try:
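            # Binary media (images, PDFs, video) are read as bytes and returned
            # as base64 data: URIs; everything else is read as UTF-8 text.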
            if filepath.lower().endswith(('.png', '.jpg', '.jpeg', '.bmp', '.gif')):
                with open(filepath, 'rb') as file:
                    image_data = file.read()
                    encoded_image = base64.b64encode(image_data).decode('utf-8')
                    mime_type, _ = mimetypes.guess_type(filepath)
                    if mime_type is None:
                        mime_type = 'image/png'
                    encoded_image = f'data:{mime_type};base64,{encoded_image}'

                return FileReadObservation(path=filepath, content=encoded_image)
            elif filepath.lower().endswith('.pdf'):
                with open(filepath, 'rb') as file:
                    pdf_data = file.read()
                    encoded_pdf = base64.b64encode(pdf_data).decode('utf-8')
                    encoded_pdf = f'data:application/pdf;base64,{encoded_pdf}'
                return FileReadObservation(path=filepath, content=encoded_pdf)
            elif filepath.lower().endswith(('.mp4', '.webm', '.ogg')):
                with open(filepath, 'rb') as file:
                    video_data = file.read()
                    encoded_video = base64.b64encode(video_data).decode('utf-8')
                    mime_type, _ = mimetypes.guess_type(filepath)
                    if mime_type is None:
                        mime_type = 'video/mp4'
                    encoded_video = f'data:{mime_type};base64,{encoded_video}'

                return FileReadObservation(path=filepath, content=encoded_video)

            with open(filepath, 'r', encoding='utf-8') as file:
                lines = read_lines(file.readlines(), action.start, action.end)
        except FileNotFoundError:
            return ErrorObservation(
                f'File not found: {filepath}. Your current working directory is {working_dir}.'
            )
        except UnicodeDecodeError:
            return ErrorObservation(f'File could not be decoded as utf-8: {filepath}.')
        except IsADirectoryError:
            return ErrorObservation(
                f'Path is a directory: {filepath}. You can only read files'
            )

        code_view = ''.join(lines)
        return FileReadObservation(path=filepath, content=code_view)

    async def write(self, action: FileWriteAction) -> Observation:
        assert self.bash_session is not None
        working_dir = self.bash_session.cwd
        filepath = self._resolve_path(action.path, working_dir)

        insert = action.content.split('\n')
        try:
            if not os.path.exists(os.path.dirname(filepath)):
                os.makedirs(os.path.dirname(filepath))

            file_exists = os.path.exists(filepath)
            if file_exists:
                file_stat = os.stat(filepath)
            else:
                file_stat = None

            mode = 'w' if not file_exists else 'r+'
            try:
                with open(filepath, mode, encoding='utf-8') as file:
                    if mode != 'w':
                        all_lines = file.readlines()
                        new_file = insert_lines(
                            insert, all_lines, action.start, action.end
                        )
                    else:
                        new_file = [i + '\n' for i in insert]

                    file.seek(0)
                    file.writelines(new_file)
                    file.truncate()
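
                # Preserve ownership and permissions when overwriting an existing
                # file; brand-new files default to 0o664, owned by the sandbox user.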
                if file_exists:
                    assert file_stat is not None
                    os.chmod(filepath, file_stat.st_mode)
                    os.chown(filepath, file_stat.st_uid, file_stat.st_gid)
                else:
                    os.chmod(filepath, 0o664)
                    os.chown(filepath, self.user_id, self.user_id)

            except FileNotFoundError:
                return ErrorObservation(f'File not found: {filepath}')
            except IsADirectoryError:
                return ErrorObservation(
                    f'Path is a directory: {filepath}. You can only write to files'
                )
            except UnicodeDecodeError:
                return ErrorObservation(
                    f'File could not be decoded as utf-8: {filepath}'
                )
        except PermissionError:
            return ErrorObservation(f'Malformed paths not permitted: {filepath}')
        return FileWriteObservation(content='', path=filepath)

    async def browse(self, action: BrowseURLAction) -> Observation:
        return await browse(action, self.browser)

    async def browse_interactive(self, action: BrowseInteractiveAction) -> Observation:
        return await browse(action, self.browser)

    def close(self):
        if self.bash_session is not None:
            self.bash_session.close()
        self.browser.close()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('port', type=int, help='Port to listen on')
    parser.add_argument('--working-dir', type=str, help='Working directory')
    parser.add_argument('--plugins', type=str, help='Plugins to initialize', nargs='+')
    parser.add_argument(
        '--username', type=str, help='User to run as', default='openhands'
    )
    parser.add_argument('--user-id', type=int, help='User ID to run as', default=1000)
    parser.add_argument(
        '--browsergym-eval-env',
        type=str,
        help='BrowserGym environment used for browser evaluation',
        default=None,
    )
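    # Illustrative invocation (plugin names come from ALL_PLUGINS; paths and port
    # depend on how the sandbox image is built):
    #   python3 <this file> 8000 --working-dir /workspace --plugins jupyter agent_skills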

    args = parser.parse_args()

    plugins_to_load: list[Plugin] = []
    if args.plugins:
        for plugin in args.plugins:
            if plugin not in ALL_PLUGINS:
                raise ValueError(f'Plugin {plugin} not found')
            plugins_to_load.append(ALL_PLUGINS[plugin]())

    client: ActionExecutor | None = None

    @asynccontextmanager
    async def lifespan(app: FastAPI):
        global client
        client = ActionExecutor(
            plugins_to_load,
            work_dir=args.working_dir,
            username=args.username,
            user_id=args.user_id,
            browsergym_eval_env=args.browsergym_eval_env,
        )
        await client.ainit()
        yield

        client.close()

    app = FastAPI(lifespan=lifespan)

    @app.exception_handler(Exception)
    async def global_exception_handler(request: Request, exc: Exception):
        logger.exception('Unhandled exception occurred:')
        return JSONResponse(
            status_code=500,
            content={'detail': 'An unexpected error occurred. Please try again later.'},
        )

    @app.exception_handler(StarletteHTTPException)
    async def http_exception_handler(request: Request, exc: StarletteHTTPException):
        logger.error(f'HTTP exception occurred: {exc.detail}')
        return JSONResponse(status_code=exc.status_code, content={'detail': exc.detail})

    @app.exception_handler(RequestValidationError)
    async def validation_exception_handler(
        request: Request, exc: RequestValidationError
    ):
        logger.error(f'Validation error occurred: {exc}')
        return JSONResponse(
            status_code=422,
            content={'detail': 'Invalid request parameters', 'errors': exc.errors()},
        )

    @app.middleware('http')
    async def authenticate_requests(request: Request, call_next):
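        # Health/info endpoints stay unauthenticated; everything else must
        # present the session API key.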
        if request.url.path != '/alive' and request.url.path != '/server_info':
            try:
                verify_api_key(request.headers.get('X-Session-API-Key'))
            except HTTPException as e:
                return JSONResponse(
                    status_code=e.status_code, content={'detail': e.detail}
                )
        response = await call_next(request)
        return response

    @app.get('/server_info')
    async def get_server_info():
        assert client is not None
        current_time = time.time()
        uptime = current_time - client.start_time
        idle_time = current_time - client.last_execution_time

        response = {
            'uptime': uptime,
            'idle_time': idle_time,
            'resources': get_system_stats(),
        }
        logger.info('Server info endpoint response: %s', response)
        return response
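
    # Illustrative request (the payload shape follows event_from_dict /
    # event_to_dict; the exact action schema is defined by the backend's event
    # serialization):
    #   POST /execute_action
    #   X-Session-API-Key: <SESSION_API_KEY>
    #   {"action": {"action": "run", "args": {"command": "ls"}}}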

    @app.post('/execute_action')
    async def execute_action(action_request: ActionRequest):
        assert client is not None
        try:
            action = event_from_dict(action_request.action)
            if not isinstance(action, Action):
                raise HTTPException(status_code=400, detail='Invalid action type')
            client.last_execution_time = time.time()
            observation = await client.run_action(action)
            return event_to_dict(observation)
        except HTTPException:
            raise
        except Exception as e:
            logger.error(f'Error while running /execute_action: {str(e)}')
            raise HTTPException(
                status_code=500,
                detail=traceback.format_exc(),
            )

    @app.post('/upload_file')
    async def upload_file(
        file: UploadFile, destination: str = '/', recursive: bool = False
    ):
        assert client is not None

        try:
            if not os.path.isabs(destination):
                raise HTTPException(
                    status_code=400, detail='Destination must be an absolute path'
                )

            full_dest_path = destination
            if not os.path.exists(full_dest_path):
                os.makedirs(full_dest_path, exist_ok=True)
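
            # Recursive uploads arrive as zip archives: save the archive into the
            # destination, extract it in place, then delete it.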
            if recursive or file.filename.endswith('.zip'):
                if not file.filename.endswith('.zip'):
                    raise HTTPException(
                        status_code=400, detail='Recursive uploads must be zip files'
                    )

                zip_path = os.path.join(full_dest_path, file.filename)
                with open(zip_path, 'wb') as buffer:
                    shutil.copyfileobj(file.file, buffer)

                shutil.unpack_archive(zip_path, full_dest_path)
                os.remove(zip_path)

                logger.debug(
                    f'Uploaded file {file.filename} and extracted to {destination}'
                )
            else:
                file_path = os.path.join(full_dest_path, file.filename)
                with open(file_path, 'wb') as buffer:
                    shutil.copyfileobj(file.file, buffer)
                logger.debug(f'Uploaded file {file.filename} to {destination}')

            return JSONResponse(
                content={
                    'filename': file.filename,
                    'destination': destination,
                    'recursive': recursive,
                },
                status_code=200,
            )

        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    @app.get('/download_files')
    async def download_file(path: str):
        logger.debug('Downloading files')
        try:
            if not os.path.isabs(path):
                raise HTTPException(
                    status_code=400, detail='Path must be an absolute path'
                )

            if not os.path.exists(path):
                raise HTTPException(status_code=404, detail='File not found')
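
            # Walk the requested directory, zip it into a temporary file, and
            # stream the archive back to the caller.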
            with tempfile.TemporaryFile() as temp_zip:
                with ZipFile(temp_zip, 'w') as zipf:
                    for root, _, files in os.walk(path):
                        for file in files:
                            file_path = os.path.join(root, file)
                            zipf.write(
                                file_path, arcname=os.path.relpath(file_path, path)
                            )
                temp_zip.seek(0)
                content = temp_zip.read()

            zip_stream = io.BytesIO(content)
            return StreamingResponse(
                content=zip_stream,
                media_type='application/zip',
                headers={'Content-Disposition': f'attachment; filename={path}.zip'},
            )

        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    @app.get('/alive')
    async def alive():
        if client is None or not client.initialized:
            return {'status': 'not initialized'}
        return {'status': 'ok'}

    @app.get('/vscode/connection_token')
    async def get_vscode_connection_token():
        assert client is not None
        if 'vscode' in client.plugins:
            plugin: VSCodePlugin = client.plugins['vscode']
            return {'token': plugin.vscode_connection_token}
        else:
            return {'token': None}

    @app.post('/list_files')
    async def list_files(request: Request):
        """List files in the specified path.

        This function retrieves a list of entries from the sandbox filesystem,
        listing directories before files.

        To list files (illustrative; substitute the server port and session key):
        ```sh
        curl -X POST http://localhost:$PORT/list_files \
            -H "X-Session-API-Key: $SESSION_API_KEY" \
            -H 'Content-Type: application/json' -d '{"path": "."}'
        ```

        Args:
            request (Request): The incoming request object. Its JSON body may
                contain a 'path' key (str, optional); relative paths are resolved
                against the client's initial working directory, which is also the
                default when no path is given.

        Returns:
            list: A list of entry names in the specified path. An empty list is
                returned if the path does not exist, is not a directory, or an
                error occurs.
        """
        assert client is not None

        request_dict = await request.json()
        path = request_dict.get('path', None)

        if path is None:
            full_path = client.initial_cwd
        elif os.path.isabs(path):
            full_path = path
        else:
            full_path = os.path.join(client.initial_cwd, path)

        try:
            if not os.path.exists(full_path) or not os.path.isdir(full_path):
                return []

            entries = os.listdir(full_path)
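
            # Classify entries: directories get a trailing '/' and are listed
            # before files, each group sorted case-insensitively.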
            directories = []
            files = []
            for entry in entries:
                entry_relative = entry.lstrip('/').split('/')[-1]
                full_entry_path = os.path.join(full_path, entry_relative)
                if os.path.exists(full_entry_path):
                    is_dir = os.path.isdir(full_entry_path)
                    if is_dir:
                        entry = entry.rstrip('/') + '/'
                        directories.append(entry)
                    else:
                        files.append(entry)

            directories.sort(key=lambda s: s.lower())
            files.sort(key=lambda s: s.lower())

            sorted_entries = directories + files
            return sorted_entries

        except Exception as e:
            logger.error(f'Error listing files: {e}')
            return []

    logger.debug(f'Starting action execution API on port {args.port}')
    run(app, host='0.0.0.0', port=args.port)