from fastapi import FastAPI, Request, APIRouter, HTTPException
from flooddns.external.simulation.main import local_run
from models.simulation import SimulationPayload, TemporyPayload
import os
import pandas as pd
import subprocess
from schemas.simulation import deleteSimulation, popSimulation, insertSimulation, update_simulation_db, popResults
from datetime import datetime
from bson.objectid import ObjectId
from pymongo import MongoClient
import logging

router = APIRouter()

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# MongoDB connection
client = MongoClient("mongodb://localhost:27017")
db = client.db_simulation
collection = db.jobs

# Check the connection
try:
    client.server_info()  # Forces a call to the server
    logger.info("MongoDB connection successful")
except Exception as e:
    logger.error(f"Error connecting to MongoDB: {e}")


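# Expected request body for /simulate_flood_dns (inferred from the fields read
# below; the authoritative schema is SimulationPayload in models.simulation):
#   { "user_id": <str>,
#     "params": { "simulation_name", "num_jobs", "num_tors", "num_cores",
#                 "ring_size", "routing", "path", "seed", "tempID" } }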
@router.post("/simulate_flood_dns")
async def run_simulation(request: Request):
    payload = await request.json()
    payload_data = SimulationPayload(
        simulation_name=payload['params']['simulation_name'],
        num_jobs=payload['params']['num_jobs'],
        num_tors=payload['params']['num_tors'],
        n_cores=payload['params']['num_cores'],
        ring_size=payload['params']['ring_size'],
        routing=payload['params']['routing'],
        path=payload['params']['path'],
        seed=payload['params']['seed']
    )
    simulation_data = TemporyPayload(
        simulation_name=payload['params']['simulation_name'],
        num_jobs=payload['params']['num_jobs'],
        num_tors=payload['params']['num_tors'],
        n_cores=payload['params']['num_cores'],
        ring_size=payload['params']['ring_size'],
        routing=payload['params']['routing'],
        path=payload['params']['path'],
        seed=payload['params']['seed']
    )

    start_time = datetime.now()
    result = await local_run(
        num_jobs=payload_data.num_jobs,
        num_tors=payload_data.num_tors,
        n_cores=payload_data.n_cores,
        ring_size=payload_data.ring_size,
        routing=payload_data.routing,
        seed=payload_data.seed
    )
    end_time = datetime.now()

    logger.info(result)
    # This sentinel must match the exact string returned by local_run
    # (note the backtick here vs. the apostrophe used in /simulation_update).
    if result == "You can`t create simulation with your parameters":
        current_date = datetime.today().strftime('%Y-%m-%d')

        simulation = {
            "simulation_name": str(simulation_data.simulation_name),
            "path": "",
            "date": current_date,
            "params": f"{payload_data.num_jobs},{payload_data.n_cores},{payload_data.ring_size},{payload_data.routing},{payload_data.seed}",
            "user_id": str(payload['user_id']),
            "result": result,
            "start_time": start_time.isoformat(),
            "end_time": end_time.isoformat()
        }
        await insertSimulation(simulation)
        simulations1 = await popSimulation(str(payload['user_id']))
        return {"data": simulations1, "progress": result}

    job_info_path = os.path.join(".", "flooddns", "runs", f"seed_{payload_data.seed}", f"concurrent_jobs_{payload_data.num_jobs}",
                                 f"{payload_data.n_cores}_core_failures", f"ring_size_{payload_data.ring_size}", payload_data.routing, "logs_floodns", "job_info.csv")
    job_path = os.path.join(".", "flooddns", "runs", f"seed_{payload_data.seed}", f"concurrent_jobs_{payload_data.num_jobs}",
                            f"{payload_data.n_cores}_core_failures", f"ring_size_{payload_data.ring_size}", payload_data.routing)
    job_info_path = job_info_path.replace("\\", "/")
    job_path = job_path.replace("\\", "/")
    job_info = pd.read_csv(job_info_path, header=None)
    if job_info.empty:
        logger.warning("No jobs found.")
        return
    job_csv = os.path.join(".", "flooddns", "runs",
                           "headers", "job_info.header")
    job_columns = pd.read_csv(job_csv)
    job_info.columns = job_columns.columns
    current_date = datetime.today().strftime('%Y-%m-%d')

    simulation = {
        "simulation_name": str(simulation_data.simulation_name),
        "path": job_path,
        "date": current_date,
        "params": f"{payload_data.num_jobs},{payload_data.n_cores},{payload_data.ring_size},{payload_data.routing},{payload_data.seed}",
        "user_id": str(payload['user_id']),
        "result": result,
        "start_time": start_time.isoformat(),
        "end_time": end_time.isoformat()
    }
    await insertSimulation(simulation)
    simulations1 = await popSimulation(str(payload['user_id']))
    return {"data": simulations1, "progress": result, "tempID": payload["params"]["tempID"]}


@router.post("/get_simulate_flood_dns")
async def get_simulation(request: Request):
    user_id = await request.json()
    simulations1 = await popSimulation(str(user_id["state"]))
    return simulations1


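# Re-runs an existing simulation: looks up the stored record by simulation_id,
# reuses its comma-separated "params" string, and invokes the floodns JAR via
# subprocess (unlike /simulate_flood_dns, which calls local_run directly).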
@router.post("/re_run_simulation")
async def re_run_simulation(request: Request):
    params = await request.json()
    payload = params['data']
    simulation_id = params['simulation_id']
    user_id = params['user_id']

    # Look up the current simulation for this user
    current_simulation = collection.find_one(
        {"_id": ObjectId(simulation_id), "user_id": user_id})
    if not current_simulation:
        raise HTTPException(status_code=404, detail="Simulation not found")

    # Extract the stored parameters ("num_jobs,num_cores,ring_size,routing,seed")
    current_params = current_simulation['params'].split(',')
    num_jobs = current_params[0]
    num_cores = current_params[1]
    ring_size = current_params[2]
    routing = current_params[3]
    seed = current_params[4]

    # Re-run the simulation with the stored parameters.
    # Note: Popen/communicate blocks the event loop until the JAR finishes.
    start_time = datetime.now()
    process = subprocess.Popen(["java", "-jar", "./flooddns/floodns-basic-sim.jar",
                                payload], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, _ = process.communicate()
    end_time = datetime.now()
    output = output.decode('utf-8')

    # Prepare the updated simulation details
    new_path = f"./flooddns/runs/seed_{seed}/concurrent_jobs_{num_jobs}/{num_cores}_core_failures/ring_size_{ring_size}/{routing}"
    simulation = {
        "simulation_name": current_simulation['simulation_name'],
        "path": new_path,
        "date": current_simulation['date'],
        "params": f"{num_jobs},{num_cores},{ring_size},{routing},{seed}",
        "user_id": user_id,
        "result": output,
        "start_time": start_time.isoformat(),
        "end_time": end_time.isoformat(),
        "seed": seed
    }

    # Update the simulation in the database with new start and end times
    await update_simulation_db(simulation, simulation_id, user_id)

    return {"result": output, "start_time": start_time.isoformat(), "end_time": end_time.isoformat()}


@router.post("/delete_simulation")
async def delete_simulation(request: Request):
    params = await request.json()
    payload = params['data']
    await deleteSimulation({"_id": ObjectId(payload)})
    simulations1 = await popSimulation(str(params['user_id']))
    return simulations1


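# Re-runs a simulation with new parameters supplied by the client and updates
# the existing database record in place (rather than inserting a new one).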
@router.post("/simulation_update")
async def update_simulation(request: Request):
    payload = await request.json()

    # Extracting parameters from the payload
    params = payload['params']
    simulation_id = payload['simulationID']
    user_id = payload['user_id']

    # Creating payload_data and simulation_data from the parameters
    payload_data = SimulationPayload(
        simulation_name=params['simulation_name'],
        num_jobs=params['num_jobs'],
        num_tors=params['num_tors'],
        n_cores=params['num_cores'],
        ring_size=params['ring_size'],
        routing=params['routing'],
        path=params['path'],
        seed=params['seed']
    )
    simulation_data = TemporyPayload(
        simulation_name=params['simulation_name'],
        num_jobs=params['num_jobs'],
        num_tors=params['num_tors'],
        n_cores=params['num_cores'],
        ring_size=params['ring_size'],
        routing=params['routing'],
        path=params['path'],
        seed=params['seed']
    )

    # Running the simulation with the new parameters
    start_time = datetime.now()
    result = await local_run(
        num_jobs=payload_data.num_jobs,
        num_tors=payload_data.num_tors,
        n_cores=payload_data.n_cores,
        ring_size=payload_data.ring_size,
        routing=payload_data.routing,
        seed=payload_data.seed
    )
    end_time = datetime.now()

    # Prepare the updated simulation details
    current_date = datetime.today().strftime('%Y-%m-%d')
    simulation = {
        "simulation_name": str(simulation_data.simulation_name),
        "path": "",
        "date": current_date,
        "params": f"{payload_data.num_jobs},{payload_data.n_cores},{payload_data.ring_size},{payload_data.routing},{payload_data.seed}",
        "user_id": str(user_id),
        "result": result,
        "start_time": start_time.isoformat(),
        "end_time": end_time.isoformat(),
        "seed": simulation_data.seed
    }

    # Checking if the result indicates a failure to create the simulation
    if result == "You can't create simulation with your parameters":
        await update_simulation_db(simulation, simulation_id, user_id)
        simulations1 = await popSimulation(str(user_id))
        return {"data": simulations1, "progress": result}

    # If the simulation ran successfully, update the path and other details
    job_info_path = os.path.join(
        ".", "flooddns", "runs", f"seed_{payload_data.seed}",
        f"concurrent_jobs_{payload_data.num_jobs}",
        f"{payload_data.n_cores}_core_failures",
        f"ring_size_{payload_data.ring_size}", payload_data.routing, "logs_floodns", "job_info.csv"
    )
    job_path = os.path.join(
        ".", "flooddns", "runs", f"seed_{payload_data.seed}",
        f"concurrent_jobs_{payload_data.num_jobs}",
        f"{payload_data.n_cores}_core_failures",
        f"ring_size_{payload_data.ring_size}", payload_data.routing
    )
    job_info_path = job_info_path.replace("\\", "/")
    job_path = job_path.replace("\\", "/")
    job_info = pd.read_csv(job_info_path, header=None)

    if job_info.empty:
        logger.warning("No jobs found.")
        return

    job_csv = os.path.join(".", "flooddns", "runs",
                           "headers", "job_info.header")
    job_columns = pd.read_csv(job_csv)
    job_info.columns = job_columns.columns

    simulation["path"] = job_path

    # Update the simulation in the database
    await update_simulation_db(simulation, simulation_id, user_id)
    simulations1 = await popSimulation(str(user_id))
    return {"data": simulations1, "progress": result}


@router.post("/show_result")
async def show_results(request: Request):
    payload = await request.json()
    result = await popResults(str(payload["user_id"]), str(payload['data']))
    return result


@router.get("/get_experiment/{id}")
async def get_experiment(id: str):
    logging.info(f"Fetching experiment with ID: {id}")
    try:
        obj_id = ObjectId(id)
        experiment = collection.find_one({"_id": obj_id})
        if experiment is None:
            logging.error(f"Experiment with ID {id} not found in collection.")
            raise HTTPException(
                status_code=404, detail="Experiment not found in collection")
        # Convert ObjectId to string for JSON serialization
        experiment['_id'] = str(experiment['_id'])
        logging.info(f"Found experiment: {experiment}")
        return experiment
    except HTTPException:
        # Re-raise the 404 above instead of masking it as a 500
        raise
    except Exception as e:
        logging.error(f"Error fetching experiment with ID {id}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


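# Lists every file under the given path (resolved against the server's working
# directory). Note: the path is not validated against a base directory, so a
# caller could traverse outside the intended runs folder.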
@router.get("/get_files/{path:path}")
async def get_files(path: str):
    try:
        # Resolve the path relative to the current working directory
        base_path = os.path.join(os.getcwd(), path)
        logging.info(f"Checking path: {base_path}")

        if not os.path.exists(base_path):
            raise HTTPException(status_code=404, detail="Path not found")

        files_list = []
        for root, dirs, files in os.walk(base_path):
            for file in files:
                files_list.append(os.path.join(root, file).replace("\\", "/"))

        return {"files": files_list}
    except HTTPException:
        # Propagate the 404 rather than converting it into a 500
        raise
    except Exception as e:
        logging.error(f"Error fetching files from path {path}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")


@router.get("/get_file_content/{path:path}")
async def get_file_content(path: str):
    try:
        # Resolve the path and strip any "/./" segments left by path building
        base_path = os.path.join(os.getcwd(), path).replace("/./", "/")
        logging.info(f"Fetching file content from: {base_path}")

        if not os.path.exists(base_path):
            raise HTTPException(status_code=404, detail="File not found")

        with open(base_path, 'r') as file:
            content = file.read()

        return {"content": content}
    except HTTPException:
        # Propagate the 404 rather than converting it into a 500
        raise
    except Exception as e:
        logging.error(f"Error fetching file content from {path}: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")