# Import necessary libraries
import numpy as np
import joblib  # For loading the serialized machine learning model
import pandas as pd  # For data manipulation and analysis
from flask import Flask, request, jsonify  # Flask framework for creating the web API

# Initialize the Flask application with a descriptive name
total_sales_predictor_api = Flask("SuperKart Product Total Sales Prediction")

# Load the pre-trained regression model from a Joblib file
model = joblib.load("product_store_sales_total_prediction_model_v1_0.joblib")


# -----------------------------------------------
# Define a GET route for the root endpoint
# -----------------------------------------------
@total_sales_predictor_api.get('/')
def home():
    """
    Handles GET requests to the root URL ('/').
    Returns a simple welcome message to confirm the API is running.
    """
    return "Welcome to the SuperKart Product Total Sales Prediction API!"


# -----------------------------------------------
# Define a POST route for single-record prediction
# -----------------------------------------------
@total_sales_predictor_api.post('/v1/totalsales')
def predict_total_sales():
    """
    Handles POST requests to the '/v1/totalsales' endpoint.
    Accepts JSON input with product and store features, and returns
    the predicted 'Product_Store_Sales_Total' as JSON.
    """
    # Get JSON data from the request body
    input_data = request.get_json()

    # Extract the required input features from the JSON payload
    sample = {
        'Product_Weight': input_data['Product_Weight'],                      # Product weight
        'Product_Allocated_Area': input_data['Product_Allocated_Area'],      # Shelf/display area allocated
        'Product_MRP': input_data['Product_MRP'],                            # Maximum Retail Price
        'Store_Establishment_Year': input_data['Store_Establishment_Year'],  # Store's opening year
        'Product_Sugar_Content': input_data['Product_Sugar_Content'],        # Sugar category (Low/Medium/High)
        'Product_Type': input_data['Product_Type'],                          # Category/type of product
        'Store_Id': input_data['Store_Id'],                                  # Store identifier
        'Store_Size': input_data['Store_Size'],                              # Store size category
        'Store_Location_City_Type': input_data['Store_Location_City_Type'],  # Urban/rural/metro
        'Store_Type': input_data['Store_Type']                               # Type of store
    }

    # Convert the single input sample into a DataFrame (expected by the model)
    input_df = pd.DataFrame([sample])

    # Make a prediction using the trained model (predict returns a NumPy value)
    predicted_sales = model.predict(input_df)[0]

    # Round the result to 2 decimal places and convert to a native Python float
    predicted_sales = round(float(predicted_sales), 2)

    # Return the prediction result as a JSON response
    return jsonify({'Predicted Product Store Sales Total': predicted_sales})


# ---------------------------------------------------
# Define a POST route for batch prediction via CSV
# ---------------------------------------------------
@total_sales_predictor_api.post('/v1/totalsalesbatch')
def predict_total_sales_batch():
    """
    Handles POST requests to the '/v1/totalsalesbatch' endpoint.
    Accepts a CSV file of multiple records and returns a dictionary
    mapping 'Product_Id' to the predicted total sales value.
""" # Retrieve the uploaded file from the request file = request.files['file'] # Read the uploaded CSV into a DataFrame input_df = pd.read_csv(file) # Generate predictions using the trained model predicted_prices = model.predict(input_df).tolist() # Extract Product IDs to identify predictions product_ids = input_df['Product_Id'].tolist() # Pair each prediction with its corresponding Product ID output_dict = dict(zip(product_ids, [round(float(p), 2) for p in predicted_prices])) # Return the results as a dictionary in JSON response return output_dict # --------------------------------------------------- # Run the Flask app in debug mode if called directly # --------------------------------------------------- if __name__ == '__main__': total_sales_predictor_api.run(debug=True)