# COLAB_Inference_Video.ipynb
# First, mount your Google Drive so that its contents are available here. To do this, run the cell below and follow the instructions (press Enter after pasting the authorization code).

```python
from google.colab import drive
drive.mount('/content/drive')
```

# Then change the current directory so that it points to the main DL_Track folder. Check that this is correct by looking at the file contents on the left.

```python
%cd drive/My Drive/DL_Track
!pwd
```
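If you are unsure whether the working directory is right, a quick listing helps. This is just a convenience check, not part of the pipeline; the folder names below (`models`, `videos`, `analysedVideos`) are the ones the cells further down expect, so adjust them if your Drive layout differs.

```python
# Optional check: list the folders this notebook expects (adjust names to your own layout)
!ls models videos analysedVideos
```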
# Next, import the necessary packages (run once upon startup)

```python
from __future__ import division
import os
import pandas as pd
from pandas import ExcelWriter
import numpy as np
import matplotlib.pyplot as plt
plt.style.use("ggplot")
%matplotlib inline

from skimage.transform import resize
from skimage.morphology import skeletonize
from scipy.signal import resample, savgol_filter, butter, filtfilt
from PIL import Image, ImageDraw
import cv2
from google.colab.patches import cv2_imshow

import tensorflow as tf

from keras import backend as K
from keras.models import Model, load_model
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
```
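Optionally, you can confirm which runtime you are on (a convenience sketch, not required for the analysis). Inference runs on CPU too, but a GPU runtime (Runtime > Change runtime type) is noticeably faster.

```python
# Optional runtime check (not required for the analysis)
print('TensorFlow', tf.__version__)
print('GPU:', tf.test.gpu_device_name() or 'none detected')
```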
## Define custom functions and load trained models (run once upon startup)

```python
# Intersection over union (IoU), a measure of labelling accuracy (sometimes also called the Jaccard score)
def IoU(y_true, y_pred, smooth=1):
    intersection = K.sum(K.abs(y_true * y_pred), axis=-1)
    union = K.sum(y_true, -1) + K.sum(y_pred, -1) - intersection
    iou = (intersection + smooth) / (union + smooth)
    return iou

# Function to sort contours from proximal to distal (top to bottom; the bounding boxes are only used for sorting)
def sort_contours(cnts):
    i = 1  # Sort on the y-coordinate of each bounding box
    # Construct the list of bounding boxes and sort them from top to bottom
    boundingBoxes = [cv2.boundingRect(c) for c in cnts]
    (cnts, boundingBoxes) = zip(*sorted(zip(cnts, boundingBoxes), key=lambda b: b[1][i], reverse=False))
    return (cnts, boundingBoxes)

# Find only the coordinates representing one edge of a contour; edge: 'T' (top) or 'B' (bottom)
def contour_edge(edge, contour):
    pts = list(contour)
    ptsT = sorted(pts, key=lambda k: [k[0][0], k[0][1]])
    allx = []
    ally = []
    for a in range(0, len(ptsT)):
        allx.append(ptsT[a][0, 0])
        ally.append(ptsT[a][0, 1])
    un = np.unique(allx)
    leng = len(un) - 1
    x = []
    y = []
    for each in range(5, leng - 5):  # Ignore the first and last 5 points to avoid any curves
        indices = [i for i, e in enumerate(allx) if e == un[each]]
        if edge == 'T':
            loc = indices[0]
        else:
            loc = indices[-1]
        x.append(ptsT[loc][0, 0])
        y.append(ptsT[loc][0, 1])
    return np.array(x), np.array(y)

# Intersection point of two lines, each given as the coefficients (a, b, c) of ax + by = c;
# returns False if the lines are parallel
def intersection(L1, L2):
    D  = L1[0] * L2[1] - L1[1] * L2[0]
    Dx = L1[2] * L2[1] - L1[1] * L2[2]
    Dy = L1[0] * L2[2] - L1[2] * L2[0]
    if D != 0:
        x = Dx / D
        y = Dy / D
        return x, y
    else:
        return False

# Function to detect mouse clicks for the purpose of image calibration
# (not usable in Colab, where OpenCV windows are unavailable; see the calibration note further down)
def mclick(event, x, y, flags, param):
    # Grab references to the global variables
    global mlocs
    # If the left mouse button was clicked, record the y-coordinate
    if event == cv2.EVENT_LBUTTONDOWN:
        mlocs.append(y)

# Function to compute the distance between two x,y points
def distFunc(x1, y1, x2, y2):
    xdist = (x2 - x1) ** 2
    ydist = (y2 - y1) ** 2
    return np.sqrt(xdist + ydist)

###############################################################################

# IMPORT THE TRAINED MODELS

# Load the aponeurosis model
model_apo = load_model('./models/model-apo2-nc.h5', custom_objects={'IoU': IoU})

# Load the fascicle model
modelF = load_model('./models/model-fascSnippets2-nc.h5', custom_objects={'IoU': IoU})
```
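As a quick sanity check of the custom metric, you can evaluate `IoU` on a pair of toy masks (a purely illustrative sketch; the dummy arrays below are made up and are not part of the pipeline):

```python
# Toy example: 2 pixels labelled in y_true, 1 of which is also labelled in y_pred
y_true = K.constant(np.array([[0., 1., 1., 0.]]))
y_pred = K.constant(np.array([[0., 1., 0., 0.]]))
print(K.eval(IoU(y_true, y_pred)))  # (1 + 1) / (2 + 1) = 0.67 with smooth = 1
```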
# Define some processing settings

```python
apo_threshold = 0.3                     # Sensitivity threshold for detecting aponeuroses
fasc_threshold = 0.10                   # Sensitivity threshold for detecting fascicles
fasc_cont_thresh = 40                   # Minimum accepted contour length for fascicles (px)
flip = 1                                # Leave as 0 if fascicles are oriented bottom-left to top-right; otherwise set to 1
min_width = 40                          # Minimum acceptable distance (px) between aponeuroses
curvature = 1                           # Set to 3 for curved fascicles or 1 for a straight line
min_pennation = 10                      # Minimum acceptable pennation angle (deg)
max_pennation = 40                      # Maximum acceptable pennation angle (deg)
```

## Import the video that you want to analyse (set its location under 'vpath'). Note that the videos you want to analyse must be stored on your Google Drive, in this case in a folder called 'videos'.

```python
# DEFINE THE PATH OF YOUR VIDEO
vpath = './videos/test_vid.avi'

# Video properties (do not edit)
cap = cv2.VideoCapture(vpath)
vid_len = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
vid_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
vid_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
vid_fps = int(cap.get(cv2.CAP_PROP_FPS))
count = 0
dataout = []
indices = [i for i, a in enumerate(vpath) if a == '/']
dots = [i for i, a in enumerate(vpath) if a == '.']
os.makedirs('./analysedVideos', exist_ok=True)  # cv2.VideoWriter fails silently if this folder is missing
filename = './analysedVideos/' + vpath[indices[-1]+1:dots[-1]] + '.avi'
vid_out = cv2.VideoWriter(filename, cv2.VideoWriter_fourcc(*'DIVX'), vid_fps, (vid_width, vid_height))
calibDist = []
```
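Before running the full analysis, it can help to confirm that the video actually opened and to preview the first frame as the models will see it under the current 'flip' setting. This is an optional sketch, not part of the original pipeline:

```python
# Optional: confirm the video opened and preview the first frame at the current 'flip' setting
if not cap.isOpened():
    raise IOError('Could not open ' + vpath + ' - check the path and file name')
print('Frames: %d  Size: %dx%d  FPS: %d' % (vid_len, vid_width, vid_height, vid_fps))

ret, frame0 = cap.read()
if ret:
    cv2_imshow(np.fliplr(frame0) if flip == 1 else frame0)
cap.set(cv2.CAP_PROP_POS_FRAMES, 0)  # Rewind so the analysis still starts from frame 0
```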
```python
# NORMALLY THERE WOULD BE AN OPTION HERE TO SCALE YOUR IMAGE, BUT UNFORTUNATELY
# NOT ALL OPENCV FUNCTIONALITY IS AVAILABLE IN COLAB.
# IF I FIND A GOOD WORKAROUND I WILL ADD IT LATER.
# FOR NOW I SUGGEST COMPUTING A SCALING FACTOR MANUALLY, E.G. IN FIJI/IMAGEJ
```
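One manual workaround is sketched below. The analysis further down divides lengths by `calibDist[0]/10`, i.e. it assumes `calibDist[0]` is the distance in pixels between two points 10 mm apart. The marker coordinates here (`marker_y1`, `marker_y2`) are hypothetical placeholders: replace them with values read off one frame in Fiji/ImageJ, or leave this cell unrun to keep all results in pixels.

```python
# OPTIONAL MANUAL CALIBRATION (a sketch - the y-coordinates below are placeholders)
marker_y1 = 100                            # y-coordinate of the first scale marker (px)
marker_y2 = 198                            # y-coordinate of a second marker 10 mm below the first (px)
calibDist = [abs(marker_y2 - marker_y1)]   # Distance in px corresponding to 10 mm
print(str(calibDist[0]) + ' px = 10 mm')
```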
## Here the actual analysis starts. The processed video is automatically saved to the 'analysedVideos' folder, which you can reach from the left side of this page (click the 'Files' icon) or from your Google Drive directly. BEFORE RUNNING THIS CELL, check that the 'flip' option above is set correctly, as well as the name of your video.

NOTE: Normally each processed frame would be displayed, but this is very messy in Colab, so the display lines near the end of the loop are commented out.

```python
fasc_l_all = []
pennation_all = []
x_lows_all = []
x_highs_all = []
thickness_all = []

for a in range(0, vid_len-1):

    # FORMAT EACH FRAME, RESHAPE AND COMPUTE NN PREDICTIONS
    _, frame = cap.read()
    img = img_to_array(frame)
    if flip == 1:
        img = np.fliplr(img)
    img_orig = img  # Keep a copy for display
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    h = img.shape[0]
    w = img.shape[1]
    img = np.reshape(img, [-1, h, w, 1])
    img = resize(img, (1, 512, 512, 1), mode='constant', preserve_range=True)
    img = img/255.0

    pred_apo = model_apo.predict(img)
    pred_apo_t = (pred_apo > apo_threshold).astype(np.uint8)
    pred_fasc = modelF.predict(img)
    pred_fasc_t = (pred_fasc > fasc_threshold).astype(np.uint8)

    # Resize the predictions back to the original frame size
    img = resize(img, (1, h, w, 1))
    img = np.reshape(img, (h, w))
    pred_apo = resize(pred_apo, (1, h, w, 1))
    pred_apo = np.reshape(pred_apo, (h, w))
    pred_apo_t = resize(pred_apo_t, (1, h, w, 1))
    pred_apo_t = np.reshape(pred_apo_t, (h, w))
    pred_fasc = resize(pred_fasc, (1, h, w, 1))
    pred_fasc = np.reshape(pred_fasc, (h, w))
    pred_fasc_t = resize(pred_fasc_t, (1, h, w, 1))
    pred_fasc_t = np.reshape(pred_fasc_t, (h, w))

    #############################################

    # COMPUTE CONTOURS TO IDENTIFY THE APONEUROSES
    _, thresh = cv2.threshold(pred_apo_t, 0, 255, cv2.THRESH_BINARY)  # Or set a lower sensitivity threshold above and threshold here instead
    thresh = thresh.astype('uint8')
    contours, hierarchy = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)

    contours = [i for i in contours if len(i) > 600]  # Remove any contours that are very small
    contours, _ = sort_contours(contours)             # Sort contours from top to bottom

    contours_re2 = []
    for contour in contours:
        pts = list(contour)
        ptsT = sorted(pts, key=lambda k: [k[0][0], k[0][1]])  # Sort each contour based on x values
        allx = []
        ally = []
        for aa in range(0, len(ptsT)):
            allx.append(ptsT[aa][0, 0])
            ally.append(ptsT[aa][0, 1])
        app = np.array(list(zip(allx, ally)))
        contours_re2.append(app)

    # Merge nearby contours
    xs1 = []
    xs2 = []
    ys1 = []
    ys2 = []
    maskT = np.zeros(thresh.shape, np.uint8)
    for cnt in contours_re2:
        ys1.append(cnt[0][1])
        ys2.append(cnt[-1][1])
        xs1.append(cnt[0][0])
        xs2.append(cnt[-1][0])
        cv2.drawContours(maskT, [cnt], 0, 255, -1)

    for countU in range(0, len(contours_re2)-1):
        if xs1[countU+1] > xs2[countU]:  # Check whether contour 2 starts to the right of where contour 1 ends
            y1 = ys2[countU]
            y2 = ys1[countU+1]
            if y1-10 <= y2 <= y1+10:     # Merge if the two ends are within 10 px vertically
                m = np.vstack((contours_re2[countU], contours_re2[countU+1]))
                cv2.drawContours(maskT, [m], 0, 255, -1)

    maskT[maskT > 0] = 1
    skeleton = skeletonize(maskT).astype(np.uint8)
    kernel = np.ones((3, 7), np.uint8)
    dilate = cv2.dilate(skeleton, kernel, iterations=15)
    erode = cv2.erode(dilate, kernel, iterations=10)

    contoursE, hierarchy = cv2.findContours(erode, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
    mask_apoE = np.zeros(thresh.shape, np.uint8)
    contoursE = [i for i in contoursE if len(i) > 600]  # Remove any contours that are very small
    for contour in contoursE:
        cv2.drawContours(mask_apoE, [contour], 0, 255, -1)
    contoursE, _ = sort_contours(contoursE)

    if len(contoursE) >= 2:

        # Get the x,y coordinates of the lower edge of the upper aponeurosis and the upper edge of the lower one
        # NOTE: THE APONEUROSES DISPLAYED IN THE IMAGES ARE FROM mask_apoE, NOT THE CONTOUR EDGES BELOW
        upp_x, upp_y = contour_edge('B', contoursE[0])
        if contoursE[1][0, 0, 1] > (contoursE[0][0, 0, 1] + min_width):
            low_x, low_y = contour_edge('T', contoursE[1])
        else:
            low_x, low_y = contour_edge('T', contoursE[2])

        upp_y_new = savgol_filter(upp_y, 81, 2)  # Window size, polynomial order
        low_y_new = savgol_filter(low_y, 81, 2)

        # Make a binary mask to only include fascicles within the region between the 2 aponeuroses
        ex_mask = np.zeros(thresh.shape, np.uint8)
        ex_1 = 0
        ex_2 = np.minimum(len(low_x), len(upp_x))
        for ii in range(ex_1, ex_2):
            ymin = int(np.floor(upp_y_new[ii]))
            ymax = int(np.ceil(low_y_new[ii]))
            ex_mask[:ymin, ii] = 0
            ex_mask[ymax:, ii] = 0
            ex_mask[ymin:ymax, ii] = 255

        # Compute muscle thickness as the minimum distance between the aponeuroses in the central portion of the image
        Alist = sorted(set(upp_x).intersection(low_x))
        Alen = len(Alist)  # How many x values overlap between the two aponeuroses
        A1 = int(Alist[0] + (.33 * Alen))
        A2 = int(Alist[0] + (.66 * Alen))
        mid = int((A2-A1) / 2 + A1)
        mindist = 10000
        upp_ind = np.where(upp_x == mid)[0]
        if upp_ind.size > 0:
            upp_ind = upp_ind[0]
        else:
            upp_ind = len(upp_x) - 1  # Fall back to the last point if the mid-point is not on the upper edge

        for val in range(A1, A2):
            if val >= len(low_x):
                continue
            dist = distFunc(upp_x[upp_ind], upp_y_new[upp_ind], low_x[val], low_y_new[val])
            if dist < mindist:
                mindist = dist

        # Add aponeuroses to a mask for display
        imgT = np.zeros((h, w, 3), np.uint8)

        # Compute 2nd-order polynomials to approximate the shape of each aponeurosis
        # (the lower fit is named h_fit rather than h so the image height h is not overwritten)
        zUA = np.polyfit(upp_x, upp_y_new, 2)
        g = np.poly1d(zUA)
        zLA = np.polyfit(low_x, low_y_new, 2)
        h_fit = np.poly1d(zLA)

        mid = (low_x[-1]-low_x[0])/2 + low_x[0]               # Find the middle of the lower aponeurosis
        x1 = np.linspace(low_x[0]-700, low_x[-1]+700, 10000)  # Extrapolate the polynomial fits to either side
        y_UA = g(x1)
        y_LA = h_fit(x1)

        new_X_UA = np.linspace(mid-700, mid+700, 5000)  # Extrapolate x,y data around the mid-point
        new_Y_UA = g(new_X_UA)
        new_X_LA = np.linspace(mid-700, mid+700, 5000)
        new_Y_LA = h_fit(new_X_LA)

        #############################################

        # COMPUTE CONTOURS TO IDENTIFY FASCICLES/FASCICLE ORIENTATION
        _, threshF = cv2.threshold(pred_fasc_t, 0, 255, cv2.THRESH_BINARY)
        threshF = threshF.astype('uint8')
        contoursF, hierarchy = cv2.findContours(threshF, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

        maskF = np.zeros(threshF.shape, np.uint8)
        for contour in contoursF:  # Remove any contours that are very small
            if len(contour) > fasc_cont_thresh:
                cv2.drawContours(maskF, [contour], 0, 255, -1)

        # Only include fascicles within the region between the 2 aponeuroses
        mask_Fi = maskF & ex_mask
        contoursF2, hierarchy = cv2.findContours(mask_Fi, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)

        contoursF3 = [contour for contour in contoursF2 if len(contour) > fasc_cont_thresh]

        xs = []
        ys = []
        fas_ext = []
        fasc_l = []
        pennation = []
        x_low1 = []
        x_high1 = []

        for cnt in contoursF3:
            x, y = contour_edge('B', cnt)
            if len(x) == 0:
                continue
            z = np.polyfit(np.array(x), np.array(y), 1)  # Fit a straight line to the fascicle snippet
            f = np.poly1d(z)
            newX = np.linspace(-400, w+400, 5000)  # Extrapolate the fitted line beyond the image
            newY = f(newX)

            # Find the intersection between each fascicle and the two aponeuroses
            diffU = newY - new_Y_UA
            locU = np.where(diffU == min(diffU, key=abs))[0]
            diffL = newY - new_Y_LA
            locL = np.where(diffL == min(diffL, key=abs))[0]

            coordsX = newX[int(locL):int(locU)]
            coordsY = newY[int(locL):int(locU)]

            # Angle of the lower aponeurosis relative to horizontal
            if locL >= 4950:  # Too close to the end of the extrapolated range; look backwards instead
                Apoangle = int(np.arctan((new_Y_LA[locL-50]-new_Y_LA[locL])/(new_X_LA[locL]-new_X_LA[locL-50]))*180/np.pi)
            else:
                Apoangle = int(np.arctan((new_Y_LA[locL]-new_Y_LA[locL+50])/(new_X_LA[locL+50]-new_X_LA[locL]))*180/np.pi)
            Apoangle = 90 + abs(Apoangle)

            # Don't include fascicles that are completely outside of the field of view or
            # those that don't pass through the central 1/3 of the image
            if np.sum(coordsX) > 0 and coordsX[-1] > 0 and coordsX[0] < np.maximum(upp_x[-1], low_x[-1]) and not np.isnan(Apoangle):
                FascAng = float(np.arctan((coordsX[0]-coordsX[-1])/(new_Y_LA[locL]-new_Y_UA[locU]))*180/np.pi)*-1
                ActualAng = Apoangle - FascAng

                if min_pennation <= ActualAng <= max_pennation:  # Don't include 'fascicles' beyond the accepted range of pennation angles
                    length1 = np.sqrt((newX[locU] - newX[locL])**2 + (y_UA[locU] - y_LA[locL])**2)
                    fasc_l.append(length1[0])  # Fascicle length
                    pennation.append(Apoangle - FascAng)
                    x_low1.append(coordsX[0].astype('int32'))
                    x_high1.append(coordsX[-1].astype('int32'))
                    coords = np.array(list(zip(coordsX.astype('int32'), coordsY.astype('int32'))))
                    cv2.polylines(imgT, [coords], False, (20, 15, 200), 3)

        # Store the results for each frame and normalise using the scale factor (if calibration was done above)
        try:
            midthick = mindist[0]  # Muscle thickness
        except (TypeError, IndexError):
            midthick = mindist

        if 'calibDist' in locals() and len(calibDist) > 0:
            fasc_l = np.array(fasc_l) / (calibDist[0]/10)  # Convert px to mm (calibDist[0] px = 10 mm)
            midthick = midthick / (calibDist[0]/10)

    else:
        # Fewer than 2 aponeuroses were found: store NaNs for this frame
        fasc_l = [float("nan")]
        pennation = [float("nan")]
        x_low1 = [float("nan")]
        x_high1 = [float("nan")]
        imgT = np.zeros((h, w, 3), np.uint8)
        midthick = float("nan")

    fasc_l_all.append(fasc_l)
    pennation_all.append(pennation)
    x_lows_all.append(x_low1)
    x_highs_all.append(x_high1)
    thickness_all.append(midthick)

    #############################################

    # DISPLAY EACH PROCESSED IMAGE AND METRICS

    img_orig[mask_apoE > 0] = (235, 25, 42)

    comb = cv2.addWeighted(img_orig.astype(np.uint8), 1, imgT, 0.8, 0)
    vid_out.write(comb)  # Write each image to the video file
    # cv2.putText(comb, ('Frame: ' + str(a+1) + ' of ' + str(vid_len)), (125,350), cv2.FONT_HERSHEY_DUPLEX, 1, (249,249,249))
    # cv2.putText(comb, ('Pennation angle: ' + str('%.1f' % np.median(pennation_all[-1])) + ' deg'), (125,410), cv2.FONT_HERSHEY_DUPLEX, 1, (249,249,249))
    # if calibDist:
    #     cv2.putText(comb, ('Fascicle length: ' + str('%.2f' % np.median(fasc_l_all[-1]) + ' mm')), (125,380), cv2.FONT_HERSHEY_DUPLEX, 1, (249,249,249))
    #     cv2.putText(comb, ('Thickness at centre: ' + str('%.1f' % thickness_all[-1]) + ' mm'), (125,440), cv2.FONT_HERSHEY_DUPLEX, 1, (249,249,249))
    # else:
    #     cv2.putText(comb, ('Fascicle length: ' + str('%.2f' % np.median(fasc_l_all[-1]) + ' px')), (125,380), cv2.FONT_HERSHEY_DUPLEX, 1, (249,249,249))
    #     cv2.putText(comb, ('Thickness at centre: ' + str('%.1f' % thickness_all[-1]) + ' px'), (125,440), cv2.FONT_HERSHEY_DUPLEX, 1, (249,249,249))

    # cv2_imshow(comb)

cap.release()
vid_out.release()
cv2.destroyAllWindows()
```
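Once the loop has finished, a quick plot gives an overview of the results before exporting. This is an optional sketch (the names `med_fl` and `med_pe` are illustrative): each entry of `fasc_l_all` holds the fascicle lengths detected in one frame, so the per-frame median gives a single trace over the video.

```python
# Optional: plot the median fascicle length and pennation angle per frame
med_fl = [np.nanmedian(f) if len(f) > 0 else np.nan for f in fasc_l_all]
med_pe = [np.nanmedian(p) if len(p) > 0 else np.nan for p in pennation_all]
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 6), sharex=True)
ax1.plot(med_fl)
ax1.set_ylabel('Median fascicle length (px or mm)')
ax2.plot(med_pe)
ax2.set_ylabel('Median pennation (deg)')
ax2.set_xlabel('Frame')
plt.show()
```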
## Prepare the data and export it to an Excel file (this can also be found in the 'analysedVideos' folder)

```python
# Pad each per-frame list with zeros so all rows have the same length
fl = np.zeros([len(fasc_l_all), len(max(fasc_l_all, key=lambda x: len(x)))])
pe = np.zeros([len(pennation_all), len(max(pennation_all, key=lambda x: len(x)))])
xl = np.zeros([len(x_lows_all), len(max(x_lows_all, key=lambda x: len(x)))])
xh = np.zeros([len(x_highs_all), len(max(x_highs_all, key=lambda x: len(x)))])

# Replace the padding zeros with NaN (note: any genuine zero values are also set to NaN)
for i, j in enumerate(fasc_l_all):
    fl[i][0:len(j)] = j
fl[fl == 0] = np.nan
for i, j in enumerate(pennation_all):
    pe[i][0:len(j)] = j
pe[pe == 0] = np.nan
for i, j in enumerate(x_lows_all):
    xl[i][0:len(j)] = j
xl[xl == 0] = np.nan
for i, j in enumerate(x_highs_all):
    xh[i][0:len(j)] = j
xh[xh == 0] = np.nan

df1 = pd.DataFrame(data=fl)
df2 = pd.DataFrame(data=pe)
df3 = pd.DataFrame(data=xl)
df4 = pd.DataFrame(data=xh)
df5 = pd.DataFrame(data=thickness_all)

# Create a Pandas Excel writer with your xlsx filename (same as the video filename)
writer = ExcelWriter('./analysedVideos/' + vpath[indices[-1]+1:dots[-1]] + '.xlsx')

# Write each dataframe to a different worksheet
df1.to_excel(writer, sheet_name='Fasc_length')
df2.to_excel(writer, sheet_name='Pennation')
df3.to_excel(writer, sheet_name='X_low')
df4.to_excel(writer, sheet_name='X_high')
df5.to_excel(writer, sheet_name='Thickness')

# Close the Pandas Excel writer and output the Excel file
writer.save()
```
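As a final check, you can read the exported file straight back (an optional sketch; `check` is an illustrative name):

```python
# Optional: read one sheet back to confirm the export worked
check = pd.read_excel('./analysedVideos/' + vpath[indices[-1]+1:dots[-1]] + '.xlsx', sheet_name='Fasc_length')
check.head()
```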