--- /dev/null
+++ b/CaraNet/lib/axial_atten.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Aug 10 17:17:13 2021
+
+@author: angelou
+"""
+import torch
+import torch.nn as nn
+from lib.conv_layer import Conv
+from lib.self_attention import self_attn
+
+
+class AA_kernel(nn.Module):
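+    """Axial-attention (AA) kernel: a 1x1 and a 3x3 convolution followed by
+    self-attention along the height axis, then along the width axis."""
+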
+    def __init__(self, in_channel, out_channel):
+        super(AA_kernel, self).__init__()
+        self.conv0 = Conv(in_channel, out_channel, kSize=1, stride=1, padding=0)
+        self.conv1 = Conv(out_channel, out_channel, kSize=(3, 3), stride=1, padding=1)
+        self.Hattn = self_attn(out_channel, mode='h')
+        self.Wattn = self_attn(out_channel, mode='w')
+
+    def forward(self, x):
+        x = self.conv0(x)    # 1x1 conv: project in_channel -> out_channel
+        x = self.conv1(x)    # 3x3 conv: refine local context
+
+        Hx = self.Hattn(x)   # self-attention along the height axis
+        Wx = self.Wattn(Hx)  # self-attention along the width axis
+
+        return Wx
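+
+
+if __name__ == "__main__":
+    # Minimal smoke test (a sketch, not part of the original module): it assumes
+    # Conv and self_attn follow the signatures used above and that self_attn
+    # preserves the (B, C, H, W) shape. The input size is illustrative.
+    x = torch.randn(2, 64, 44, 44)
+    aa = AA_kernel(in_channel=64, out_channel=32)
+    print(aa(x).shape)  # expected: torch.Size([2, 32, 44, 44])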