diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..0b4e70380641c236a60123a97157c2c574f061d3
--- /dev/null
+++ b/README.md
@@ -0,0 +1,14 @@
+# Camera calibration results must be converted to a struct before they can be saved
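+
+A minimal sketch of what this note means, assuming the intrinsics come from OpenCV's cv2.calibrateCamera; scipy.io.savemat stores a nested Python dict as a MATLAB-style struct (all names below are placeholders, not values from this repo):
+```
+# Sketch: persist camera calibration results as a struct.
+# camera_matrix / dist_coeffs stand in for cv2.calibrateCamera output.
+import numpy as np
+from scipy.io import savemat
+
+camera_matrix = np.eye(3)   # 3x3 intrinsic matrix (placeholder)
+dist_coeffs = np.zeros(5)   # distortion coefficients (placeholder)
+
+# savemat writes the nested dict as a single struct named 'calibration'
+savemat("calibration.mat", {"calibration": {"K": camera_matrix, "dist": dist_coeffs}})
+```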
+
+# darknet must be compiled appropriately to produce libdarknet.so before it can be called from Python
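+
+The darknet Makefile has a LIBSO=1 switch; building with it produces libdarknet.so, which the Python wrapper then loads through ctypes. A quick way to check that the library is loadable (the path is an assumption):
+```
+# Sketch: confirm the compiled shared library loads from Python.
+# Build first with LIBSO=1 set in the Makefile: make clean && make
+import ctypes
+
+lib = ctypes.CDLL("./libdarknet.so", mode=ctypes.RTLD_GLOBAL)
+print("loaded:", lib._name)
+```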
+
+
+To test darknet_images.py, run:
+```
+python ./darknet_images.py --input ~/farmbot/img --weights ~/farmbot/weights/yolov3-vattenhallen_best.weights --dont_show --ext_output --save_labels --config_file ~/farmbot/cfg/yolov3-vattenhallen-test.cfg --data_file ~/farmbot/data/vattenhallen.data
+```
+Default values are used for the remaining options.
+
+Where do the saved labels go? Each one is written to the same directory as its image, as a .txt file with the same basename, so a whole folder of images can be detected in one run.
+
+It would be better to change where the labels are saved so that they end up in a separate folder.
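+
+A sketch of that post-processing step, assuming the --input folder from the command above (the labels/ destination is hypothetical):
+```
+# Sketch: gather the generated .txt label files into a separate labels/ folder.
+import shutil
+from pathlib import Path
+
+img_dir = Path.home() / "farmbot" / "img"   # same folder passed to --input
+label_dir = img_dir.parent / "labels"       # hypothetical destination folder
+label_dir.mkdir(exist_ok=True)
+
+for txt in img_dir.glob("*.txt"):
+    shutil.move(str(txt), str(label_dir / txt.name))
+```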
\ No newline at end of file
diff --git a/cfg/yolov3-vattenhallen-test.cfg b/cfg/yolov3-vattenhallen-test.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..f8befdce110671f0a54acb69d4fb847fee13ea73
--- /dev/null
+++ b/cfg/yolov3-vattenhallen-test.cfg
@@ -0,0 +1,785 @@
+[net]
+# Testing
+batch=1
+subdivisions=1
+# Training
+# batch=64
+# subdivisions=32
+width=416
+height=416
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 14000
+policy=steps
+steps=11200,12600
+scales=.1,.1
+
+
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+######################
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=36
+activation=linear
+
+[yolo]
+mask = 6,7,8
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=7
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 61
+
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=36
+activation=linear
+
+[yolo]
+mask = 3,4,5
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=7
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 36
+
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=36
+activation=linear
+
+[yolo]
+mask = 0,1,2
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=7
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
diff --git a/cfg/yolov3-vattenhallen.cfg b/cfg/yolov3-vattenhallen.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..28479292ff41e822800e9c8982557d02b9af4ff8
--- /dev/null
+++ b/cfg/yolov3-vattenhallen.cfg
@@ -0,0 +1,785 @@
+[net]
+# Testing
+# batch=1
+# subdivisions=1
+# Training
+batch=64
+subdivisions=32
+width=416
+height=416
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 14000
+policy=steps
+steps=11200,12600
+scales=.1,.1
+
+
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+######################
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=36
+activation=linear
+
+[yolo]
+mask = 6,7,8
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=7
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 61
+
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=36
+activation=linear
+
+[yolo]
+mask = 3,4,5
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=7
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 36
+
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=36
+activation=linear
+
+[yolo]
+mask = 0,1,2
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=7
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
diff --git a/cfg/yolov3-veges-test.cfg b/cfg/yolov3-veges-test.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..241b9602385208d2ad1decb839834fe164936d1c
--- /dev/null
+++ b/cfg/yolov3-veges-test.cfg
@@ -0,0 +1,785 @@
+[net]
+# Testing
+batch=1
+subdivisions=1
+# Training
+# batch=64
+# subdivisions=32
+width=416
+height=416
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 20000
+policy=steps
+steps=16000,18000
+scales=.1,.1
+
+
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+######################
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=45
+activation=linear
+
+[yolo]
+mask = 6,7,8
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=10
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 61
+
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=45
+activation=linear
+
+[yolo]
+mask = 3,4,5
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=10
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 36
+
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=45
+activation=linear
+
+[yolo]
+mask = 0,1,2
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=10
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
diff --git a/cfg/yolov3-veges.cfg b/cfg/yolov3-veges.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..393efb60c6c81192bf6c6abd4c0421631a667314
--- /dev/null
+++ b/cfg/yolov3-veges.cfg
@@ -0,0 +1,785 @@
+[net]
+# Testing
+# batch=1
+# subdivisions=1
+# Training
+batch=64
+subdivisions=32
+width=416
+height=416
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 20000
+policy=steps
+steps=16000,18000
+scales=.1,.1
+
+
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+######################
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=45
+activation=linear
+
+[yolo]
+mask = 6,7,8
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=10
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 61
+
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=45
+activation=linear
+
+[yolo]
+mask = 3,4,5
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=10
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 36
+
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=45
+activation=linear
+
+[yolo]
+mask = 0,1,2
+anchors = 10,13,  16,30,  33,23,  30,61,  62,45,  59,119,  116,90,  156,198,  373,326
+classes=10
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
diff --git a/data/vattenhallen.data b/data/vattenhallen.data
new file mode 100644
index 0000000000000000000000000000000000000000..f6812d76178a9ea3c20b6fa7cc7c65e627a710b8
--- /dev/null
+++ b/data/vattenhallen.data
@@ -0,0 +1,6 @@
+classes= 7
+train  = ../dataset/train.list
+valid  = ../dataset/test.list
+names = /home/xzleo/farmbot/dataset/classes.txt
+backup = backup
+
diff --git a/data/veges.data b/data/veges.data
new file mode 100644
index 0000000000000000000000000000000000000000..174c19ac27ce2c9a824c158610674924055be803
--- /dev/null
+++ b/data/veges.data
@@ -0,0 +1,6 @@
+classes= 10
+train  = /home/xzleo/farmbot/dataset/train.txt
+valid  = /home/xzleo/farmbot/dataset/test.txt
+names = /home/xzleo/farmbot/dataset/classes.txt
+backup = backup
+
diff --git a/dataset/WIN_20210929_16_08_31_Pro.jpg:Zone.Identifier b/dataset/WIN_20210929_16_08_31_Pro.jpg:Zone.Identifier
new file mode 100644
index 0000000000000000000000000000000000000000..744d15fb2c7e0460223a0f3e635f4837382693bc
--- /dev/null
+++ b/dataset/WIN_20210929_16_08_31_Pro.jpg:Zone.Identifier
@@ -0,0 +1,3 @@
+[ZoneTransfer]
+LastWriterPackageFamilyName=Microsoft.WindowsCamera_8wekyb3d8bbwe
+ZoneId=3
diff --git a/dataset/WIN_20210929_16_14_16_Pro.jpg:Zone.Identifier b/dataset/WIN_20210929_16_14_16_Pro.jpg:Zone.Identifier
new file mode 100644
index 0000000000000000000000000000000000000000..744d15fb2c7e0460223a0f3e635f4837382693bc
--- /dev/null
+++ b/dataset/WIN_20210929_16_14_16_Pro.jpg:Zone.Identifier
@@ -0,0 +1,3 @@
+[ZoneTransfer]
+LastWriterPackageFamilyName=Microsoft.WindowsCamera_8wekyb3d8bbwe
+ZoneId=3
diff --git a/dataset/classes.txt b/dataset/classes.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1a6be544e489fe0952b260052802f2a27feb4f89
--- /dev/null
+++ b/dataset/classes.txt
@@ -0,0 +1,7 @@
+tomato
+mushroom
+potato
+carrot
+beetroot
+zucchini
+hand
diff --git a/dataset/test.list b/dataset/test.list
new file mode 100644
index 0000000000000000000000000000000000000000..5dbdcbf603ee7592570b09af9511228485fdc947
--- /dev/null
+++ b/dataset/test.list
@@ -0,0 +1,80 @@
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_22_15_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_39_45_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_41_35_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_40_45_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_44_41_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_27_25_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_05_45_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_02_43_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_08_27_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_22_25_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_17_58_31_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_17_55_27_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_17_54_31_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_22_25_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_06_35_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_20_15_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_41_55_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_04_25_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_23_10_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_41_27_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_05_55_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_00_55_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_09_43_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_19_43_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_22_43_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_28_43_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_06_31_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_05_41_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_30_31_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_07_25_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_29_45_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_20_43_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_05_35_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_40_54_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_39_25_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_21_10_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_40_27_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_40_41_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_29_10_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_07_37_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_17_59_55_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_42_45_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_16_35_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_24_15_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_24_37_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_43_45_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_05_41_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_41_27_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_41_15_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_08_35_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_08_43_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_07_27_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_40_37_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_04_35_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_07_15_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_17_58_43_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_08_27_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_17_54_54_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_17_10_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_29_55_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_39_55_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_42_25_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_43_15_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_30_15_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_39_31_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_23_35_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_23_10_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_19_37_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_04_31_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_41_10_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_24_10_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_39_35_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_05_37_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_06_45_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_20_55_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_16_06_27_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_15_22_37_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_17_54_45_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_18_01_55_Pro.jpg
+/export/work/ziliang/vattenhallen/test/PNGimages/WIN_20210807_17_58_27_Pro.jpg
diff --git a/dataset/test.txt b/dataset/test.txt
new file mode 100644
index 0000000000000000000000000000000000000000..c8383318bf2c81161e43961eca194b9714f45506
--- /dev/null
+++ b/dataset/test.txt
@@ -0,0 +1,21 @@
+/export/work/ziliang/veges/test/PNGimages/100_d.png
+/export/work/ziliang/veges/test/PNGimages/10_a.png
+/export/work/ziliang/veges/test/PNGimages/120_e.png
+/export/work/ziliang/veges/test/PNGimages/131_e.png
+/export/work/ziliang/veges/test/PNGimages/140_f.png
+/export/work/ziliang/veges/test/PNGimages/155_f.png
+/export/work/ziliang/veges/test/PNGimages/160_g.png
+/export/work/ziliang/veges/test/PNGimages/172_g.png
+/export/work/ziliang/veges/test/PNGimages/185_h.png
+/export/work/ziliang/veges/test/PNGimages/206_h.png
+/export/work/ziliang/veges/test/PNGimages/222_i.png
+/export/work/ziliang/veges/test/PNGimages/231_i.png
+/export/work/ziliang/veges/test/PNGimages/242_j.png
+/export/work/ziliang/veges/test/PNGimages/250_j.png
+/export/work/ziliang/veges/test/PNGimages/25_b.png
+/export/work/ziliang/veges/test/PNGimages/261_j.png
+/export/work/ziliang/veges/test/PNGimages/42_b.png
+/export/work/ziliang/veges/test/PNGimages/4_a.png
+/export/work/ziliang/veges/test/PNGimages/53_c.png
+/export/work/ziliang/veges/test/PNGimages/65_c.png
+/export/work/ziliang/veges/test/PNGimages/79_d.png
diff --git a/dataset/train.list b/dataset/train.list
new file mode 100644
index 0000000000000000000000000000000000000000..c80870a323fd82857ce4f25e0f773381dbf5f5d3
--- /dev/null
+++ b/dataset/train.list
@@ -0,0 +1,317 @@
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_41_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_25_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_43_53_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_14_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_20_38_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_30_24_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_44_32_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_55_13_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_01_19_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_06_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_09_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_41_08_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_58_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_21_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_21_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_02_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_02_17_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_24_28_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_05_24_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_36_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_56_11_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_42_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_56_03_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_16_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_20_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_23_53_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_40_56_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_56_32_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_16_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_41_05_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_05_19_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_16_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_27_02_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_05_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_27_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_55_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_07_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_06_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_05_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_22_50_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_07_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_58_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_13_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_22_20_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_19_51_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_06_23_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_25_06_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_40_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_21_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_49_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_17_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_04_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_04_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_04_03_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_26_16_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_23_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_58_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_22_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_36_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_30_05_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_55_17_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_04_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_54_49_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_13_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_00_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_11_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_05_19_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_24_08_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_41_18_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_56_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_42_14_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_44_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_41_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_46_07_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_59_09_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_42_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_44_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_00_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_43_09_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_07_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_24_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_44_44_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_21_30_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_45_58_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_00_49_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_40_36_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_26_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_58_17_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_25_56_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_06_02_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_07_04_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_53_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_04_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_21_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_45_50_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_43_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_41_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_10_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_17_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_50_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_22_46_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_30_20_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_33_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_07_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_43_18_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_04_57_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_19_57_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_26_46_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_25_34_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_13_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_44_51_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_56_36_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_32_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_51_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_57_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_49_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_56_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_21_07_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_20_26_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_18_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_24_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_44_23_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_58_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_55_11_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_24_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_53_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_15_58_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_06_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_40_46_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_39_38_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_42_17_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_22_30_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_00_18_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_26_05_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_16_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_59_20_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_05_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_06_44_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_19_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_26_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_44_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_24_42_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_16_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_02_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_17_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_33_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_20_53_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_03_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_02_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_28_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_53_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_42_05_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_13_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_23_58_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_42_21_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_04_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_01_40_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_18_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_26_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_07_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_56_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_46_14_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_58_58_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_07_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_16_50_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_56_20_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_19_44_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_07_28_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_23_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_53_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_24_18_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_34_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_40_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_36_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_26_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_13_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_16_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_02_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_50_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_24_03_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_05_44_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_33_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_40_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_58_00_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_26_11_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_47_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_56_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_06_57_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_43_06_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_06_11_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_19_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_07_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_24_38_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_54_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_24_11_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_26_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_26_19_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_42_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_03_51_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_26_08_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_05_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_21_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_24_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_56_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_54_06_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_57_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_03_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_24_06_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_32_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_30_02_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_33_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_38_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_22_57_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_34_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_22_42_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_23_02_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_29_13_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_20_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_55_33_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_08_08_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_21_19_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_54_40_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_16_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_26_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_07_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_20_49_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_26_50_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_18_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_38_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_25_49_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_53_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_05_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_00_33_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_55_05_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_07_08_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_40_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_47_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_24_21_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_43_13_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_39_22_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_27_09_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_02_28_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_04_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_53_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_41_59_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_41_23_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_00_46_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_09_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_56_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_06_16_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_30_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_08_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_57_47_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_17_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_55_56_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_43_57_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_05_11_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_47_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_29_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_30_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_02_48_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_08_53_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_17_55_08_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_05_39_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_44_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_21_03_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_01_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_23_19_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_50_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_06_19_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_06_06_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_40_30_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_56_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_42_57_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_28_26_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_01_50_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_18_05_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_26_58_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_09_09_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_22_52_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_22_08_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_26_24_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_26_12_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_18_05_23_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_16_42_02_Pro.jpg
+/export/work/ziliang/vattenhallen/train/PNGimages/WIN_20210807_15_45_47_Pro.jpg
diff --git a/dataset/train.txt b/dataset/train.txt
new file mode 100644
index 0000000000000000000000000000000000000000..c7df570cd87b4c5eab874c7da150ec4143dc0583
--- /dev/null
+++ b/dataset/train.txt
@@ -0,0 +1,212 @@
+/export/work/ziliang/veges/train/PNGimages/0_a.png
+/export/work/ziliang/veges/train/PNGimages/101_d.png
+/export/work/ziliang/veges/train/PNGimages/102_d.png
+/export/work/ziliang/veges/train/PNGimages/104_e.png
+/export/work/ziliang/veges/train/PNGimages/105_e.png
+/export/work/ziliang/veges/train/PNGimages/106_e.png
+/export/work/ziliang/veges/train/PNGimages/107_e.png
+/export/work/ziliang/veges/train/PNGimages/108_e.png
+/export/work/ziliang/veges/train/PNGimages/109_e.png
+/export/work/ziliang/veges/train/PNGimages/110_e.png
+/export/work/ziliang/veges/train/PNGimages/111_e.png
+/export/work/ziliang/veges/train/PNGimages/112_e.png
+/export/work/ziliang/veges/train/PNGimages/113_e.png
+/export/work/ziliang/veges/train/PNGimages/114_e.png
+/export/work/ziliang/veges/train/PNGimages/115_e.png
+/export/work/ziliang/veges/train/PNGimages/116_e.png
+/export/work/ziliang/veges/train/PNGimages/117_e.png
+/export/work/ziliang/veges/train/PNGimages/118_e.png
+/export/work/ziliang/veges/train/PNGimages/119_e.png
+/export/work/ziliang/veges/train/PNGimages/11_a.png
+/export/work/ziliang/veges/train/PNGimages/121_e.png
+/export/work/ziliang/veges/train/PNGimages/123_e.png
+/export/work/ziliang/veges/train/PNGimages/124_e.png
+/export/work/ziliang/veges/train/PNGimages/125_e.png
+/export/work/ziliang/veges/train/PNGimages/127_e.png
+/export/work/ziliang/veges/train/PNGimages/128_e.png
+/export/work/ziliang/veges/train/PNGimages/129_e.png
+/export/work/ziliang/veges/train/PNGimages/12_a.png
+/export/work/ziliang/veges/train/PNGimages/133_f.png
+/export/work/ziliang/veges/train/PNGimages/134_f.png
+/export/work/ziliang/veges/train/PNGimages/135_f.png
+/export/work/ziliang/veges/train/PNGimages/136_f.png
+/export/work/ziliang/veges/train/PNGimages/137_f.png
+/export/work/ziliang/veges/train/PNGimages/138_f.png
+/export/work/ziliang/veges/train/PNGimages/139_f.png
+/export/work/ziliang/veges/train/PNGimages/141_f.png
+/export/work/ziliang/veges/train/PNGimages/143_f.png
+/export/work/ziliang/veges/train/PNGimages/144_f.png
+/export/work/ziliang/veges/train/PNGimages/145_f.png
+/export/work/ziliang/veges/train/PNGimages/146_f.png
+/export/work/ziliang/veges/train/PNGimages/147_f.png
+/export/work/ziliang/veges/train/PNGimages/148_f.png
+/export/work/ziliang/veges/train/PNGimages/149_f.png
+/export/work/ziliang/veges/train/PNGimages/150_f.png
+/export/work/ziliang/veges/train/PNGimages/151_f.png
+/export/work/ziliang/veges/train/PNGimages/152_f.png
+/export/work/ziliang/veges/train/PNGimages/153_f.png
+/export/work/ziliang/veges/train/PNGimages/154_f.png
+/export/work/ziliang/veges/train/PNGimages/156_f.png
+/export/work/ziliang/veges/train/PNGimages/157_f.png
+/export/work/ziliang/veges/train/PNGimages/158_g.png
+/export/work/ziliang/veges/train/PNGimages/159_g.png
+/export/work/ziliang/veges/train/PNGimages/15_a.png
+/export/work/ziliang/veges/train/PNGimages/161_g.png
+/export/work/ziliang/veges/train/PNGimages/162_g.png
+/export/work/ziliang/veges/train/PNGimages/163_g.png
+/export/work/ziliang/veges/train/PNGimages/164_g.png
+/export/work/ziliang/veges/train/PNGimages/165_g.png
+/export/work/ziliang/veges/train/PNGimages/166_g.png
+/export/work/ziliang/veges/train/PNGimages/168_g.png
+/export/work/ziliang/veges/train/PNGimages/169_g.png
+/export/work/ziliang/veges/train/PNGimages/16_a.png
+/export/work/ziliang/veges/train/PNGimages/170_g.png
+/export/work/ziliang/veges/train/PNGimages/171_g.png
+/export/work/ziliang/veges/train/PNGimages/173_g.png
+/export/work/ziliang/veges/train/PNGimages/174_g.png
+/export/work/ziliang/veges/train/PNGimages/175_g.png
+/export/work/ziliang/veges/train/PNGimages/176_g.png
+/export/work/ziliang/veges/train/PNGimages/177_g.png
+/export/work/ziliang/veges/train/PNGimages/178_g.png
+/export/work/ziliang/veges/train/PNGimages/179_g.png
+/export/work/ziliang/veges/train/PNGimages/17_a.png
+/export/work/ziliang/veges/train/PNGimages/180_g.png
+/export/work/ziliang/veges/train/PNGimages/181_g.png
+/export/work/ziliang/veges/train/PNGimages/182_h.png
+/export/work/ziliang/veges/train/PNGimages/183_h.png
+/export/work/ziliang/veges/train/PNGimages/184_h.png
+/export/work/ziliang/veges/train/PNGimages/186_h.png
+/export/work/ziliang/veges/train/PNGimages/187_h.png
+/export/work/ziliang/veges/train/PNGimages/188_h.png
+/export/work/ziliang/veges/train/PNGimages/189_h.png
+/export/work/ziliang/veges/train/PNGimages/18_a.png
+/export/work/ziliang/veges/train/PNGimages/191_h.png
+/export/work/ziliang/veges/train/PNGimages/192_h.png
+/export/work/ziliang/veges/train/PNGimages/193_h.png
+/export/work/ziliang/veges/train/PNGimages/194_h.png
+/export/work/ziliang/veges/train/PNGimages/195_h.png
+/export/work/ziliang/veges/train/PNGimages/196_h.png
+/export/work/ziliang/veges/train/PNGimages/197_h.png
+/export/work/ziliang/veges/train/PNGimages/198_h.png
+/export/work/ziliang/veges/train/PNGimages/19_a.png
+/export/work/ziliang/veges/train/PNGimages/1_a.png
+/export/work/ziliang/veges/train/PNGimages/200_h.png
+/export/work/ziliang/veges/train/PNGimages/201_h.png
+/export/work/ziliang/veges/train/PNGimages/202_h.png
+/export/work/ziliang/veges/train/PNGimages/203_h.png
+/export/work/ziliang/veges/train/PNGimages/204_h.png
+/export/work/ziliang/veges/train/PNGimages/205_h.png
+/export/work/ziliang/veges/train/PNGimages/207_h.png
+/export/work/ziliang/veges/train/PNGimages/210_i.png
+/export/work/ziliang/veges/train/PNGimages/211_i.png
+/export/work/ziliang/veges/train/PNGimages/212_i.png
+/export/work/ziliang/veges/train/PNGimages/213_i.png
+/export/work/ziliang/veges/train/PNGimages/214_i.png
+/export/work/ziliang/veges/train/PNGimages/215_i.png
+/export/work/ziliang/veges/train/PNGimages/216_i.png
+/export/work/ziliang/veges/train/PNGimages/217_i.png
+/export/work/ziliang/veges/train/PNGimages/218_i.png
+/export/work/ziliang/veges/train/PNGimages/219_i.png
+/export/work/ziliang/veges/train/PNGimages/220_i.png
+/export/work/ziliang/veges/train/PNGimages/221_i.png
+/export/work/ziliang/veges/train/PNGimages/222_i.png
+/export/work/ziliang/veges/train/PNGimages/223_i.png
+/export/work/ziliang/veges/train/PNGimages/224_i.png
+/export/work/ziliang/veges/train/PNGimages/225_i.png
+/export/work/ziliang/veges/train/PNGimages/226_i.png
+/export/work/ziliang/veges/train/PNGimages/227_i.png
+/export/work/ziliang/veges/train/PNGimages/228_i.png
+/export/work/ziliang/veges/train/PNGimages/229_i.png
+/export/work/ziliang/veges/train/PNGimages/230_i.png
+/export/work/ziliang/veges/train/PNGimages/231_i.png
+/export/work/ziliang/veges/train/PNGimages/232_i.png
+/export/work/ziliang/veges/train/PNGimages/233_i.png
+/export/work/ziliang/veges/train/PNGimages/234_j.png
+/export/work/ziliang/veges/train/PNGimages/235_j.png
+/export/work/ziliang/veges/train/PNGimages/237_j.png
+/export/work/ziliang/veges/train/PNGimages/238_j.png
+/export/work/ziliang/veges/train/PNGimages/239_j.png
+/export/work/ziliang/veges/train/PNGimages/23_b.png
+/export/work/ziliang/veges/train/PNGimages/240_j.png
+/export/work/ziliang/veges/train/PNGimages/241_j.png
+/export/work/ziliang/veges/train/PNGimages/243_j.png
+/export/work/ziliang/veges/train/PNGimages/245_j.png
+/export/work/ziliang/veges/train/PNGimages/246_j.png
+/export/work/ziliang/veges/train/PNGimages/247_j.png
+/export/work/ziliang/veges/train/PNGimages/248_j.png
+/export/work/ziliang/veges/train/PNGimages/249_j.png
+/export/work/ziliang/veges/train/PNGimages/24_b.png
+/export/work/ziliang/veges/train/PNGimages/251_j.png
+/export/work/ziliang/veges/train/PNGimages/252_j.png
+/export/work/ziliang/veges/train/PNGimages/254_j.png
+/export/work/ziliang/veges/train/PNGimages/255_j.png
+/export/work/ziliang/veges/train/PNGimages/256_j.png
+/export/work/ziliang/veges/train/PNGimages/257_j.png
+/export/work/ziliang/veges/train/PNGimages/258_j.png
+/export/work/ziliang/veges/train/PNGimages/259_j.png
+/export/work/ziliang/veges/train/PNGimages/260_j.png
+/export/work/ziliang/veges/train/PNGimages/262_j.png
+/export/work/ziliang/veges/train/PNGimages/26_b.png
+/export/work/ziliang/veges/train/PNGimages/27_b.png
+/export/work/ziliang/veges/train/PNGimages/2_a.png
+/export/work/ziliang/veges/train/PNGimages/32_b.png
+/export/work/ziliang/veges/train/PNGimages/33_b.png
+/export/work/ziliang/veges/train/PNGimages/34_b.png
+/export/work/ziliang/veges/train/PNGimages/35_b.png
+/export/work/ziliang/veges/train/PNGimages/36_b.png
+/export/work/ziliang/veges/train/PNGimages/3_a.png
+/export/work/ziliang/veges/train/PNGimages/40_b.png
+/export/work/ziliang/veges/train/PNGimages/41_b.png
+/export/work/ziliang/veges/train/PNGimages/43_b.png
+/export/work/ziliang/veges/train/PNGimages/44_b.png
+/export/work/ziliang/veges/train/PNGimages/45_b.png
+/export/work/ziliang/veges/train/PNGimages/46_b.png
+/export/work/ziliang/veges/train/PNGimages/49_c.png
+/export/work/ziliang/veges/train/PNGimages/50_c.png
+/export/work/ziliang/veges/train/PNGimages/51_c.png
+/export/work/ziliang/veges/train/PNGimages/52_c.png
+/export/work/ziliang/veges/train/PNGimages/54_c.png
+/export/work/ziliang/veges/train/PNGimages/55_c.png
+/export/work/ziliang/veges/train/PNGimages/56_c.png
+/export/work/ziliang/veges/train/PNGimages/57_c.png
+/export/work/ziliang/veges/train/PNGimages/58_c.png
+/export/work/ziliang/veges/train/PNGimages/59_c.png
+/export/work/ziliang/veges/train/PNGimages/5_a.png
+/export/work/ziliang/veges/train/PNGimages/60_c.png
+/export/work/ziliang/veges/train/PNGimages/61_c.png
+/export/work/ziliang/veges/train/PNGimages/62_c.png
+/export/work/ziliang/veges/train/PNGimages/63_c.png
+/export/work/ziliang/veges/train/PNGimages/64_c.png
+/export/work/ziliang/veges/train/PNGimages/66_c.png
+/export/work/ziliang/veges/train/PNGimages/67_c.png
+/export/work/ziliang/veges/train/PNGimages/68_c.png
+/export/work/ziliang/veges/train/PNGimages/69_c.png
+/export/work/ziliang/veges/train/PNGimages/6_a.png
+/export/work/ziliang/veges/train/PNGimages/70_c.png
+/export/work/ziliang/veges/train/PNGimages/71_c.png
+/export/work/ziliang/veges/train/PNGimages/72_c.png
+/export/work/ziliang/veges/train/PNGimages/73_c.png
+/export/work/ziliang/veges/train/PNGimages/74_c.png
+/export/work/ziliang/veges/train/PNGimages/75_c.png
+/export/work/ziliang/veges/train/PNGimages/76_d.png
+/export/work/ziliang/veges/train/PNGimages/77_d.png
+/export/work/ziliang/veges/train/PNGimages/78_d.png
+/export/work/ziliang/veges/train/PNGimages/80_d.png
+/export/work/ziliang/veges/train/PNGimages/81_d.png
+/export/work/ziliang/veges/train/PNGimages/82_d.png
+/export/work/ziliang/veges/train/PNGimages/83_d.png
+/export/work/ziliang/veges/train/PNGimages/85_d.png
+/export/work/ziliang/veges/train/PNGimages/86_d.png
+/export/work/ziliang/veges/train/PNGimages/87_d.png
+/export/work/ziliang/veges/train/PNGimages/89_d.png
+/export/work/ziliang/veges/train/PNGimages/8_a.png
+/export/work/ziliang/veges/train/PNGimages/90_d.png
+/export/work/ziliang/veges/train/PNGimages/91_d.png
+/export/work/ziliang/veges/train/PNGimages/92_d.png
+/export/work/ziliang/veges/train/PNGimages/93_d.png
+/export/work/ziliang/veges/train/PNGimages/95_d.png
+/export/work/ziliang/veges/train/PNGimages/96_d.png
+/export/work/ziliang/veges/train/PNGimages/97_d.png
+/export/work/ziliang/veges/train/PNGimages/98_d.png
+/export/work/ziliang/veges/train/PNGimages/99_d.png
+/export/work/ziliang/veges/train/PNGimages/9_a.png
diff --git a/src/cal_location.py b/src/cal_location.py
new file mode 100644
index 0000000000000000000000000000000000000000..ec7fcee3ab82c6fedd7493d80a1af2f8defd7ed9
--- /dev/null
+++ b/src/cal_location.py
@@ -0,0 +1,110 @@
+# load darknet weights to predict locations
+# the weights path can be hardcoded
+# 1. Work out how to read darknet's output!
+# 2. Use the camera intrinsics and extrinsics to compute the target's position in the camera coordinate system
+# 3. Use the encoder reading associated with each image to compute the global position
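+#
+# Rough geometry for steps 2-3 (a sketch, assuming the camera axes are aligned
+# with the gantry axes):
+#   (x_cam, y_cam) = project(u, v, K)   # pixel -> floor point, camera frame
+#   x_global = x_camera + x_cam
+#   y_global = y_camera + y_cam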
+from argparse import ArgumentParser, Namespace
+from logging import basicConfig, DEBUG, INFO, error, getLogger
+from pathlib import Path
+from numpy import array, ndarray
+from scipy.io import loadmat
+from typing import Dict, List, Tuple, Iterable, Optional
+
+
+_CAM_EXTENSIONS = 'mat'
+
+"""Logger for log file"""
+_LOG = getLogger(__name__)
+
+"""Data type for camera matrix"""
+CAM_MATRIX = ""
+
+"""Constant sweeping height"""
+SWEEP_Z = 0
+
+
+def load_camera(cam_path: Path) -> Optional[ndarray]:  # returns the 3x3 intrinsic matrix, or None on failure
+    '''
+    load the mat file that contains camera calibration result, read the intrinsic matrix of the camera
+    :param cam_path: path of the mat file 
+    :return intrinsic_matrix: K matrix of the camera
+    '''
+    if not cam_path.suffix.lstrip('.') == _CAM_EXTENSIONS:
+        _LOG.error('{} has an illegal extension'.format(cam_path))
+        return None
+
+    try:
+        data = loadmat(cam_path)
+    except FileNotFoundError:
+        _LOG.error('No such file: {}'.format(cam_path))
+        return None
+        
+    intrinsic_matrix = data['camera_no_distortion'][0, 0][11] 
+    _LOG.info('Load intrinsic_matrix of the camera {}'.format(intrinsic_matrix))
+    return intrinsic_matrix
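+
+# Usage sketch (the .mat path is the default from the argument parser below):
+#   K = load_camera(Path('../static/camera_no_distortion.mat'))
+#   if K is not None: print(K.shape)  # expect (3, 3)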
+
+# def download_imgs():
+#     handled by the package Alex provides
+#     return
+
+def detect():
+    '''
+    TODO: run the detector on one image
+    '''
+    pass
+
+def read_detect():
+    '''
+    May need to read in all of the object boxes for one image
+    '''
+    pass
+
+def read_camera_locations() -> Tuple[float, float, float]:
+    '''
+    Read the camera's global positions from a txt file;
+    every position has a corresponding image
+    '''
+    raise NotImplementedError  # TODO: x_camera, y_camera, z_camera are not read anywhere yet
+
+def project(pixel_x: int, pixel_y: int, cam_matrix: ndarray) -> Tuple[float, float]:
+    '''
+    Project image coordinate to floor coordinate in the camera coordinate system
+    '''
+    # Sketch, assuming the usual pinhole convention K = [[fx, 0, cx], [0, fy, cy], [0, 0, 1]]
+    # and that SWEEP_Z holds the camera-to-floor distance:
+    fx, fy, cx, cy = cam_matrix[0, 0], cam_matrix[1, 1], cam_matrix[0, 2], cam_matrix[1, 2]
+    return ((pixel_x - cx) * SWEEP_Z / fx, (pixel_y - cy) * SWEEP_Z / fy)
+
+def cal_obj_location(inner_matrix) -> Tuple[float, float]:
+    '''
+    Input: camera location, box, camera matrix
+    Output: global location of a box
+    '''
+    x_camera, y_camera, z_camera = read_camera_locations()
+    # TODO: combine the camera position with the projected box position
+    return
+
+def parser():
+    return
+
+
+if __name__ == '__main__':
+    parser = ArgumentParser()
+    parser.add_argument(
+        '-cam',
+        '--camera_matrix',
+        type=Path,
+        default='../static/camera_no_distortion.mat',
+        help='Path to mat file that contains intrinsic camera matrix K'
+    )
+    parser.add_argument(
+        '-l',
+        '--log',
+        type=Path,
+        default='../log/location.log',
+        help='Path to the log file'
+    )
+
+    parser.add_argument('-v', '--verbose', action='store_true', help='Verbose mode')
+    arguments = parser.parse_args()
+    if arguments.verbose:
+        basicConfig(filename=arguments.log, level=DEBUG)
+    else:
+        basicConfig(filename=arguments.log, level=INFO)
+
+    
\ No newline at end of file
diff --git a/src/client.py b/src/client.py
new file mode 100644
index 0000000000000000000000000000000000000000..884e96e0961ee907cc50d8d1d1ad354339270ce5
--- /dev/null
+++ b/src/client.py
@@ -0,0 +1,137 @@
+import paho.mqtt.client as mqtt
+import json
+import time
+from uuid import uuid4  # Universally Unique Identifier
+import logging
+
+# values over max (and under min) will be clipped
+MAX_X = 2400
+MAX_Y = 1200
+MIN_Z = -460  # TODO test this one!
+
+def coord(x, y, z):
+  return {"kind": "coordinate", "args": {"x": x, "y": y, "z": z}} # 返回json 嵌套对象
+
+def move_request(x, y, z):
+  return {"kind": "rpc_request",  # 返回 json对象,对象内含数组
+          "args": {"label": ""},
+          "body": [{"kind": "move_absolute",
+                    "args": {"location": coord(x, y, z),
+                             "offset": coord(0, 0, 0),
+                             "speed": 100}}]}
+
+def take_photo_request():
+  return {"kind": "rpc_request",
+          "args": {"label": ""}, #label空着是为了在blocking_request中填上uuid,唯一识别码
+          "body": [{"kind": "take_photo", "args": {}}]}
+
+def clip(v, min_v, max_v):
+  if v < min_v: return min_v
+  if v > max_v: return max_v
+  return v
+
+class FarmbotClient(object):
+
+  def __init__(self, device_id, token):
+
+    self.device_id = device_id
+    self.client = mqtt.Client()  # wrap a paho-mqtt client instance
+    self.client.username_pw_set(self.device_id, token)  # username = device id, password = token
+    self.client.on_connect = self._on_connect  # register the connect callback
+    self.client.on_message = self._on_message
+
+    logging.basicConfig(level=logging.DEBUG,
+                        format="%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s",
+                        filename='farmbot_client.log',
+                        filemode='a')
+    console = logging.StreamHandler()
+    console.setLevel(logging.INFO)
+    console.setFormatter(logging.Formatter("%(asctime)s\t%(message)s"))
+    logging.getLogger('').addHandler(console)
+
+    self.connected = False
+    self.client.connect("clever-octopus.rmq.cloudamqp.com", 1883, 60)  #前面的url要运行按README.md中request_token.py 后面俩是TCP Port, Websocket Port
+    self.client.loop_start()
+    # the constructor connects to the broker, so every freshly instantiated client is already connected
+
+
+  def shutdown(self):
+    self.client.disconnect()
+    self.client.loop_stop()
+
+  def move(self, x, y, z):
+    x = clip(x, 0, MAX_X)
+    y = clip(y, 0, MAX_Y)
+    z = clip(z, MIN_Z, 0)
+    status_ok = self._blocking_request(move_request(x, y, z))  # send the request
+    logging.info("MOVE (%s,%s,%s) [%s]", x, y, z, status_ok)  # log the command and its status
+
+  def take_photo(self):
+    # TODO: is this enough? it's issue a request for the photo, but is the actual capture async?
+    status_ok = self._blocking_request(take_photo_request())
+    logging.info("TAKE_PHOTO [%s]", status_ok)
+
+  def _blocking_request(self, request, retries_remaining=3):
+    if retries_remaining==0:
+      logging.error("< blocking request [%s] OUT OF RETRIES", request) #尝试3次,然后在日志中记录错误
+      return False
+
+    self._wait_for_connection()  # defined below: polls until the broker connection is up
+
+    # assign a new uuid for this attempt
+    self.pending_uuid = str(uuid4())
+    request['args']['label'] = self.pending_uuid  # tag the request JSON with this attempt's uuid
+    logging.debug("> blocking request [%s] retries=%d", request, retries_remaining)
+
+    # send request off 发送请求
+    self.rpc_status = None
+    self.client.publish("bot/" + self.device_id + "/from_clients", json.dumps(request))
+
+    # wait for response
+    timeout_counter = 600  # ~1 min (600 polls x 0.1 s)
+    while self.rpc_status is None:  # self.rpc_status is the reply flag, set in _on_message
+      time.sleep(0.1)
+      timeout_counter -= 1
+      if timeout_counter == 0:
+        logging.warn("< blocking request TIMEOUT [%s]", request) #时间到了,无应答
+        return self._blocking_request(request, retries_remaining-1)
+    self.pending_uuid = None
+
+    # if it's ok, we're done!
+    if self.rpc_status == 'rpc_ok':
+      logging.debug("< blocking request OK [%s]", request)
+      return True
+
+    # if it's not ok, wait a bit and retry
+    if self.rpc_status == 'rpc_error':
+      logging.warn("< blocking request ERROR [%s]", request)
+      time.sleep(1)
+      return self._blocking_request(request, retries_remaining-1)
+
+    # unexpected state (???)
+    msg = "unexpected rpc_status [%s]" % self.rpc_status
+    logging.error(msg)
+    raise Exception(msg)
+
+
+  def _wait_for_connection(self):
+    # TODO: better way to do all this async event driven rather than with polling :/
+    timeout_counter = 600  # ~1min
+    while not self.connected:  # poll self.connected until _on_connect flips it
+      time.sleep(0.1)
+      timeout_counter -= 1
+      if timeout_counter == 0:
+        raise Exception("unable to connect")
+
+  def _on_connect(self, client, userdata, flags, rc):
+    logging.debug("> _on_connect")
+    self.client.subscribe("bot/" + self.device_id + "/from_device")
+    self.connected = True
+    logging.debug("< _on_connect")
+
+  def _on_message(self, client, userdata, msg):
+    resp = json.loads(msg.payload.decode())
+    if resp['args']['label'] != 'ping':
+      logging.debug("> _on_message [%s] [%s]", msg.topic, resp)
+    if msg.topic.endswith("/from_device") and resp['args']['label'] == self.pending_uuid:
+      self.rpc_status = resp['kind']
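+
+
+# Hedged usage sketch -- the device id and token below are placeholders, not
+# real credentials (see the repo's README for how to request a token):
+#
+#   client = FarmbotClient("device_1234", "<token>")
+#   client.move(1000, 600, 0)
+#   client.take_photo()
+#   client.shutdown()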
diff --git a/src/darknet.py b/src/darknet.py
new file mode 100644
index 0000000000000000000000000000000000000000..7f9c4169e39e2e916313726fc740a7986eb47ece
--- /dev/null
+++ b/src/darknet.py
@@ -0,0 +1,340 @@
+#!/usr/bin/env python3
+
+"""
+Python 3 wrapper for identifying objects in images
+
+Running the script requires opencv-python to be installed (`pip install opencv-python`)
+Directly viewing or returning bounding-boxed images requires scikit-image to be installed (`pip install scikit-image`)
+Use pip3 instead of pip on some systems to be sure to install modules for python3
+"""
+
+from ctypes import *
+import math
+import random
+import os
+import numpy as np  # needed by non_max_suppression_fast below
+
+
+class BOX(Structure):
+    _fields_ = [("x", c_float),
+                ("y", c_float),
+                ("w", c_float),
+                ("h", c_float)]
+
+
+class DETECTION(Structure):
+    _fields_ = [("bbox", BOX),
+                ("classes", c_int),
+                ("best_class_idx", c_int),
+                ("prob", POINTER(c_float)),
+                ("mask", POINTER(c_float)),
+                ("objectness", c_float),
+                ("sort_class", c_int),
+                ("uc", POINTER(c_float)),
+                ("points", c_int),
+                ("embeddings", POINTER(c_float)),
+                ("embedding_size", c_int),
+                ("sim", c_float),
+                ("track_id", c_int)]
+
+class DETNUMPAIR(Structure):
+    _fields_ = [("num", c_int),
+                ("dets", POINTER(DETECTION))]
+
+
+class IMAGE(Structure):
+    _fields_ = [("w", c_int),
+                ("h", c_int),
+                ("c", c_int),
+                ("data", POINTER(c_float))]
+
+
+class METADATA(Structure):
+    _fields_ = [("classes", c_int),
+                ("names", POINTER(c_char_p))]
+
+
+def network_width(net):
+    return lib.network_width(net)
+
+
+def network_height(net):
+    return lib.network_height(net)
+
+
+def bbox2points(bbox):
+    """
+    From bounding box yolo format
+    to corner points cv2 rectangle
+    """
+    x, y, w, h = bbox
+    xmin = int(round(x - (w / 2)))
+    xmax = int(round(x + (w / 2)))
+    ymin = int(round(y - (h / 2)))
+    ymax = int(round(y + (h / 2)))
+    return xmin, ymin, xmax, ymax
+
+
+def class_colors(names):
+    """
+    Create a dict with one random BGR color for each
+    class name
+    """
+    return {name: (
+        random.randint(0, 255),
+        random.randint(0, 255),
+        random.randint(0, 255)) for name in names}
+
+
+def load_network(config_file, data_file, weights, batch_size=1):
+    """
+    load model description and weights from config files
+    args:
+        config_file (str): path to .cfg model file
+        data_file (str): path to .data model file
+        weights (str): path to weights
+    returns:
+        network: trained model
+        class_names
+        class_colors
+    """
+    network = load_net_custom(
+        config_file.encode("ascii"),
+        weights.encode("ascii"), 0, batch_size)
+    metadata = load_meta(data_file.encode("ascii"))
+    class_names = [metadata.names[i].decode("ascii") for i in range(metadata.classes)]
+    colors = class_colors(class_names)
+    return network, class_names, colors
+
+
+def print_detections(detections, coordinates=False):
+    print("\nObjects:")
+    for label, confidence, bbox in detections:
+        x, y, w, h = bbox
+        if coordinates:
+            print("{}: {}%    (left_x: {:.0f}   top_y:  {:.0f}   width:   {:.0f}   height:  {:.0f})".format(label, confidence, x, y, w, h))
+        else:
+            print("{}: {}%".format(label, confidence))
+
+
+def draw_boxes(detections, image, colors):
+    import cv2
+    for label, confidence, bbox in detections:
+        left, top, right, bottom = bbox2points(bbox)
+        cv2.rectangle(image, (left, top), (right, bottom), colors[label], 1)
+        cv2.putText(image, "{} [{:.2f}]".format(label, float(confidence)),
+                    (left, top - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
+                    colors[label], 2)
+    return image
+
+
+def decode_detection(detections):
+    decoded = []
+    for label, confidence, bbox in detections:
+        confidence = str(round(confidence * 100, 2))
+        decoded.append((str(label), confidence, bbox))
+    return decoded
+
+# https://www.pyimagesearch.com/2015/02/16/faster-non-maximum-suppression-python/
+# Malisiewicz et al.
+def non_max_suppression_fast(detections, overlap_thresh):
+    boxes = []
+    for detection in detections:
+        _, _, _, (x, y, w, h) = detection
+        x1 = x - w / 2
+        y1 = y - h / 2
+        x2 = x + w / 2
+        y2 = y + h / 2
+        boxes.append(np.array([x1, y1, x2, y2]))
+    boxes_array = np.array(boxes)
+
+    # initialize the list of picked indexes
+    pick = []
+    # grab the coordinates of the bounding boxes
+    x1 = boxes_array[:, 0]
+    y1 = boxes_array[:, 1]
+    x2 = boxes_array[:, 2]
+    y2 = boxes_array[:, 3]
+    # compute the area of the bounding boxes and sort the bounding
+    # boxes by the bottom-right y-coordinate of the bounding box
+    area = (x2 - x1 + 1) * (y2 - y1 + 1)
+    idxs = np.argsort(y2)
+    # keep looping while some indexes still remain in the indexes
+    # list
+    while len(idxs) > 0:
+        # grab the last index in the indexes list and add the
+        # index value to the list of picked indexes
+        last = len(idxs) - 1
+        i = idxs[last]
+        pick.append(i)
+        # find the largest (x, y) coordinates for the start of
+        # the bounding box and the smallest (x, y) coordinates
+        # for the end of the bounding box
+        xx1 = np.maximum(x1[i], x1[idxs[:last]])
+        yy1 = np.maximum(y1[i], y1[idxs[:last]])
+        xx2 = np.minimum(x2[i], x2[idxs[:last]])
+        yy2 = np.minimum(y2[i], y2[idxs[:last]])
+        # compute the width and height of the bounding box
+        w = np.maximum(0, xx2 - xx1 + 1)
+        h = np.maximum(0, yy2 - yy1 + 1)
+        # compute the ratio of overlap
+        overlap = (w * h) / area[idxs[:last]]
+        # delete all indexes from the index list whose overlap exceeds the threshold
+        idxs = np.delete(idxs, np.concatenate(([last],
+                                               np.where(overlap > overlap_thresh)[0])))
+    # return only the bounding boxes that were picked
+    return [detections[i] for i in pick]
+
+def remove_negatives(detections, class_names, num):
+    """
+    Remove all classes with 0% confidence within the detection
+    """
+    predictions = []
+    for j in range(num):
+        for idx, name in enumerate(class_names):
+            if detections[j].prob[idx] > 0:
+                bbox = detections[j].bbox
+                bbox = (bbox.x, bbox.y, bbox.w, bbox.h)
+                predictions.append((name, detections[j].prob[idx], (bbox)))
+    return predictions
+
+
+def remove_negatives_faster(detections, class_names, num):
+    """
+    Faster version of remove_negatives (very useful when using yolo9000)
+    """
+    predictions = []
+    for j in range(num):
+        if detections[j].best_class_idx == -1:
+            continue
+        name = class_names[detections[j].best_class_idx]
+        bbox = detections[j].bbox
+        bbox = (bbox.x, bbox.y, bbox.w, bbox.h)
+        predictions.append((name, detections[j].prob[detections[j].best_class_idx], bbox))
+    return predictions
+
+
+def detect_image(network, class_names, image, thresh=.5, hier_thresh=.5, nms=.45):
+    """
+        Returns a list with highest confidence class and their bbox
+    """
+    pnum = pointer(c_int(0))
+    predict_image(network, image)  # image must be a darknet IMAGE struct (e.g. from make_image or load_image)
+    detections = get_network_boxes(network, image.w, image.h,
+                                   thresh, hier_thresh, None, 0, pnum, 0)
+    #print_detections(detections, coordinates=True)                 
+    num = pnum[0]
+    if nms:
+        do_nms_sort(detections, num, len(class_names), nms)
+    predictions = remove_negatives(detections, class_names, num)
+    predictions = decode_detection(predictions)
+    free_detections(detections, num)
+    return sorted(predictions, key=lambda x: float(x[1]))  # confidence is a string after decode_detection, so compare numerically
+
+
+if os.name == "posix":
+    cwd = os.path.abspath(os.path.join(os.getcwd(), ".."))
+    lib = CDLL(cwd + "/darknet/libdarknet.so", RTLD_GLOBAL)
+elif os.name == "nt":
+    cwd = os.path.dirname(__file__)
+    os.environ['PATH'] = cwd + ';' + os.environ['PATH']
+    lib = CDLL("darknet.dll", RTLD_GLOBAL)
+else:
+    raise RuntimeError("Unsupported OS")
+
+lib.network_width.argtypes = [c_void_p]
+lib.network_width.restype = c_int
+lib.network_height.argtypes = [c_void_p]
+lib.network_height.restype = c_int
+
+copy_image_from_bytes = lib.copy_image_from_bytes
+copy_image_from_bytes.argtypes = [IMAGE,c_char_p]
+
+predict = lib.network_predict_ptr
+predict.argtypes = [c_void_p, POINTER(c_float)]
+predict.restype = POINTER(c_float)
+
+set_gpu = lib.cuda_set_device
+init_cpu = lib.init_cpu
+
+make_image = lib.make_image
+make_image.argtypes = [c_int, c_int, c_int]
+make_image.restype = IMAGE
+
+get_network_boxes = lib.get_network_boxes
+get_network_boxes.argtypes = [c_void_p, c_int, c_int, c_float, c_float, POINTER(c_int), c_int, POINTER(c_int), c_int]
+get_network_boxes.restype = POINTER(DETECTION)
+
+make_network_boxes = lib.make_network_boxes
+make_network_boxes.argtypes = [c_void_p]
+make_network_boxes.restype = POINTER(DETECTION)
+
+free_detections = lib.free_detections
+free_detections.argtypes = [POINTER(DETECTION), c_int]
+
+free_batch_detections = lib.free_batch_detections
+free_batch_detections.argtypes = [POINTER(DETNUMPAIR), c_int]
+
+free_ptrs = lib.free_ptrs
+free_ptrs.argtypes = [POINTER(c_void_p), c_int]
+
+network_predict = lib.network_predict_ptr
+network_predict.argtypes = [c_void_p, POINTER(c_float)]
+
+reset_rnn = lib.reset_rnn
+reset_rnn.argtypes = [c_void_p]
+
+load_net = lib.load_network
+load_net.argtypes = [c_char_p, c_char_p, c_int]
+load_net.restype = c_void_p
+
+load_net_custom = lib.load_network_custom
+load_net_custom.argtypes = [c_char_p, c_char_p, c_int, c_int]
+load_net_custom.restype = c_void_p
+
+free_network_ptr = lib.free_network_ptr
+free_network_ptr.argtypes = [c_void_p]
+free_network_ptr.restype = c_void_p
+
+do_nms_obj = lib.do_nms_obj
+do_nms_obj.argtypes = [POINTER(DETECTION), c_int, c_int, c_float]
+
+do_nms_sort = lib.do_nms_sort
+do_nms_sort.argtypes = [POINTER(DETECTION), c_int, c_int, c_float]
+
+free_image = lib.free_image
+free_image.argtypes = [IMAGE]
+
+letterbox_image = lib.letterbox_image
+letterbox_image.argtypes = [IMAGE, c_int, c_int]
+letterbox_image.restype = IMAGE
+
+load_meta = lib.get_metadata
+lib.get_metadata.argtypes = [c_char_p]
+lib.get_metadata.restype = METADATA
+
+load_image = lib.load_image_color
+load_image.argtypes = [c_char_p, c_int, c_int]
+load_image.restype = IMAGE
+
+rgbgr_image = lib.rgbgr_image
+rgbgr_image.argtypes = [IMAGE]
+
+predict_image = lib.network_predict_image
+predict_image.argtypes = [c_void_p, IMAGE]
+predict_image.restype = POINTER(c_float)
+
+predict_image_letterbox = lib.network_predict_image_letterbox
+predict_image_letterbox.argtypes = [c_void_p, IMAGE]
+predict_image_letterbox.restype = POINTER(c_float)
+
+network_predict_batch = lib.network_predict_batch
+network_predict_batch.argtypes = [c_void_p, IMAGE, c_int, c_int, c_int,
+                                   c_float, c_float, POINTER(c_int), c_int, c_int]
+network_predict_batch.restype = POINTER(DETNUMPAIR)
+
+if __name__ == "__main__":
+    net = load_network("/home/xzleo/farmbot/darknet/cfg/yolov3-veges-test.cfg", "/home/xzleo/farmbot/darknet/data/veges.data", "/home/xzleo/farmbot/darknet/backup/yolov3-veges_best.weights")
+    img =  load_image(b"/home/xzleo/farmbot/dataset/2_a.png", 0, 0)
+    predictions = detect_image(net, img, thresh=.5, hier_thresh=.5, nms=.45)
\ No newline at end of file
diff --git a/src/darknet_images.py b/src/darknet_images.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6704418095209bd6401c53fe60f3417273f4f73
--- /dev/null
+++ b/src/darknet_images.py
@@ -0,0 +1,236 @@
+import argparse
+import os
+import glob
+import random
+import darknet  # darknet.py
+import time
+import cv2
+import numpy as np
+
+
+def parser():
+    parser = argparse.ArgumentParser(description="YOLO Object Detection")
+    parser.add_argument("--input", type=str, default="",
+                        help="image source. It can be a single image, a"
+                        "txt with paths to them, or a folder. Image valid"
+                        " formats are jpg, jpeg or png."
+                        "If no input is given, ")
+    parser.add_argument("--batch_size", default=1, type=int,
+                        help="number of images to be processed at the same time")
+    parser.add_argument("--weights", default="yolov4.weights",
+                        help="yolo weights path")
+    parser.add_argument("--dont_show", action='store_true',
+                        help="windown inference display. For headless systems")
+    parser.add_argument("--ext_output", action='store_true',
+                        help="display bbox coordinates of detected objects")
+    parser.add_argument("--save_labels", action='store_true',
+                        help="save detections bbox for each image in yolo format")
+    parser.add_argument("--config_file", default="./cfg/yolov4.cfg",
+                        help="path to config file")
+    parser.add_argument("--data_file", default="./cfg/coco.data",
+                        help="path to data file")
+    parser.add_argument("--thresh", type=float, default=.25,
+                        help="remove detections with lower confidence")
+    return parser.parse_args()
+
+
+def check_arguments_errors(args):
+    assert 0 < args.thresh < 1, "Threshold should be a float between zero and one (non-inclusive)"
+    if not os.path.exists(args.config_file):
+        raise(ValueError("Invalid config path {}".format(os.path.abspath(args.config_file))))
+    if not os.path.exists(args.weights):
+        raise(ValueError("Invalid weight path {}".format(os.path.abspath(args.weights))))
+    if not os.path.exists(args.data_file):
+        raise(ValueError("Invalid data file path {}".format(os.path.abspath(args.data_file))))
+    if args.input and not os.path.exists(args.input):
+        raise(ValueError("Invalid image path {}".format(os.path.abspath(args.input))))
+
+
+# def check_batch_shape(images, batch_size):
+#     """
+#         Image sizes should be the same width and height
+#     """
+#     shapes = [image.shape for image in images]
+#     if len(set(shapes)) > 1:
+#         raise ValueError("Images don't have same shape")
+#     if len(shapes) > batch_size:
+#         raise ValueError("Batch size higher than number of images")
+#     return shapes[0]
+
+
+def load_images(images_path):
+    """
+    If image path is given, return it directly
+    For txt file, read it and return each line as image path
+    In other case, it's a folder, return a list with names of each
+    jpg, jpeg and png file
+    """
+    input_path_extension = images_path.split('.')[-1]
+    if input_path_extension in ['jpg', 'jpeg', 'png']:
+        # single image
+        return [images_path]
+    elif input_path_extension == "txt":
+        with open(images_path, "r") as f:
+            return f.read().splitlines()
+    else:
+        # folders
+        return glob.glob(
+            os.path.join(images_path, "*.jpg")) + \
+            glob.glob(os.path.join(images_path, "*.png")) + \
+            glob.glob(os.path.join(images_path, "*.jpeg"))
+
+
+# def prepare_batch(images, network, channels=3):
+#     width = darknet.network_width(network)
+#     height = darknet.network_height(network)
+
+#     darknet_images = []
+#     for image in images:
+#         image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+#         image_resized = cv2.resize(image_rgb, (width, height),
+#                                    interpolation=cv2.INTER_LINEAR)
+#         custom_image = image_resized.transpose(2, 0, 1)
+#         darknet_images.append(custom_image)
+
+#     batch_array = np.concatenate(darknet_images, axis=0)
+#     batch_array = np.ascontiguousarray(batch_array.flat, dtype=np.float32)/255.0
+#     darknet_images = batch_array.ctypes.data_as(darknet.POINTER(darknet.c_float))
+#     return darknet.IMAGE(width, height, channels, darknet_images)
+
+
+def image_detection(image_path, network, class_names, class_colors, thresh):
+    # Darknet doesn't accept numpy images.
+    # Create one with image we reuse for each detect
+    width = darknet.network_width(network)
+    height = darknet.network_height(network)
+    darknet_image = darknet.make_image(width, height, 3)
+
+    image = cv2.imread(image_path)
+    image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+    image_resized = cv2.resize(image_rgb, (width, height),
+                               interpolation=cv2.INTER_LINEAR)
+
+    darknet.copy_image_from_bytes(darknet_image, image_resized.tobytes())
+    detections = darknet.detect_image(network, class_names, darknet_image, thresh=thresh)
+    darknet.free_image(darknet_image)
+    image = darknet.draw_boxes(detections, image_resized, class_colors)
+    return cv2.cvtColor(image, cv2.COLOR_BGR2RGB), detections
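+
+# Note: both the returned detections and the returned image are in the
+# network's input resolution (width x height above), not the original image
+# size, so drawing and label-saving stay consistent.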
+
+
+# def batch_detection(network, images, class_names, class_colors,
+#                     thresh=0.25, hier_thresh=.5, nms=.45, batch_size=4):
+#     image_height, image_width, _ = check_batch_shape(images, batch_size)
+#     darknet_images = prepare_batch(images, network)
+#     batch_detections = darknet.network_predict_batch(network, darknet_images, batch_size, image_width,
+#                                                      image_height, thresh, hier_thresh, None, 0, 0)
+#     batch_predictions = []
+#     for idx in range(batch_size):
+#         num = batch_detections[idx].num
+#         detections = batch_detections[idx].dets
+#         if nms:
+#             darknet.do_nms_obj(detections, num, len(class_names), nms)
+#         predictions = darknet.remove_negatives(detections, class_names, num)
+#         images[idx] = darknet.draw_boxes(predictions, images[idx], class_colors)
+#         batch_predictions.append(predictions)
+#     darknet.free_batch_detections(batch_detections, batch_size)
+#     return images, batch_predictions
+
+
+# def image_classification(image, network, class_names):
+#     width = darknet.network_width(network)
+#     height = darknet.network_height(network)
+#     image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+#     image_resized = cv2.resize(image_rgb, (width, height),
+#                                 interpolation=cv2.INTER_LINEAR)
+#     darknet_image = darknet.make_image(width, height, 3)
+#     darknet.copy_image_from_bytes(darknet_image, image_resized.tobytes())
+#     detections = darknet.predict_image(network, darknet_image)
+#     predictions = [(name, detections[idx]) for idx, name in enumerate(class_names)]
+#     darknet.free_image(darknet_image)
+#     return sorted(predictions, key=lambda x: -x[1])
+
+
+def convert2relative(image, bbox):
+    """
+    YOLO format use relative coordinates for annotation
+    """
+    x, y, w, h = bbox
+    height, width, _ = image.shape
+    return x/width, y/height, w/width, h/height
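+
+# Worked example: a 100x50 box centred at (320, 240) in a 640x480 image
+# becomes (0.5, 0.5, 0.15625, ~0.1042) -- the normalised cx, cy, w, h used
+# in YOLO label files.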
+
+
+def save_annotations(name, image, detections, class_names):
+    """
+    Files saved with image_name.txt and relative coordinates
+    """
+    file_name = os.path.splitext(name)[0] + ".txt"
+    with open(file_name, "w") as f:
+        for label, confidence, bbox in detections:
+            x, y, w, h = convert2relative(image, bbox)
+            label = class_names.index(label)
+            f.write("{} {:.4f} {:.4f} {:.4f} {:.4f} {:.4f}\n".format(label, x, y, w, h, float(confidence)))
+
+
+""" def batch_detection_example():
+    args = parser()
+    check_arguments_errors(args)
+    batch_size = 3
+    random.seed(3)  # deterministic bbox colors
+    network, class_names, class_colors = darknet.load_network(
+        args.config_file,
+        args.data_file,
+        args.weights,
+        batch_size=batch_size
+    )
+    image_names = ['data/horses.jpg', 'data/horses.jpg', 'data/eagle.jpg']
+    images = [cv2.imread(image) for image in image_names]
#     images, detections = batch_detection(network, images, class_names,
#                                          class_colors, batch_size=batch_size)
+    for name, image in zip(image_names, images):
+        cv2.imwrite(name.replace("data/", ""), image)
+    print(detections) """
+
+
+def main():
+    args = parser()
+    check_arguments_errors(args)
+
+    random.seed(3)  # deterministic bbox colors
+    network, class_names, class_colors = darknet.load_network(
+        args.config_file,
+        args.data_file,
+        args.weights,
+        batch_size=args.batch_size
+    )
+
+    images = load_images(args.input)
+
+    index = 0
+    while True:
+        # loop asking for new image paths if no list is given
+        if args.input:
+            if index >= len(images):
+                break
+            image_name = images[index]
+        else:
+            image_name = input("Enter Image Path: ")
+        prev_time = time.time()
+        image, detections = image_detection(
+            image_name, network, class_names, class_colors, args.thresh
+            )
+        if args.save_labels:
+            save_annotations(image_name, image, detections, class_names)
+        darknet.print_detections(detections, args.ext_output)
+        fps = int(1/(time.time() - prev_time))
+        print("FPS: {}".format(fps))
+        if not args.dont_show:
+            cv2.imshow('Inference', image)
+            if cv2.waitKey() & 0xFF == ord('q'):
+                break
+        index += 1
+
+
+if __name__ == "__main__":
+    # uncomment the next line (and the batch_detection_example definition above) for an example of batch processing
+    # batch_detection_example()
+    main()
diff --git a/src/detect.py b/src/detect.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf52bed95fccca372b29250e63a9bdb7bf91e3a6
--- /dev/null
+++ b/src/detect.py
@@ -0,0 +1,116 @@
+'''
+Load images taken by the camera and return bounding boxes.
+'''
+import argparse
+import os
+import glob
+from pathlib import Path
+import random
+import darknet  # local darknet.py wrapper
+import time
+import cv2
+import numpy as np
+from logging import basicConfig, DEBUG, INFO, error, getLogger
+
+# IMG_EXTENSION = ['jpg', 'jpeg', 'png']
+
+"""Logger for printing."""
+_LOG = getLogger(__name__)
+
+
+def load_images(images_path):
+    """
+    load all images in a folder for detection
+
+    :param images_path: the path folder
+    :return list of image paths
+    """
+    
+    return glob.glob(
+       os.path.join(images_path, "*.jpg")) + \
+       glob.glob(os.path.join(images_path, "*.png")) + \
+       glob.glob(os.path.join(images_path, "*.jpeg"))
+
+
+def image_detection(image_path, network, class_names, class_colors, thresh):
+    '''
+    Run detection on a single image.
+    '''
+    # Darknet doesn't accept numpy images directly.
+    # Create a darknet IMAGE buffer that we reuse for each detection.
+    width = darknet.network_width(network)
+    height = darknet.network_height(network)
+    darknet_image = darknet.make_image(width, height, 3)
+
+    image = cv2.imread(image_path)
+    image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+    image_resized = cv2.resize(image_rgb, (width, height),
+                               interpolation=cv2.INTER_LINEAR)
+
+    darknet.copy_image_from_bytes(darknet_image, image_resized.tobytes())
+    detections = darknet.detect_image(network, class_names, darknet_image, thresh=thresh)
+    darknet.free_image(darknet_image)
+    image = darknet.draw_boxes(detections, image_resized, class_colors)
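+    # note: image is RGB here, so this channel swap actually returns BGR, which is what cv2 expects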
+    return cv2.cvtColor(image, cv2.COLOR_BGR2RGB), detections
+
+
+def convert2relative(image, bbox):
+    """
+    The YOLO annotation format uses coordinates relative to the image size
+    """
+    x, y, w, h = bbox
+    height, width, _ = image.shape
+    return x/width, y/height, w/width, h/height
+
+
+def save_annotations(name, image, detections, class_names) -> None:
+    """
+    Save detections as <image_name>.txt, one line per box, in relative coordinates
+    """
+    file_name = os.path.splitext(name)[0] + ".txt"
+    with open(file_name, "w") as f:
+        for label, confidence, bbox in detections:
+            x, y, w, h = convert2relative(image, bbox)
+            label = class_names.index(label)
+            f.write("{} {:.4f} {:.4f} {:.4f} {:.4f} {:.4f}\n".format(label, x, y, w, h, float(confidence)))
+
+
+def detect(img_dir: Path, weight_path: Path, cfg_path: Path, data_path: Path, thresh: float = 0.25, save_labels: bool = True) -> None:
+    """
+    Main entry point: run YOLO on every image in img_dir and optionally save labels.
+    """
+    random.seed(3)  # deterministic bbox colors
+    network, class_names, class_colors = darknet.load_network(
+        cfg_path,
+        data_path,
+        weight_path
+    )
+
+    images = load_images(img_dir)
+
+    num_img = len(images)
+
+    if num_img == 0:
+        _LOG.error("No images with valid extensions in the given directory")
+        raise ValueError("No images found, please sweep the bed again.")
+
+    index = 0
+    while index < num_img:
+        image_name = images[index]  # process the folder one image at a time
+        prev_time = time.time()
+        # run YOLO on a single image
+        image, detections = image_detection(
+            image_name, network, class_names, class_colors, thresh
+            )
+        if save_labels:
+            save_annotations(image_name, image, detections, class_names)  # TODO: save labels to a separate directory
+        # darknet.print_detections(detections, True)
+
+        fps = int(1/(time.time() - prev_time))
+        _LOG.debug("FPS: {}".format(fps))
+
+        index += 1
+
+
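+# A minimal CLI sketch (an addition, not part of the original script) wiring
+# detect() to the argparse/Path/basicConfig imports above; the flag names mirror
+# darknet_images.py but are assumptions here.
+if __name__ == '__main__':
+    basicConfig(level=INFO)
+    parser = argparse.ArgumentParser(description='Run YOLO on a folder of images')
+    parser.add_argument('--input', required=True, help='directory containing images')
+    parser.add_argument('--weights', required=True, help='path to the .weights file')
+    parser.add_argument('--config_file', required=True, help='path to the .cfg file')
+    parser.add_argument('--data_file', required=True, help='path to the .data file')
+    parser.add_argument('--thresh', type=float, default=0.25, help='detection threshold')
+    args = parser.parse_args()
+    detect(args.input, args.weights, args.config_file, args.data_file, args.thresh)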
diff --git a/src/load_cam.py b/src/load_cam.py
new file mode 100644
index 0000000000000000000000000000000000000000..afba1c352b12cce1a707f291f049d03050f3fa85
--- /dev/null
+++ b/src/load_cam.py
@@ -0,0 +1,11 @@
+'''
+Load the camera intrinsic matrix from a .mat file.
+'''
+import scipy.io as io
+
+data = io.loadmat('../static/camera_no_distortion.mat')
+print(data['camera_no_distortion'][0, 0][11])  # intrinsic matrix
+print(type(data['camera_no_distortion'][0, 0][11]))
+print(data['camera_no_distortion'][0, 0][11].shape)
+
+# TODO: add an argparser (see the sketch below)
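+
+# A possible argparse wrapper, as the TODO above suggests (a sketch, not part of
+# the original script; the struct index 11 for the intrinsic matrix is carried
+# over from the prints above, and the default path is the same file).
+def load_intrinsics(mat_path):
+    """Return the intrinsic matrix stored in the calibration struct."""
+    mat = io.loadmat(mat_path)
+    return mat['camera_no_distortion'][0, 0][11]
+
+
+if __name__ == '__main__':
+    from argparse import ArgumentParser
+    parser = ArgumentParser(description='Load a camera intrinsic matrix from a .mat file')
+    parser.add_argument('--mat', default='../static/camera_no_distortion.mat',
+                        help='path to the calibration .mat file')
+    args = parser.parse_args()
+    print(load_intrinsics(args.mat))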
diff --git a/src/move.py b/src/move.py
new file mode 100644
index 0000000000000000000000000000000000000000..2da316ac1a001040fe16e08b282b81b65c519053
--- /dev/null
+++ b/src/move.py
@@ -0,0 +1,182 @@
+'''
+Author: Ziliang Xiong
+This script contains all the functions that drive the FarmBot to move, including:
+1. Taking Photos 2. Move to an assigned point (x, y, z)
+3. Sweep the planting bed 4. Grip a target
+'''
+
+from argparse import ArgumentParser
+from logging import getLogger
+from os import path, makedirs
+from time import sleep, time
+from serial import Serial, PARITY_NONE, STOPBITS_ONE, EIGHTBITS
+# from requests.api import delete
+from typing import List
+from pathlib import Path
+from logging import basicConfig, DEBUG, INFO, error, getLogger
+
+from datetime import timezone
+from dateutil.parser import parse
+from requests import get, delete
+
+import creds
+from client import FarmbotClient
+
+
+_SWEEP_HEIGHT = -200
+
+Logger = getLogger(__name__)
+
+class Opts:
+    def __init__(self, min_x, max_x, min_y, max_y, delta, offset, flag):
+        self.min_x = min_x
+        self.max_x = max_x
+        self.min_y = min_y
+        self.max_y = max_y
+        self.delta = delta
+        self.offset = offset
+        self.flag = flag
+    
+
+def sweep(min_x=0, max_x=1300, min_y=0, max_y=1000, delta=500, offset=0, flag=True) -> List:
+    '''
+    Sweep the bed at a fixed height, moving along the x axis, then y, in a zigzag;
+    take pictures and record the camera location for each one.
+    Input: min_x: leftmost point on the x axis
+           max_x: rightmost point on the x axis
+           min_y: frontmost point on the y axis
+           max_y: backmost point on the y axis
+           delta: the interval for scanning
+           offset: margin added to each coordinate
+           flag: for debugging; if true, generate the pattern but don't drive the FarmBot
+    Output: list of (x, y) points visited
+    '''
+    opts = Opts(min_x, max_x, min_y, max_y, delta, offset, flag)
+
+    pts = []
+    sweep_y_negative = False
+    for x in range(opts.min_x, opts.max_x, opts.delta):
+        y_range = range(opts.min_y, opts.max_y, opts.delta)
+        if sweep_y_negative:
+            y_range = reversed(y_range)
+        sweep_y_negative = not sweep_y_negative
+        for y in y_range:
+            pts.append((x+opts.offset, y+opts.offset))
+
+    Logger.info('Moving pattern generated')
+
+    if opts.flag:
+        Logger.info('Debug run: pattern generated, not driving the FarmBot')
+        return pts
+
+    client = FarmbotClient(creds.device_id, creds.token)
+    client.move(0, 0, _SWEEP_HEIGHT)  # start from the origin
+    for x, y in pts:
+        client.move(x, y, _SWEEP_HEIGHT)  # move the camera
+        # TODO: add a function that reads the current position - is it needed?
+        client.take_photo()
+    client.shutdown()
+    return pts
+
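+# Worked example of the zigzag (run with flag=True so the FarmBot does not move):
+# sweep(min_x=0, max_x=1000, min_y=0, max_y=1000, delta=500, offset=0, flag=True)
+# visits (0, 0), (0, 500), (500, 500), (500, 0): x advances while y snakes back and forth.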
+
+def download_images() -> Path:
+    REQUEST_HEADERS = {'Authorization': 'Bearer ' + creds.token, 'content-type': "application/json"}
+
+    local_img_dir = None  # returned even if no images were fetched
+    while True:
+        response = get('https://my.farmbot.io/api/images', headers=REQUEST_HEADERS)  # HTTP request
+        images = response.json()
+        Logger.info("Downloading {} images".format(len(images)))
+        if len(images) == 0:  # no more images to fetch
+            break  # leave the loop
+
+        at_least_one_dup = False
+        for image_info in images:
+
+            if 'placehold.it' in image_info['attachment_url']:
+                Logger.debug("IGNORE! placeholder %s", image_info['id'])
+                continue
+
+            # convert capture datetime from UTC to local time and extract
+            # a simple string version for the local image filename
+            dts = parse(image_info['attachment_processed_at'])
+            dts = dts.replace(tzinfo=timezone.utc).astimezone(tz=None)
+            local_img_dir = "imgs/%s" % dts.strftime("%Y%m%d")  # directory for the newly downloaded images
+            if not path.exists(local_img_dir):
+                makedirs(local_img_dir)
+            local_img_name = "%s/%s.jpg" % (local_img_dir, dts.strftime("%H%M%S"))
+            Logger.debug("> %s", local_img_name)
+
+            # download image from Google storage and save locally
+            captured_img_name = image_info['meta']['name']
+            if captured_img_name.startswith("/tmp/images"):
+                req = get(image_info['attachment_url'], allow_redirects=True)
+                with open(local_img_name, 'wb') as f:
+                    f.write(req.content)
+
+            # post delete from cloud storage
+            delete("https://my.farmbot.io/api/images/%d" % image_info['id'],
+                            headers=REQUEST_HEADERS)
+
+        if at_least_one_dup:
+            Logger.debug("skipped at least one duplicate; give the DELETEs a chance to work")
+            sleep(2)
+    return local_img_dir
+
+
+def simple_move(x: int, y: int, z: int, photo: bool) -> None:
+    '''
+    Move to a point; optionally take a picture.
+    Input: x, y, z: destination point
+           photo: whether to take a picture on arrival
+    '''
+    client = FarmbotClient(creds.device_id, creds.token)
+    client.move(x, y, z)  
+    if photo:
+        # take a picture
+        client.take_photo() 
+    client.shutdown()
+
+def gripper(open: bool) -> None:
+    '''
+    Drive the gripper over a serial port to open or close it
+    '''
+    ser = Serial(
+        port='COM4',  # should not be hard-coded
+        baudrate=9600,
+        parity=PARITY_NONE,
+        stopbits=STOPBITS_ONE,
+        bytesize=EIGHTBITS,
+        timeout=1
+    )
+    if open:
+        ser.write(str.encode("o"))
+    else:
+        ser.write(str.encode("c"))
+    ser.close()
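+# Usage (illustrative): gripper(True) opens the gripper; gripper(False) closes it.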
+
+
+if __name__ == '__main__':
+    parser = ArgumentParser()
+    parser.add_argument(
+        '-m',
+        '--mode',
+        type=int,
+        help='Mode for FarmBot: 1 for a simple move to an assigned destination, 2 for sweeping'
+    )
+
+    arguments = parser.parse_args()
+
+    if arguments.mode == 1:
+        Logger.info('Input the destination:')
+        destination_x = int(input('X:'))
+        destination_y = int(input('Y:'))
+        destination_z = int(input('Z:'))
+        photo = input('Take a photo or not? [Y/N]:') == 'Y'
+        simple_move_start = time()
+        simple_move(destination_x, destination_y, destination_z, photo)
+        Logger.info(f'time cost {time() - simple_move_start}')  # time was imported as a function
+    elif arguments.mode == 2:
+        sweep()
+    else:
+        Logger.error(f'Wrong mode number {arguments.mode}')
+
diff --git a/static/a.mat b/static/a.mat
new file mode 100644
index 0000000000000000000000000000000000000000..46627a3ee6975b1500deb5043f0ffc19bee1b799
Binary files /dev/null and b/static/a.mat differ
diff --git a/static/camera.mat b/static/camera.mat
new file mode 100644
index 0000000000000000000000000000000000000000..7a202766b610670b223324a7ec87d13ffbfe0513
Binary files /dev/null and b/static/camera.mat differ
diff --git a/static/camera_no_distortion.mat b/static/camera_no_distortion.mat
new file mode 100644
index 0000000000000000000000000000000000000000..a6dfb95acd43f2df93c7da74b44eaec7a467ecc9
Binary files /dev/null and b/static/camera_no_distortion.mat differ
diff --git a/static/distance.txt b/static/distance.txt
new file mode 100644
index 0000000000000000000000000000000000000000..915b1e6d7e5ebcc2a440acf8a20c7bbbf9d0cc72
--- /dev/null
+++ b/static/distance.txt
@@ -0,0 +1,6 @@
+camera's distance to the encoder
+delta_x1
+delta_y1
+gripper's distance to the encoder
+delta_x2
+delta_y2
\ No newline at end of file
diff --git a/test.md b/test.md
new file mode 100644
index 0000000000000000000000000000000000000000..08733bc1022df5419d406503f5c5eb8b8b09c73f
--- /dev/null
+++ b/test.md
@@ -0,0 +1,9 @@
+To test darknet_images.py
+```
+python ./*darknet_images.py --input ~/farmbot/img --weights ~/farmbot/darknet/backup/yolov3-vattenhallen_best.weights --dont_show --ext_output --save_labels --config_file ~/farmbot/darknet/cfg/yolov3-vattenhallen-test.cfg --data_file ~/farmbot/darknet/data/vattenhallen.data
+```
+Default values are used for the rest. 
+
+Where do the saved labels go? They are written to the same directory as the images, as .txt files with the same base names, so a whole folder of images can be detected in one pass.
+
+It would be better to change the save-label path so the labels go into a separate folder; see the sketch below.
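+
+A possible tweak (a sketch, not applied in this repo; it reuses convert2relative from darknet_images.py) that routes the label files into their own folder:
+```python
+import os
+
+def save_annotations(name, image, detections, class_names, label_dir="labels"):
+    """Write YOLO-format labels into label_dir instead of next to the image."""
+    os.makedirs(label_dir, exist_ok=True)
+    base = os.path.splitext(os.path.basename(name))[0] + ".txt"
+    file_name = os.path.join(label_dir, base)
+    with open(file_name, "w") as f:
+        for label, confidence, bbox in detections:
+            x, y, w, h = convert2relative(image, bbox)
+            label = class_names.index(label)
+            f.write("{} {:.4f} {:.4f} {:.4f} {:.4f} {:.4f}\n".format(label, x, y, w, h, float(confidence)))
+```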
diff --git a/weights/yolov3-vattenhallen_best.weights b/weights/yolov3-vattenhallen_best.weights
new file mode 100644
index 0000000000000000000000000000000000000000..2be2b03e69d7e7ac8737e939094dfb89b469dadf
--- /dev/null
+++ b/weights/yolov3-vattenhallen_best.weights
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f4c52e0a741d72314e911bc52f237c915b2fba237aebb32d1fd4c8927a9f3ff7
+size 246434628
diff --git a/weights/yolov3-veges_best.weights b/weights/yolov3-veges_best.weights
new file mode 100644
index 0000000000000000000000000000000000000000..ab34a259ce2a61e2edf207f6d0299f58b95e5318
--- /dev/null
+++ b/weights/yolov3-veges_best.weights
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:37dab259468e45f107c0b1f0071c50c035e110346634b089aac8c584c71e36f1
+size 246499248