diff --git a/.gitignore b/.gitignore
new file mode 100644
index 000000000..2a1270985
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+# Ignore these folders
+src/__pycache__/
+var/
diff --git a/doc/para.md b/doc/para.md
new file mode 100644
index 000000000..2628d786c
--- /dev/null
+++ b/doc/para.md
@@ -0,0 +1,58 @@
+## Paravision
+
+#### Creds
+Username: 2a76e3b5-733a-4c93-98fb-339927b0f90c
+Password: 453a6a2d-2935-430b-a22f-9b2880ca3818
+
+#### Setup
+
+As is often the case, the Paravision docs do not work as written for the following setup. Below is
+how I was able to install the models and set up my environment.
+
+##### To Install a Model
+
+To install a model, run:
+
+pip3 install --no-cache-dir --timeout 60 --no-deps \
+    --extra-index-url https://2a76e3b5-733a-4c93-98fb-339927b0f90c:453a6a2d-2935-430b-a22f-9b2880ca3818@paravision.mycloudrepo.io/repositories/python-recognition \
+    "paravision-recognition" "paravision-models-gen6-balanced-openvino-2022-3"
+
+You can see which models are available by visiting the repository URL in a browser.
+
+##### To Install the Liveness
+
+First we must install the Liveness SDK:
+
+pip3 install --no-cache-dir \
+ --extra-index-url https://2a76e3b5-733a-4c93-98fb-339927b0f90c:453a6a2d-2935-430b-a22f-9b2880ca3818@paravision.mycloudrepo.io/repositories/python-liveness2d \
+ "paravision-liveness2d-gen6==2.1.1"
+
+Then we must install the Liveness models:
+
+pip3 install --no-cache-dir \
+    --extra-index-url https://2a76e3b5-733a-4c93-98fb-339927b0f90c:453a6a2d-2935-430b-a22f-9b2880ca3818@paravision.mycloudrepo.io/repositories/python-liveness2d \
+    "paravision-models-gen6-liveness2d-openvino-2022-3"
+
+Then we must install the Validness models:
+
+pip3 install --no-cache-dir \
+ --extra-index-url https://2a76e3b5-733a-4c93-98fb-339927b0f90c:453a6a2d-2935-430b-a22f-9b2880ca3818@paravision.mycloudrepo.io/repositories/python-validness \
+ "paravision-models-gen6-validness-openvino-2022-3==1.0.0"
+
+Finally, install the recognition SDK with the OpenVINO extra and the recognition models:
+
+pip3 install --no-cache-dir --timeout 60 \
+    --extra-index-url https://2a76e3b5-733a-4c93-98fb-339927b0f90c:453a6a2d-2935-430b-a22f-9b2880ca3818@paravision.mycloudrepo.io/repositories/python-recognition \
+    --extra-index-url https://2a76e3b5-733a-4c93-98fb-339927b0f90c:453a6a2d-2935-430b-a22f-9b2880ca3818@paravision.mycloudrepo.io/repositories/python-libs \
+    "paravision-recognition[openvino]" "paravision-models-gen6-balanced-openvino-2022-3"
diff --git a/modules/paravision.libs/libmvec.so.1 b/modules/paravision.libs/libmvec.so.1
new file mode 100644
index 000000000..d87fd08a3
Binary files /dev/null and b/modules/paravision.libs/libmvec.so.1 differ
diff --git a/modules/paravision.libs/libopencv_core.so.4.5 b/modules/paravision.libs/libopencv_core.so.4.5
new file mode 100644
index 000000000..5f04b06b6
Binary files /dev/null and b/modules/paravision.libs/libopencv_core.so.4.5 differ
diff --git a/modules/paravision.libs/libopencv_imgcodecs.so.4.5 b/modules/paravision.libs/libopencv_imgcodecs.so.4.5
new file mode 100644
index 000000000..be118fab0
Binary files /dev/null and b/modules/paravision.libs/libopencv_imgcodecs.so.4.5 differ
diff --git a/modules/paravision.libs/libopencv_imgproc.so.4.5 b/modules/paravision.libs/libopencv_imgproc.so.4.5
new file mode 100644
index 000000000..287a5bb07
Binary files /dev/null and b/modules/paravision.libs/libopencv_imgproc.so.4.5 differ
diff --git a/modules/paravision.libs/libopenvino.so.2230 b/modules/paravision.libs/libopenvino.so.2230
new file mode 100644
index 000000000..5ba66bfee
Binary files /dev/null and b/modules/paravision.libs/libopenvino.so.2230 differ
diff --git a/modules/paravision.libs/libopenvino_intel_cpu_plugin.so b/modules/paravision.libs/libopenvino_intel_cpu_plugin.so
new file mode 100644
index 000000000..d0d3e01c9
Binary files /dev/null and b/modules/paravision.libs/libopenvino_intel_cpu_plugin.so differ
diff --git a/modules/paravision.libs/libopenvino_ir_frontend.so.2230 b/modules/paravision.libs/libopenvino_ir_frontend.so.2230
new file mode 100644
index 000000000..90c2e4453
Binary files /dev/null and b/modules/paravision.libs/libopenvino_ir_frontend.so.2230 differ
diff --git a/modules/paravision.libs/libopenvino_onnx_frontend.so.2230 b/modules/paravision.libs/libopenvino_onnx_frontend.so.2230
new file mode 100644
index 000000000..65c3f086f
Binary files /dev/null and b/modules/paravision.libs/libopenvino_onnx_frontend.so.2230 differ
diff --git a/modules/paravision.libs/libparavision_common.so b/modules/paravision.libs/libparavision_common.so
new file mode 100644
index 000000000..238446c1a
Binary files /dev/null and b/modules/paravision.libs/libparavision_common.so differ
diff --git a/modules/paravision.libs/libparavision_image_cpu.so b/modules/paravision.libs/libparavision_image_cpu.so
new file mode 100755
index 000000000..f5e41703e
Binary files /dev/null and b/modules/paravision.libs/libparavision_image_cpu.so differ
diff --git a/modules/paravision.libs/libparavision_liveness2d.so.2.1.1 b/modules/paravision.libs/libparavision_liveness2d.so.2.1.1
new file mode 100755
index 000000000..a003c6d36
Binary files /dev/null and b/modules/paravision.libs/libparavision_liveness2d.so.2.1.1 differ
diff --git a/modules/paravision.libs/libparavision_liveness2d_openvino.so.2.1.1 b/modules/paravision.libs/libparavision_liveness2d_openvino.so.2.1.1
new file mode 100755
index 000000000..6ac120464
Binary files /dev/null and b/modules/paravision.libs/libparavision_liveness2d_openvino.so.2.1.1 differ
diff --git a/modules/paravision.libs/libparavision_liveness2d_tensorrt.so.2.1.1 b/modules/paravision.libs/libparavision_liveness2d_tensorrt.so.2.1.1
new file mode 100755
index 000000000..f2f3468c3
Binary files /dev/null and b/modules/paravision.libs/libparavision_liveness2d_tensorrt.so.2.1.1 differ
diff --git a/modules/paravision.libs/libparavision_recognition.so.9.3.1 b/modules/paravision.libs/libparavision_recognition.so.9.3.1
new file mode 100755
index 000000000..750022f1a
Binary files /dev/null and b/modules/paravision.libs/libparavision_recognition.so.9.3.1 differ
diff --git a/modules/paravision.libs/libparavision_recognition_openvino.so.9.3.1 b/modules/paravision.libs/libparavision_recognition_openvino.so.9.3.1
new file mode 100755
index 000000000..91ab95a2c
Binary files /dev/null and b/modules/paravision.libs/libparavision_recognition_openvino.so.9.3.1 differ
diff --git a/modules/paravision.libs/libparavision_recognition_tensorrt.so.9.3.1 b/modules/paravision.libs/libparavision_recognition_tensorrt.so.9.3.1
new file mode 100755
index 000000000..e3fa9b918
Binary files /dev/null and b/modules/paravision.libs/libparavision_recognition_tensorrt.so.9.3.1 differ
diff --git a/modules/paravision.libs/libpugixml.so.1 b/modules/paravision.libs/libpugixml.so.1
new file mode 100644
index 000000000..c9813cb2d
Binary files /dev/null and b/modules/paravision.libs/libpugixml.so.1 differ
diff --git a/modules/paravision.libs/libtbb.so.2 b/modules/paravision.libs/libtbb.so.2
new file mode 100644
index 000000000..63021fd9a
Binary files /dev/null and b/modules/paravision.libs/libtbb.so.2 differ
diff --git a/modules/paravision.libs/plugins.xml b/modules/paravision.libs/plugins.xml
new file mode 100644
index 000000000..fc93b1c3e
--- /dev/null
+++ b/modules/paravision.libs/plugins.xml
@@ -0,0 +1 @@
+
diff --git a/modules/paravision/liveness/__init__.py b/modules/paravision/liveness/__init__.py
new file mode 100644
index 000000000..ddbfb6256
--- /dev/null
+++ b/modules/paravision/liveness/__init__.py
@@ -0,0 +1,7 @@
+try:
+ from .session import Liveness, CameraParams # noqa
+except Exception:
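+    # session.py pulls in optional runtime deps (pyrealsense2, tensorrt); tolerate their absence.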
+ pass
+
+__version__ = "7.0.0"
diff --git a/modules/paravision/liveness/__pycache__/__init__.cpython-310.pyc b/modules/paravision/liveness/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..b54a366fd
Binary files /dev/null and b/modules/paravision/liveness/__pycache__/__init__.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/__pycache__/exceptions.cpython-310.pyc b/modules/paravision/liveness/__pycache__/exceptions.cpython-310.pyc
new file mode 100644
index 000000000..90dbfe5f6
Binary files /dev/null and b/modules/paravision/liveness/__pycache__/exceptions.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/__pycache__/session.cpython-310.pyc b/modules/paravision/liveness/__pycache__/session.cpython-310.pyc
new file mode 100644
index 000000000..ce173180a
Binary files /dev/null and b/modules/paravision/liveness/__pycache__/session.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/__pycache__/types.cpython-310.pyc b/modules/paravision/liveness/__pycache__/types.cpython-310.pyc
new file mode 100644
index 000000000..673c91e33
Binary files /dev/null and b/modules/paravision/liveness/__pycache__/types.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/__pycache__/utils.cpython-310.pyc b/modules/paravision/liveness/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 000000000..f06085c98
Binary files /dev/null and b/modules/paravision/liveness/__pycache__/utils.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/exceptions.py b/modules/paravision/liveness/exceptions.py
new file mode 100644
index 000000000..180574990
--- /dev/null
+++ b/modules/paravision/liveness/exceptions.py
@@ -0,0 +1,8 @@
+class InvalidWindowSizeException(Exception):
+    def __init__(self, message):
+        super().__init__(message)
+        self.message = message
+
+
+class InvalidSpecError(Exception):
+ pass
diff --git a/modules/paravision/liveness/session.py b/modules/paravision/liveness/session.py
new file mode 100644
index 000000000..ac498217c
--- /dev/null
+++ b/modules/paravision/liveness/session.py
@@ -0,0 +1,58 @@
+import pyrealsense2 as rs
+import cv2
+import numpy as np
+
+from .tensorrt.engine import Engine
+from .utils import (
+ estimate_depth_bounding_box,
+ expand_bbox_to_edge_and_crop,
+ model_location,
+)
+from .exceptions import InvalidWindowSizeException
+
+# validity constants
+WINDOW_SIZE = 5
+
+
+class CameraParams(object):
+ def __init__(self, depth_intr, color_intr, color_to_depth_extr):
+ self.depth_intr = depth_intr
+ self.color_intr = color_intr
+ self.color_to_depth_extr = color_to_depth_extr
+
+
+class Liveness(object):
+ def __init__(self, model_path=None, settings={}):
+ if model_path is None:
+ model_path = model_location()
+ self.predictor = Engine(model_path, settings)
+
+ def load_depth_data_from_file(self, file_path):
+ return np.loadtxt(file_path, dtype=np.int16, delimiter=",")
+
+ def write_depth_data_to_file(self, file_path, depth_data):
+ np.savetxt(file_path, depth_data, fmt="%d", delimiter=",")
+
+ def crop_depth_frame(self, camera_params, depth_frame, bounding_box):
+ if camera_params is None or depth_frame is None or bounding_box is None:
+ raise Exception("Invalid input arguments")
+
+ proj_depth_bb = estimate_depth_bounding_box(bounding_box, camera_params)
+ cropped_depth_frame = expand_bbox_to_edge_and_crop(depth_frame, proj_depth_bb)
+ return cropped_depth_frame
+
+ def compute_liveness_probability(self, depth_imgs):
+ if len(depth_imgs) != WINDOW_SIZE:
+            raise InvalidWindowSizeException("Window size must equal {}".format(WINDOW_SIZE))
+
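+        # Resize each frame to the model input; cv2.resize takes (width, height), hence the swapped indices.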
+ resized = [
+ cv2.resize(
+ depth_img.astype(np.float32),
+ (self.predictor.input_shape[1], self.predictor.input_shape[0]),
+ )
+ for depth_img in depth_imgs
+ ]
+
+ frame_probs = self.predictor.predict(resized)
+ return np.mean(frame_probs)
diff --git a/modules/paravision/liveness/tensorrt/__pycache__/engine.cpython-310.pyc b/modules/paravision/liveness/tensorrt/__pycache__/engine.cpython-310.pyc
new file mode 100644
index 000000000..53f7b4f5e
Binary files /dev/null and b/modules/paravision/liveness/tensorrt/__pycache__/engine.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/tensorrt/__pycache__/utils.cpython-310.pyc b/modules/paravision/liveness/tensorrt/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 000000000..e0eb0eb58
Binary files /dev/null and b/modules/paravision/liveness/tensorrt/__pycache__/utils.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/tensorrt/engine.py b/modules/paravision/liveness/tensorrt/engine.py
new file mode 100644
index 000000000..4e2310578
--- /dev/null
+++ b/modules/paravision/liveness/tensorrt/engine.py
@@ -0,0 +1,116 @@
+import os
+
+import cv2
+import numpy as np
+import pycuda.driver as cuda
+import pycuda.autoinit # noqa
+import tensorrt as trt
+
+from .utils import do_inference, allocate_buffers, GiB
+from ..utils import _read_spec_value
+
+LOGGER = trt.Logger(trt.Logger.Severity.ERROR)
+DEFAULT_MAX_BATCH_SIZE = 1
+
+
+class Engine(object):
+ def __init__(self, model_path, settings):
+ self.stream = cuda.Stream()
+ self.input_shape = _read_spec_value(model_path, "input_shape")
+ self.engine = self._load_engine(model_path, settings)
+ self.context = self.engine.create_execution_context()
+ (self.inputs, self.outputs, self.data, self.bindings) = allocate_buffers(
+ self.engine
+ )
+
+ def _load_engine(self, model_path, settings):
+ engine_dirpath = model_path
+ try:
+ import paravision_models.liveness
+
+ if paravision_models.liveness.location() == model_path:
+ engine_dirpath = paravision_models.liveness.TRT_ENGINE_PATH
+ except (ImportError, NameError, AttributeError):
+ pass
+
+ runtime = trt.Runtime(LOGGER)
+
+ engine_path = "{}/liveness.engine".format(engine_dirpath)
+        if not os.path.isfile(engine_path):
+ return self._build_engine(model_path, engine_path, settings)
+
+ with open(engine_path, "rb") as f:
+ return runtime.deserialize_cuda_engine(f.read())
+
+ def _build_engine(self, model_path, engine_path, settings):
+ model_file = "{}/liveness.onnx".format(model_path)
+
+ max_batch_size = settings.get("max_batch_size", DEFAULT_MAX_BATCH_SIZE)
+
+ trt_version = int(trt.__version__.split(".")[0])
+ if trt_version >= 7:
+ input_shape = [max_batch_size, 3] + list(self.input_shape)
+ net_flags = 1 << (int)(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH)
+ elif trt_version == 6:
+ input_shape = [3] + list(self.input_shape)
+ net_flags = 0
+ else:
+ raise Exception("TensorRT version 6 or higher required to build engine")
+
+        if not os.path.isfile(model_file):
+ raise Exception("No model found at {}".format(model_file))
+
+ with open(model_file, "rb") as f:
+ model = f.read()
+
+ with trt.Builder(LOGGER) as builder, builder.create_network(
+ net_flags
+ ) as network, trt.OnnxParser(network, LOGGER) as parser:
+
+ builder.max_workspace_size = GiB(1)
+ builder.max_batch_size = max_batch_size
+
+ if not parser.parse(model):
+ raise Exception("Cannot parse liveness model.")
+
+ network.get_input(0).shape = input_shape
+ engine = builder.build_cuda_engine(network)
+
+ serialized = engine.serialize()
+ if serialized is None:
+ raise Exception("Cannot serialize engine")
+
+ with open(engine_path, "wb") as f:
+ f.write(serialized)
+
+ return engine
+
+ def predict(self, exp_bb_depth_imgs):
+ max_batch_size = self.engine.max_batch_size
+ live_probs = []
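+        # Run inference in chunks capped at the engine's max batch size.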
+ for i in range(0, len(exp_bb_depth_imgs), max_batch_size):
+ batch = exp_bb_depth_imgs[
+ i : min(len(exp_bb_depth_imgs), i + max_batch_size)
+ ]
+ probs_batch = self._batch_predict(batch)
+ live_probs.extend(probs_batch)
+
+ return live_probs
+
+ def _batch_predict(self, np_imgs):
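+        # Replicate each single-channel depth image across 3 channels (model expects 3 x H x W).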
+ stacked = [np.stack([np_img for _ in range(3)], axis=0) for np_img in np_imgs]
+ np_imgs = np.asarray(stacked, dtype=np.float32)
+ results = do_inference(
+ self.context,
+ bindings=self.bindings,
+ inputs=self.inputs,
+ input_data=[np_imgs.ravel()],
+ outputs=self.outputs,
+ output_data=self.data,
+ stream=self.stream,
+ )
+
+ # grab every other value to return the live probabilities
+ return results[0][0 : 2 * len(np_imgs) : 2]
diff --git a/modules/paravision/recognition/tensorrt/utils.py b/modules/paravision/liveness/tensorrt/utils.py
similarity index 67%
rename from modules/paravision/recognition/tensorrt/utils.py
rename to modules/paravision/liveness/tensorrt/utils.py
index d99488524..bef94d063 100644
--- a/modules/paravision/recognition/tensorrt/utils.py
+++ b/modules/paravision/liveness/tensorrt/utils.py
@@ -1,21 +1,27 @@
import numpy as np
import pycuda.driver as cuda
+import tensorrt as trt
from collections import defaultdict
DTYPES = defaultdict(lambda: np.float32)
-DTYPES["num_detections"] = np.int32
+DTYPES["NMS_1"] = np.int32
+
+
+def GiB(val):
+ return val * 1 << 30
def do_inference(
context, bindings, inputs, input_data, outputs, output_data, stream, batch_size=1
):
- flattened_input_data = [input_data.ravel()]
- for i, input_ in enumerate(inputs):
- cuda.memcpy_htod_async(input_, flattened_input_data[i], stream)
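+    # Copy each host input buffer to the device asynchronously.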
+    for i, input_ in enumerate(inputs):
+        cuda.memcpy_htod_async(input_, input_data[i], stream)
- context.set_binding_shape(0, input_data.shape)
- context.execute_async_v2(bindings=bindings, stream_handle=stream.handle)
+ context.execute_async(
+ bindings=bindings, stream_handle=stream.handle, batch_size=batch_size
+ )
data = []
@@ -39,7 +46,7 @@ def allocate_buffers(engine):
for binding in engine:
shape = engine.get_binding_shape(binding)
- size = calculate_volume(shape)
+ size = trt.volume(shape)
dtype = DTYPES[str(binding)]
host_mem = (size, dtype)
device_mem = cuda.mem_alloc(size * engine.max_batch_size * dtype().itemsize)
@@ -53,14 +60,3 @@ def allocate_buffers(engine):
data.append(host_mem)
return inputs, outputs, data, bindings
-
-
-def calculate_volume(shape):
- volume = 1
- for dim in shape:
- # -1 indicates dynamic batching
- if dim == -1:
- continue
- volume *= dim
-
- return volume
diff --git a/modules/paravision/recognition/openvino/__init__.py b/modules/paravision/liveness/tests/__init__.py
similarity index 100%
rename from modules/paravision/recognition/openvino/__init__.py
rename to modules/paravision/liveness/tests/__init__.py
diff --git a/modules/paravision/liveness/tests/__pycache__/__init__.cpython-310.pyc b/modules/paravision/liveness/tests/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..021f51e2f
Binary files /dev/null and b/modules/paravision/liveness/tests/__pycache__/__init__.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/tests/__pycache__/test_liveness.cpython-310.pyc b/modules/paravision/liveness/tests/__pycache__/test_liveness.cpython-310.pyc
new file mode 100644
index 000000000..b70c89376
Binary files /dev/null and b/modules/paravision/liveness/tests/__pycache__/test_liveness.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/tests/__pycache__/test_utils.cpython-310.pyc b/modules/paravision/liveness/tests/__pycache__/test_utils.cpython-310.pyc
new file mode 100644
index 000000000..921fef30c
Binary files /dev/null and b/modules/paravision/liveness/tests/__pycache__/test_utils.cpython-310.pyc differ
diff --git a/modules/paravision/liveness/tests/test_liveness.py b/modules/paravision/liveness/tests/test_liveness.py
new file mode 100644
index 000000000..7ba43d003
--- /dev/null
+++ b/modules/paravision/liveness/tests/test_liveness.py
@@ -0,0 +1,136 @@
+import os
+import pyrealsense2 as rs
+import numpy as np
+from unittest import TestCase
+
+from ..session import Liveness, CameraParams
+from ..types import Rectangle
+from ..exceptions import InvalidWindowSizeException
+
+ASSETS_PATH = os.path.join(os.path.dirname(__file__), "assets")
+liveness_session = None
+
+
+class TestSession(TestCase):
+ @classmethod
+ def setUpClass(cls):
+ global liveness_session
+ liveness_session = Liveness()
+
+ def setUp(self):
+ self.liveness_session = liveness_session
+
+ def test_crop_depth_frame(self):
+ bounding_box = Rectangle(
+ 528.551139831543, 234.36917863815668, 839.0621948242188, 642.2044240250417
+ )
+
+ depth_intrinsic = rs.intrinsics()
+ depth_intrinsic.width = 1280
+ depth_intrinsic.height = 720
+ depth_intrinsic.ppx = 640.387
+ depth_intrinsic.ppy = 357.513
+ depth_intrinsic.fx = 635.811
+ depth_intrinsic.fy = 635.811
+ depth_intrinsic.model = rs.distortion.brown_conrady
+ depth_intrinsic.coeffs = [0, 0, 0, 0, 0]
+
+ color_intrinsic = rs.intrinsics()
+ color_intrinsic.width = 1280
+ color_intrinsic.height = 720
+ color_intrinsic.ppx = 647.024
+ color_intrinsic.ppy = 356.927
+ color_intrinsic.fx = 922.169
+ color_intrinsic.fy = 922.476
+ color_intrinsic.model = rs.distortion.inverse_brown_conrady
+ color_intrinsic.coeffs = [0, 0, 0, 0, 0]
+
+ color_depth_extr = rs.extrinsics()
+ color_depth_extr.rotation = [
+ 0.999945,
+ 0.0103263,
+ 0.00163071,
+ -0.0103348,
+ 0.999932,
+ 0.00530964,
+ -0.00157577,
+ -0.0053262,
+ 0.999985,
+ ]
+ color_depth_extr.translation = [-0.0147758, -0.000159923, -0.000372309]
+
+ camera_params = CameraParams(depth_intrinsic, color_intrinsic, color_depth_extr)
+
+ depth_frame = liveness_session.load_depth_data_from_file(
+ os.path.join(ASSETS_PATH, "depth.txt")
+ )
+ cropped_depth_frame = liveness_session.crop_depth_frame(
+ camera_params, depth_frame, bounding_box
+ )
+ expected_cropped_depth_frame = liveness_session.load_depth_data_from_file(
+ os.path.join(ASSETS_PATH, "cropped_depth.txt")
+ )
+ self.assertIsNotNone(cropped_depth_frame, msg="unexpected cropped depth frame")
+ self.assertTrue(
+ np.array_equal(cropped_depth_frame, expected_cropped_depth_frame),
+ msg="invalid cropped depth frame",
+ )
+
+ def test_crop_depth_frame_invalid_camera_params(self):
+ depth_frame = liveness_session.load_depth_data_from_file(
+ os.path.join(ASSETS_PATH, "depth.txt")
+ )
+ bounding_box = Rectangle(
+ 528.551139831543, 234.36917863815668, 839.0621948242188, 642.2044240250417
+ )
+ self.assertRaises(
+ Exception,
+ liveness_session.crop_depth_frame,
+ None,
+ depth_frame,
+ bounding_box,
+ )
+
+ def test_crop_depth_frame_invalid_depth_frame(self):
+ camera_params = CameraParams(None, None, None)
+ depth_frame = None
+ bounding_box = Rectangle(1.2, 1.2, 1.2, 1.2)
+ self.assertRaises(
+ Exception,
+ liveness_session.crop_depth_frame,
+ camera_params,
+ depth_frame,
+ bounding_box,
+ )
+
+ def test_crop_depth_frame_invalid_bounding_box(self):
+ camera_params = CameraParams(None, None, None)
+ depth_frame = liveness_session.load_depth_data_from_file(
+ os.path.join(ASSETS_PATH, "depth.txt")
+ )
+ bounding_box = None
+ self.assertRaises(
+ Exception,
+ liveness_session.crop_depth_frame,
+ camera_params,
+ depth_frame,
+ bounding_box,
+ )
+
+ def test_engine_invalid_window_size(self):
+ depth_frame = liveness_session.load_depth_data_from_file(
+ os.path.join(ASSETS_PATH, "depth.txt")
+ )
+ self.assertRaises(
+ InvalidWindowSizeException,
+ liveness_session.compute_liveness_probability,
+ [depth_frame],
+ )
+
+ def test_engine_valid_window_size(self):
+ depth_frame = liveness_session.load_depth_data_from_file(
+ os.path.join(ASSETS_PATH, "depth.txt")
+ )
+
+ prob = liveness_session.compute_liveness_probability([depth_frame] * 5)
+ self.assertTrue(prob >= 0 and prob <= 1)
diff --git a/modules/paravision/liveness/tests/test_utils.py b/modules/paravision/liveness/tests/test_utils.py
new file mode 100644
index 000000000..226650cce
--- /dev/null
+++ b/modules/paravision/liveness/tests/test_utils.py
@@ -0,0 +1,100 @@
+import os
+import pyrealsense2 as rs
+import numpy as np
+
+from unittest import TestCase
+
+from ..session import Liveness, CameraParams
+from ..utils import estimate_depth_bounding_box, expand_bbox_to_edge_and_crop
+from ..types import Rectangle
+
+
+ASSETS_PATH = os.path.join(os.path.dirname(__file__), "assets")
+liveness_session = None
+expected_projected_bounding_box = [
+ 514.62513733,
+ 267.85284424,
+ 726.44473267,
+ 551.44799805,
+]
+
+
+class TestUtils(TestCase):
+ @classmethod
+ def setUpClass(cls):
+ global liveness_session
+ liveness_session = Liveness()
+
+ def setUp(self):
+ self.liveness_session = liveness_session
+
+ def test_estimate_depth_bounding_box(self):
+ bounding_box = Rectangle(
+ 528.551139831543, 234.36917863815668, 839.0621948242188, 642.2044240250417
+ )
+
+ depth_intrinsic = rs.intrinsics()
+ depth_intrinsic.width = 1280
+ depth_intrinsic.height = 720
+ depth_intrinsic.ppx = 640.387
+ depth_intrinsic.ppy = 357.513
+ depth_intrinsic.fx = 635.811
+ depth_intrinsic.fy = 635.811
+ depth_intrinsic.model = rs.distortion.brown_conrady
+ depth_intrinsic.coeffs = [0, 0, 0, 0, 0]
+
+ color_intrinsic = rs.intrinsics()
+ color_intrinsic.width = 1280
+ color_intrinsic.height = 720
+ color_intrinsic.ppx = 647.024
+ color_intrinsic.ppy = 356.927
+ color_intrinsic.fx = 922.169
+ color_intrinsic.fy = 922.476
+ color_intrinsic.model = rs.distortion.inverse_brown_conrady
+ color_intrinsic.coeffs = [0, 0, 0, 0, 0]
+
+ color_depth_extr = rs.extrinsics()
+ color_depth_extr.rotation = [
+ 0.999945,
+ 0.0103263,
+ 0.00163071,
+ -0.0103348,
+ 0.999932,
+ 0.00530964,
+ -0.00157577,
+ -0.0053262,
+ 0.999985,
+ ]
+ color_depth_extr.translation = [-0.0147758, -0.000159923, -0.000372309]
+ camera_params = CameraParams(depth_intrinsic, color_intrinsic, color_depth_extr)
+
+ proj_depth_bb = estimate_depth_bounding_box(bounding_box, camera_params)
+
+ self.assertIsNotNone(
+ proj_depth_bb, msg="unexpected or none projected depth bounding box"
+ )
+
+ self.assertTrue(
+ np.allclose(proj_depth_bb, expected_projected_bounding_box),
+ msg="invalid projected depth bounding box",
+ )
+
+ def test_expand_bbox_to_edge_and_crop(self):
+ depth_frame = liveness_session.load_depth_data_from_file(
+ os.path.join(ASSETS_PATH, "depth.txt")
+ )
+ expanded_cropped_frame = expand_bbox_to_edge_and_crop(
+ depth_frame, expected_projected_bounding_box
+ )
+
+ expected_cropped_depth_frame = liveness_session.load_depth_data_from_file(
+ os.path.join(ASSETS_PATH, "cropped_depth.txt")
+ )
+
+ self.assertIsNotNone(
+ expanded_cropped_frame, msg="unexpected cropped depth frame"
+ )
+ self.assertTrue(
+ np.array_equal(expanded_cropped_frame, expected_cropped_depth_frame),
+ msg="invalid cropped depth frame",
+ )
diff --git a/modules/paravision/liveness/types.py b/modules/paravision/liveness/types.py
new file mode 100644
index 000000000..fdeed0bbd
--- /dev/null
+++ b/modules/paravision/liveness/types.py
@@ -0,0 +1,91 @@
+import base64
+import cv2
+
+import numpy as np
+
+
+class Point(object):
+ """
+ A point within an image, represented by x- and y-coordinates.
+
+ Attributes
+ ----------
+ x : int
+ The x-coordinate.
+ y : int
+ The y-coordinate.
+ """
+
+ def __init__(self, x, y):
+ self.x = int(x)
+ self.y = int(y)
+
+ def __repr__(self):
+ return "" % (self.x, self.y)
+
+ def __str__(self):
+ return "(%d, %d)" % (self.x, self.y)
+
+ def as_dict(self):
+ """Convert this object to a dictionary"""
+ return {"x": self.x, "y": self.y}
+
+ def todict(self):
+ return self.as_dict()
+
+ def tolist(self):
+ """Convert this object to a list"""
+ return [self.x, self.y]
+
+
+class Rectangle(object):
+ """
+ A rectangle, represented by top-left and bottom-right Points.
+
+ Attributes
+ ----------
+ top_left : Point
+ The top-left corner of the rectangle.
+ bottom_right : Point
+ The bottom-right corner of the rectangle.
+ """
+
+ def __init__(self, x1, y1, x2, y2):
+ self.top_left = Point(x1, y1)
+ self.bottom_right = Point(x2, y2)
+
+ def __repr__(self):
+ return "" % (
+ repr(self.top_left),
+ repr(self.bottom_right),
+ )
+
+ def __str__(self):
+ return "(%s, %s)" % (str(self.top_left), str(self.bottom_right))
+
+ def as_dict(self):
+ """Convert this object to a dictionary"""
+ return {
+ "top_left": self.top_left.as_dict(),
+ "bottom_right": self.bottom_right.as_dict(),
+ }
+
+ def todict(self):
+ return self.as_dict()
+
+ def tolist(self):
+ """Convert this object to a list"""
+ return [
+ self.top_left.x,
+ self.top_left.y,
+ self.bottom_right.x,
+ self.bottom_right.y,
+ ]
+
+ def width(self):
+ """Get the width of the Rectangle."""
+ return self.bottom_right.x - self.top_left.x
+
+ def height(self):
+ """Get the height of the Rectangle."""
+ return self.bottom_right.y - self.top_left.y
diff --git a/modules/paravision/liveness/utils.py b/modules/paravision/liveness/utils.py
new file mode 100644
index 000000000..daec95d1a
--- /dev/null
+++ b/modules/paravision/liveness/utils.py
@@ -0,0 +1,102 @@
+import numpy as np
+import json
+import pyrealsense2 as rs
+
+from .exceptions import InvalidSpecError
+
+DEPTH_MIN = 0.11
+DEPTH_MAX = 10.0
+EXPANSION_FACTOR = 0.7
+
+
+def _read_spec_value(model_loc, key):
+ try:
+ with open("{}/spec.json".format(model_loc), "r") as f:
+ spec = json.load(f)
+
+ return spec[key]
+ except (FileNotFoundError, KeyError):
+ raise InvalidSpecError("Invalid spec file. Try upgrading your model.")
+
+
+def model_location():
+ try:
+ import paravision_models.liveness
+
+ return paravision_models.liveness.location()
+ except ImportError:
+ raise ImportError("You need to install Paravision Liveness Models package")
+
+
+def expand_bbox_to_edge_and_crop(depth_frame, proj_depth_bb):
+ h, w = depth_frame.shape[:2]
+ exp_bbox = _expand_bbox_to_edges(h, w, proj_depth_bb)
+ cropped = _crop(depth_frame, exp_bbox)
+ return cropped
+
+
+def estimate_depth_bounding_box(bb, camera_params):
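+    # Project the color-image bbox corners into depth-image coordinates via epipolar midpoints.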
+ height = camera_params.depth_intr.height
+ width = camera_params.depth_intr.width
+
+ left_corner = [bb.top_left.x, bb.top_left.y]
+ right_corner = [bb.bottom_right.x, bb.bottom_right.y]
+
+ left_depth_corner = _compute_epipolar_midpoint(
+ height, width, left_corner, camera_params
+ )
+ right_depth_corner = _compute_epipolar_midpoint(
+ height, width, right_corner, camera_params
+ )
+ proj_bb = np.hstack([left_depth_corner, right_depth_corner])
+ return proj_bb
+
+
+def _get_depth_point(height, width, pt, scale, camera_params):
+ color_world_pt = rs.rs2_deproject_pixel_to_point(
+ camera_params.color_intr, pt, scale
+ )
+ depth_world_pt = rs.rs2_transform_point_to_point(
+ camera_params.color_to_depth_extr, color_world_pt
+ )
+ depth_pt = rs.rs2_project_point_to_pixel(camera_params.depth_intr, depth_world_pt)
+ depth_pt = rs.adjust_2D_point_to_boundary(depth_pt, width, height)
+ return depth_pt
+
+
+def _compute_epipolar_midpoint(height, width, pt, camera_params):
+ # define depth endpoints of epipolar line to search
+ start_depth_pt = _get_depth_point(height, width, pt, DEPTH_MIN, camera_params)
+ end_depth_pt = _get_depth_point(height, width, pt, DEPTH_MAX, camera_params)
+
+ mid_pt = (np.array(start_depth_pt) + np.array(end_depth_pt)) / 2
+ return mid_pt
+
+
+def _expand_bbox_to_edges(h, w, bbox):
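+    # Grow the box by EXPANSION_FACTOR, split evenly per side, then clamp to the image bounds.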
+ x1, y1, x2, y2 = bbox
+ dx = (x2 - x1) * EXPANSION_FACTOR / 2
+ dy = (y2 - y1) * EXPANSION_FACTOR / 2
+ x1_ = max(0, x1 - dx)
+ y1_ = max(0, y1 - dy)
+ x2_ = min(w, x2 + dx)
+ y2_ = min(h, y2 + dy)
+ return _round(np.array([x1_, y1_, x2_, y2_]))
+
+
+def _crop(np_img, bbox):
+ bbox = _round(bbox)
+ x1, y1, x2, y2 = bbox
+ h, w = np_img.shape[:2]
+ x1 = max(x1, 0)
+ y1 = max(y1, 0)
+ x2 = min(x2, w)
+ y2 = min(y2, h)
+
+ return np_img[y1:y2, x1:x2]
+
+
+def _round(bbox):
+ return np.rint(bbox).astype(np.int32)
diff --git a/modules/paravision/liveness2d.cpython-310-x86_64-linux-gnu.so b/modules/paravision/liveness2d.cpython-310-x86_64-linux-gnu.so
new file mode 100755
index 000000000..f88f20eb3
Binary files /dev/null and b/modules/paravision/liveness2d.cpython-310-x86_64-linux-gnu.so differ
diff --git a/modules/paravision/liveness2d/__init__.pyi b/modules/paravision/liveness2d/__init__.pyi
new file mode 100644
index 000000000..7bbcc2d09
--- /dev/null
+++ b/modules/paravision/liveness2d/__init__.pyi
@@ -0,0 +1,15 @@
+from __future__ import annotations
+import paravision.liveness2d
+import typing
+from .types import Engine
+from .sdk import SDK
+
+__all__ = [
+ "Engine",
+ "SDK",
+ "sdk",
+ "types"
+]
+
+
+__version__ = 'dev'
diff --git a/modules/paravision/recognition/tensorrt/__init__.py b/modules/paravision/liveness2d/py.typed
similarity index 100%
rename from modules/paravision/recognition/tensorrt/__init__.py
rename to modules/paravision/liveness2d/py.typed
diff --git a/modules/paravision/liveness2d/sdk/__init__.pyi b/modules/paravision/liveness2d/sdk/__init__.pyi
new file mode 100644
index 000000000..d30d21063
--- /dev/null
+++ b/modules/paravision/liveness2d/sdk/__init__.pyi
@@ -0,0 +1,113 @@
+from __future__ import annotations
+import paravision.liveness2d.sdk
+import typing
+import paravision.liveness2d.types
+import paravision.recognition.sdk
+import paravision.recognition.types
+
+__all__ = [
+ "Metadata",
+ "SDK"
+]
+
+
+class Metadata():
+ def __init__(self) -> None: ...
+
+ @property
+ def engine(self) -> str:
+ """
+ The engine or accelerator of the Liveness2D SDK instance being used.
+
+ :type: str
+ """
+ @engine.setter
+ def engine(self, arg0: str) -> None:
+ """
+ The engine or accelerator of the Liveness2D SDK instance being used.
+ """
+ @property
+ def engine_version(self) -> str:
+ """
+ The version of the engine or accelerator being used.
+
+ :type: str
+ """
+ @engine_version.setter
+ def engine_version(self, arg0: str) -> None:
+ """
+ The version of the engine or accelerator being used.
+ """
+ @property
+ def generation(self) -> int:
+ """
+ The generation of the Liveness2D SDK.
+
+ :type: int
+ """
+ @generation.setter
+ def generation(self, arg0: int) -> None:
+ """
+ The generation of the Liveness2D SDK.
+ """
+ @property
+ def model_version(self) -> str:
+ """
+ The version of the Liveness2D models.
+
+ :type: str
+ """
+ @model_version.setter
+ def model_version(self, arg0: str) -> None:
+ """
+ The version of the Liveness2D models.
+ """
+ @property
+ def sdk_version(self) -> str:
+ """
+ The version of the Liveness2D SDK.
+
+ :type: str
+ """
+ @sdk_version.setter
+ def sdk_version(self, arg0: str) -> None:
+ """
+ The version of the Liveness2D SDK.
+ """
+ pass
+
+
+class SDK():
+ """
+ SDK()
+
+    An SDK object contains an instance of the Paravision model and its
+    associated resources.
+
+    SDK objects are long-lived and do not need to be re-instantiated between
+ method calls.
+ """
+ def __init__(self, models_dir: typing.Optional[str] = None, settings: typing.Optional[paravision.liveness2d.types.Settings] = None) -> None:
+ """
+ Create a new SDK instance.
+ """
+
+ def check_validness(self, face: paravision.recognition.types.Face, validness_settings: paravision.liveness2d.types.ValidnessSettings, reco_sdk: paravision.recognition.sdk.SDK) -> paravision.recognition.types.ValidnessResult:
+ """
+ Check if a face is valid for liveness inference.
+ """
+
+ def get_liveness(self, face: paravision.recognition.types.Face) -> paravision.liveness2d.types.LivenessResult:
+ """
+ Get the liveness of a face.
+ """
+
+ @staticmethod
+ def get_metadata(models_dir: typing.Optional[str] = None) -> Metadata:
+ """
+ Returns metadata for SDK and model info.
+ """
+
+ pass
+
+
diff --git a/modules/paravision/liveness2d/types/__init__.pyi b/modules/paravision/liveness2d/types/__init__.pyi
new file mode 100644
index 000000000..7249f9995
--- /dev/null
+++ b/modules/paravision/liveness2d/types/__init__.pyi
@@ -0,0 +1,383 @@
+from __future__ import annotations
+import paravision.liveness2d.types
+import typing
+import paravision.recognition.types
+
+__all__ = [
+ "Engine",
+ "GPUConfig",
+ "LivenessResult",
+ "Settings",
+ "ValidnessSettings"
+]
+
+
+class Engine():
+ """
+ Members:
+
+ AUTO : Automatically select the engine
+
+ OPENVINO : Use the OpenVINO engine
+
+ TENSORRT : Use the TensorRT engine
+ """
+ def __eq__(self, other: object) -> bool: ...
+
+ def __getstate__(self) -> int: ...
+
+ def __hash__(self) -> int: ...
+
+ def __index__(self) -> int: ...
+
+ def __init__(self, value: int) -> None: ...
+
+ def __int__(self) -> int: ...
+
+ def __ne__(self, other: object) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def __setstate__(self, state: int) -> None: ...
+
+ def __str__(self) -> str: ...
+
+ @property
+ def name(self) -> str:
+ """
+ :type: str
+ """
+ @property
+ def value(self) -> int:
+ """
+ :type: int
+ """
+    AUTO: paravision.liveness2d.types.Engine
+    OPENVINO: paravision.liveness2d.types.Engine
+    TENSORRT: paravision.liveness2d.types.Engine
+    __members__: dict
+ pass
+
+
+class GPUConfig():
+ def __init__(self) -> None: ...
+
+ @property
+ def gpu_id(self) -> int:
+ """
+ The index of the GPU device to use.
+
+ :type: int
+ """
+ @gpu_id.setter
+ def gpu_id(self, arg0: int) -> None:
+ """
+ The index of the GPU device to use.
+ """
+ @property
+ def worker_count(self) -> int:
+ """
+ The number of workers to allocate.
+
+ :type: int
+ """
+ @worker_count.setter
+ def worker_count(self, arg0: int) -> None:
+ """
+ The number of workers to allocate.
+ """
+ pass
+
+
+class LivenessResult():
+ """
+ LivenessResult()
+
+ A LivenessResult object contains the result of a liveness2d inference run on a face.
+
+ Attributes
+ ----------
+    live_prob : float
+        The probability that the face is real.
+    spoof_prob : float
+ The probability that the face is a spoof.
+ """
+ @typing.overload
+ def __init__(self) -> None:
+ """
+ Construct an empty LivenessResult
+ """
+
+ @typing.overload
+ def __init__(self, live_prob: float, spoof_prob: float) -> None:
+ """
+        Construct a LivenessResult with the given probabilities
+ """
+
+ def __repr__(self) -> str: ...
+
+ def asdict(self) -> dict:
+ """
+ Convert this object to a dictionary
+ """
+
+ @property
+ def live_prob(self) -> float:
+ """
+ The probability that the face is real
+
+ :type: float
+ """
+ @property
+ def spoof_prob(self) -> float:
+ """
+ The probability that the face is a spoof
+
+ :type: float
+ """
+ pass
+
+
+class Settings():
+ def __init__(self) -> None: ...
+
+ @property
+ def cache_generated_engine(self) -> bool:
+ """
+ Cache generated engine for TensorRT only
+
+ :type: bool
+ """
+ @cache_generated_engine.setter
+ def cache_generated_engine(self, arg0: bool) -> None:
+ """
+ Cache generated engine for TensorRT only
+ """
+ @property
+ def engine(self) -> Engine:
+ """
+ Engine to use
+
+ :type: Engine
+ """
+ @engine.setter
+ def engine(self, arg0: Engine) -> None:
+ """
+ Engine to use
+ """
+ @property
+ def gpu_configs(self) -> list[GPUConfig]:
+ """
+ List of GPU configs for worker allocation in multiple GPUs
+
+ :type: list[GPUConfig]
+ """
+ @gpu_configs.setter
+ def gpu_configs(self, arg0: list[GPUConfig]) -> None:
+ """
+ List of GPU configs for worker allocation in multiple GPUs
+ """
+ @property
+ def openvino_threads_limit(self) -> int:
+ """
+ Thread limit for OpenVINO
+
+ :type: int
+ """
+ @openvino_threads_limit.setter
+ def openvino_threads_limit(self, arg0: int) -> None:
+ """
+ Thread limit for OpenVINO
+ """
+ @property
+ def tensorrt_engine_cache_path(self) -> str:
+ """
+ Directory of where serialized model files are stored
+
+ :type: str
+ """
+ @tensorrt_engine_cache_path.setter
+ def tensorrt_engine_cache_path(self, arg0: str) -> None:
+ """
+ Directory of where serialized model files are stored
+ """
+ @property
+ def use_cached_engine(self) -> bool:
+ """
+ Use cached engine for TensorRT only
+
+ :type: bool
+ """
+ @use_cached_engine.setter
+ def use_cached_engine(self, arg0: bool) -> None:
+ """
+ Use cached engine for TensorRT only
+ """
+ @property
+ def worker_count(self) -> int:
+ """
+ The number of workers for inference
+
+ :type: int
+ """
+ @worker_count.setter
+ def worker_count(self, arg0: int) -> None:
+ """
+ The number of workers for inference
+ """
+ pass
+
+
+class ValidnessSettings():
+ def __init__(self, face: paravision.recognition.types.Face) -> None:
+ """
+ Construct ValidnessSettings instance.
+ """
+
+ @property
+ def fail_fast(self) -> bool:
+ """
+ Option to early return if any check fails
+
+ :type: bool
+ """
+ @fail_fast.setter
+ def fail_fast(self, arg0: bool) -> None:
+ """
+ Option to early return if any check fails
+ """
+ @property
+ def image_boundary_height_pct(self) -> float:
+ """
+        The percentage of image height the face is permitted to occupy
+
+ :type: float
+ """
+ @image_boundary_height_pct.setter
+ def image_boundary_height_pct(self, arg0: float) -> None:
+ """
+        The percentage of image height the face is permitted to occupy
+ """
+ @property
+ def image_boundary_width_pct(self) -> float:
+ """
+        The percentage of image width the face is permitted to occupy
+
+ :type: float
+ """
+ @image_boundary_width_pct.setter
+ def image_boundary_width_pct(self, arg0: float) -> None:
+ """
+        The percentage of image width the face is permitted to occupy
+ """
+ @property
+ def image_illumination_control(self) -> int:
+ """
+ The image illumination control
+
+ :type: int
+ """
+ @image_illumination_control.setter
+ def image_illumination_control(self, arg0: int) -> None:
+ """
+ The image illumination control
+ """
+ @property
+ def max_face_mask_prob(self) -> float:
+ """
+ The max probability the face has a mask
+
+ :type: float
+ """
+ @max_face_mask_prob.setter
+ def max_face_mask_prob(self, arg0: float) -> None:
+ """
+ The max probability the face has a mask
+ """
+ @property
+ def max_face_roll_angle(self) -> int:
+ """
+ The maximum roll angle allowed of the face.
+
+ :type: int
+ """
+ @max_face_roll_angle.setter
+ def max_face_roll_angle(self, arg0: int) -> None:
+ """
+ The maximum roll angle allowed of the face.
+ """
+ @property
+ def max_face_size_pct(self) -> float:
+ """
+ The max percentage of the face size relative to the image boundaries
+
+ :type: float
+ """
+ @max_face_size_pct.setter
+ def max_face_size_pct(self, arg0: float) -> None:
+ """
+ The max percentage of the face size relative to the image boundaries
+ """
+ @property
+ def min_face_acceptability(self) -> float:
+ """
+ The minimum face acceptability threshold
+
+ :type: float
+ """
+ @min_face_acceptability.setter
+ def min_face_acceptability(self, arg0: float) -> None:
+ """
+ The minimum face acceptability threshold
+ """
+ @property
+ def min_face_frontality(self) -> int:
+ """
+ The minimum face frontality threshold
+
+ :type: int
+ """
+ @min_face_frontality.setter
+ def min_face_frontality(self, arg0: int) -> None:
+ """
+ The minimum face frontality threshold
+ """
+ @property
+ def min_face_quality(self) -> float:
+ """
+ The minimum image quality threshold
+
+ :type: float
+ """
+ @min_face_quality.setter
+ def min_face_quality(self, arg0: float) -> None:
+ """
+ The minimum image quality threshold
+ """
+ @property
+ def min_face_sharpness(self) -> float:
+ """
+ The minimum image sharpness threshold
+
+ :type: float
+ """
+ @min_face_sharpness.setter
+ def min_face_sharpness(self, arg0: float) -> None:
+ """
+ The minimum image sharpness threshold
+ """
+ @property
+ def min_face_size(self) -> int:
+ """
+ The minimum size of the face
+
+ :type: int
+ """
+ @min_face_size.setter
+ def min_face_size(self, arg0: int) -> None:
+ """
+ The minimum size of the face
+ """
+ pass
+
+
diff --git a/modules/paravision/recognition.cpython-310-x86_64-linux-gnu.so b/modules/paravision/recognition.cpython-310-x86_64-linux-gnu.so
new file mode 100755
index 000000000..ce040b283
Binary files /dev/null and b/modules/paravision/recognition.cpython-310-x86_64-linux-gnu.so differ
diff --git a/modules/paravision/recognition/__init__.py b/modules/paravision/recognition/__init__.py
deleted file mode 100644
index d962558b4..000000000
--- a/modules/paravision/recognition/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-Paravision
-======
-
-Provides an interface to the Paravision models.
-
-How to use the documentation
-----------------------------
-Documentation is available in two forms: docstrings provided within the code,
-and a reference guide, available
-`here `.
-
-Code snippets are indicated by three greater-than signs::
-
- >>> sdk = paravision.SDK()
-
-Use the built-in ``help`` function to view a function or object's docstring::
-
- >>> help(paravision.SDK)
- ...
-
-Example
--------
-This simple example illustrates how to detect the bounding boxes of faces in an image:
-
- >>> import paravision
- >>> from paravision.utils import load_image
- >>> img = load_image('/tmp/face.jpg')
- >>> sdk = paravision.SDK()
- >>> sdk.get_faces(img)
- ([], 0)
-"""
-
-from .sdk import SDK # noqa
-from .engine import Engine # noqa
-
-__version__ = "8.2.0"
diff --git a/modules/paravision/recognition/__init__.pyi b/modules/paravision/recognition/__init__.pyi
new file mode 100644
index 000000000..7c0a0d8ec
--- /dev/null
+++ b/modules/paravision/recognition/__init__.pyi
@@ -0,0 +1,19 @@
+from __future__ import annotations
+import paravision.recognition
+import typing
+from .types import Engine
+from .types import ImageManipulator
+from .sdk import SDK
+
+__all__ = [
+ "Engine",
+ "ImageManipulator",
+ "SDK",
+ "exceptions",
+ "sdk",
+ "types",
+ "utils"
+]
+
+
+__version__ = 'dev'
diff --git a/modules/paravision/recognition/__pycache__/__init__.cpython-310.pyc b/modules/paravision/recognition/__pycache__/__init__.cpython-310.pyc
deleted file mode 100644
index f9925d72d..000000000
Binary files a/modules/paravision/recognition/__pycache__/__init__.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/__init__.cpython-36.pyc b/modules/paravision/recognition/__pycache__/__init__.cpython-36.pyc
deleted file mode 100644
index cbad50ae2..000000000
Binary files a/modules/paravision/recognition/__pycache__/__init__.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/_internal.cpython-310.pyc b/modules/paravision/recognition/__pycache__/_internal.cpython-310.pyc
deleted file mode 100644
index 96b594792..000000000
Binary files a/modules/paravision/recognition/__pycache__/_internal.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/_internal.cpython-36.pyc b/modules/paravision/recognition/__pycache__/_internal.cpython-36.pyc
deleted file mode 100644
index 2c786e246..000000000
Binary files a/modules/paravision/recognition/__pycache__/_internal.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/_utils.cpython-310.pyc b/modules/paravision/recognition/__pycache__/_utils.cpython-310.pyc
deleted file mode 100644
index 075025619..000000000
Binary files a/modules/paravision/recognition/__pycache__/_utils.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/_utils.cpython-36.pyc b/modules/paravision/recognition/__pycache__/_utils.cpython-36.pyc
deleted file mode 100644
index b09f1914c..000000000
Binary files a/modules/paravision/recognition/__pycache__/_utils.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/engine.cpython-310.pyc b/modules/paravision/recognition/__pycache__/engine.cpython-310.pyc
deleted file mode 100644
index 601c13499..000000000
Binary files a/modules/paravision/recognition/__pycache__/engine.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/engine.cpython-36.pyc b/modules/paravision/recognition/__pycache__/engine.cpython-36.pyc
deleted file mode 100644
index 301786e1f..000000000
Binary files a/modules/paravision/recognition/__pycache__/engine.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/exceptions.cpython-310.pyc b/modules/paravision/recognition/__pycache__/exceptions.cpython-310.pyc
deleted file mode 100644
index 1c6c072ee..000000000
Binary files a/modules/paravision/recognition/__pycache__/exceptions.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/exceptions.cpython-36.pyc b/modules/paravision/recognition/__pycache__/exceptions.cpython-36.pyc
deleted file mode 100644
index 6d4721a2d..000000000
Binary files a/modules/paravision/recognition/__pycache__/exceptions.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/sdk.cpython-310.pyc b/modules/paravision/recognition/__pycache__/sdk.cpython-310.pyc
deleted file mode 100644
index c86c66b1d..000000000
Binary files a/modules/paravision/recognition/__pycache__/sdk.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/sdk.cpython-36.pyc b/modules/paravision/recognition/__pycache__/sdk.cpython-36.pyc
deleted file mode 100644
index e4061b687..000000000
Binary files a/modules/paravision/recognition/__pycache__/sdk.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/types.cpython-310.pyc b/modules/paravision/recognition/__pycache__/types.cpython-310.pyc
deleted file mode 100644
index f9dc369c1..000000000
Binary files a/modules/paravision/recognition/__pycache__/types.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/types.cpython-36.pyc b/modules/paravision/recognition/__pycache__/types.cpython-36.pyc
deleted file mode 100644
index 17a03f6e0..000000000
Binary files a/modules/paravision/recognition/__pycache__/types.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/utils.cpython-310.pyc b/modules/paravision/recognition/__pycache__/utils.cpython-310.pyc
deleted file mode 100644
index 082edeff0..000000000
Binary files a/modules/paravision/recognition/__pycache__/utils.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/__pycache__/utils.cpython-36.pyc b/modules/paravision/recognition/__pycache__/utils.cpython-36.pyc
deleted file mode 100644
index 8d2dbd08b..000000000
Binary files a/modules/paravision/recognition/__pycache__/utils.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/_internal.py b/modules/paravision/recognition/_internal.py
deleted file mode 100644
index 2cec95568..000000000
--- a/modules/paravision/recognition/_internal.py
+++ /dev/null
@@ -1,298 +0,0 @@
-import cv2
-import numpy as np
-
-from . import _utils as utils
-from .engine import Engine
-from .types import BoundingBox, Landmarks, Embedding
-from .exceptions import ModelLoadingException
-
-LANDMARKS_EXPAND_FACTOR = 0.3
-ALIGNMENT_EXPAND_FACTOR = 1.0
-MASK_EXPAND_FACTOR = 0.3
-
-"""The way the pipeline is run needs to be refactored.
- Making temporary fixes for now. """
-AVAILABLE_OPTIONS = ["find_landmarks", "compute_embeddings"]
-
-
-class SplitGraph:
- def __init__(self, models_dirpath, settings=None, engine=Engine.OPENVINO):
- if settings is None:
- settings = {}
-
- if isinstance(engine, Engine):
- self.engine_name = engine
- else:
- self.engine_name = engine.split("-")[0]
-
- if self.engine_name == Engine.OPENVINO:
- from .openvino.engine import Engine as E
- elif self.engine_name == Engine.TENSORRT:
- from .tensorrt.engine import Engine as E
- else:
- raise ModelLoadingException(
- f"This is not a valid engine choice: {engine}. Available choices are: {Engine.all()}."
- )
-
- self.engine = E(models_dirpath, settings)
-
- def prepare_for_detection(self, img):
- height, width = img.shape[:2]
- fd_input_height, fd_input_width = self.engine.fd_input_shape
- ratio = min(fd_input_height / height, fd_input_width / width)
- target_width = round(width * ratio)
- target_height = round(height * ratio)
- resized = utils.resize(img, target_height, target_width)
- offset_pad_height = fd_input_height - target_height
- offset_pad_width = fd_input_width - target_width
- padded = cv2.copyMakeBorder(
- resized,
- 0,
- offset_pad_height,
- 0,
- offset_pad_width,
- cv2.BORDER_CONSTANT,
- value=[0, 0, 0],
- )
-
- return padded, (target_height, target_width)
-
- def prepare_for_landmarks(self, np_img, bbox, original_size):
- exp_bbox, _, pre_pad_exp_img = utils.expand_and_crop(
- np_img, LANDMARKS_EXPAND_FACTOR, bbox, original_size
- )
-
- image_h, image_w = np_img.shape[:2]
- exp_img = utils.maybe_pad(pre_pad_exp_img, exp_bbox, image_h, image_w)
-
- target_h, target_w = self.engine.lm_input_shape
- resized_img = utils.resize(exp_img, target_h, target_w)
-
- return exp_bbox, resized_img
-
- def process_post_detection(
- self, imgs, relative_bboxes, detection_input_sizes, img_indexes
- ):
- absolute_bboxes = []
- alignment_images = []
- alignment_bounding_boxes = []
- landmarks_input_bounding_boxes = []
- landmarks_input_images = []
-
- for i, relative_bbox in enumerate(relative_bboxes):
- img = imgs[img_indexes[i]]
- detection_input_size = detection_input_sizes[img_indexes[i]]
- img_size = np.asarray(img.shape[:2])
- absolute_bbox = utils.convert_to_absolute_coordinates(
- relative_bbox,
- detection_input_size,
- img_size,
- self.engine.fd_input_shape,
- )
-
- if absolute_bbox[0] > img_size[1] or absolute_bbox[1] > img_size[0]:
- continue
-
- square_bb = utils.square(absolute_bbox)
- landmarks_input_bbox, landmarks_input_image = self.prepare_for_landmarks(
- img, square_bb, img_size
- )
- _, alignment_bbox, alignment_image = utils.expand_and_crop(
- img, ALIGNMENT_EXPAND_FACTOR, square_bb, img_size
- )
-
- absolute_bboxes.append(absolute_bbox)
- alignment_images.append(alignment_image)
- alignment_bounding_boxes.append(alignment_bbox)
- landmarks_input_images.append(landmarks_input_image)
- landmarks_input_bounding_boxes.append(landmarks_input_bbox)
-
- values = {
- "bounding_boxes": [BoundingBox(*_bb) for _bb in absolute_bboxes],
- "landmarks_input_bounding_boxes": landmarks_input_bounding_boxes,
- "landmarks_input_images": landmarks_input_images,
- "alignment_bounding_boxes": alignment_bounding_boxes,
- "alignment_images": alignment_images,
- }
-
- return values
-
- def process_detection_options(self, detect_resp, scoring_mode, options):
- values = {}
-
- if "get_qualities" in options:
- qualities, acceptabilities = self.get_qualities(
- detect_resp["landmarks_input_images"]
- )
- values["qualities"] = qualities
- values["acceptabilities"] = acceptabilities
-
- if any(option in AVAILABLE_OPTIONS for option in options):
- (landmarks, recognition_input_images) = self.find_landmarks(
- detect_resp["landmarks_input_bounding_boxes"],
- detect_resp["landmarks_input_images"],
- detect_resp["alignment_bounding_boxes"],
- detect_resp["alignment_images"],
- )
-
- values["landmarks"] = [Landmarks(*x) for x in landmarks]
- values["recognition_input_images"] = recognition_input_images
-
- if "compute_embeddings" in options:
- values["embeddings"] = [
- Embedding(data, scoring_mode)
- for data in self.compute_embeddings(recognition_input_images)
- ]
-
- return values
-
- def run(self, imgs, scoring_mode, options=None):
- if options is None:
- options = []
-
- detection_inputs = []
- detection_input_sizes = []
-
- for img in imgs:
- img_for_fd, resized_size = self.prepare_for_detection(img)
-
- detection_inputs.append(img_for_fd)
- detection_input_sizes.append(resized_size)
-
- relative_bboxes, confidences, img_indexes = self.engine.predict_bounding_boxes(
- detection_inputs
- )
-
- values = {"confidences": confidences}
-
- # post-process detection
- detect_resp = self.process_post_detection(
- imgs, relative_bboxes, detection_input_sizes, img_indexes
- )
- values.update(detect_resp)
-
- # process options
- options_resp = self.process_detection_options(
- detect_resp, scoring_mode, options
- )
- values.update(options_resp)
-
- return values, img_indexes
-
- def run_from_landmarks(self, img, bboxes):
- absolute_bboxes = []
- alignment_images = []
- alignment_bounding_boxes = []
- landmarks_input_bounding_boxes = []
- landmarks_input_images = []
-
- for absolute_bbox in bboxes:
- img_size = np.asarray(img.shape[:2])
- bounding_box = np.array(
- [
- absolute_bbox.origin.x,
- absolute_bbox.origin.y,
- absolute_bbox.origin.x + absolute_bbox.width,
- absolute_bbox.origin.y + absolute_bbox.height,
- ]
- )
-
- if bounding_box[0] > img_size[1] or bounding_box[1] > img_size[0]:
- continue
-
- square_bb = utils.square(bounding_box)
- landmarks_input_bbox, landmarks_input_image = self.prepare_for_landmarks(
- img, square_bb, img_size
- )
- _, alignment_bbox, alignment_image = utils.expand_and_crop(
- img, ALIGNMENT_EXPAND_FACTOR, square_bb, img_size
- )
-
- absolute_bboxes.append(bounding_box)
- alignment_images.append(alignment_image)
- alignment_bounding_boxes.append(alignment_bbox)
- landmarks_input_images.append(landmarks_input_image)
- landmarks_input_bounding_boxes.append(landmarks_input_bbox)
-
- (landmarks, recognition_input_images) = self.find_landmarks(
- landmarks_input_bounding_boxes,
- landmarks_input_images,
- alignment_bounding_boxes,
- alignment_images,
- )
-
- values = {
- "bounding_boxes": [BoundingBox(*_bb) for _bb in absolute_bboxes],
- "landmarks_input_bounding_boxes": landmarks_input_bounding_boxes,
- "landmarks_input_images": landmarks_input_images,
- "alignment_bounding_boxes": alignment_bounding_boxes,
- "alignment_images": alignment_images,
- "landmarks": [Landmarks(*x) for x in landmarks],
- "recognition_input_images": recognition_input_images,
- }
-
- return values
-
- def find_landmarks(
- self,
- landmarks_input_bounding_boxes,
- landmarks_input_images,
- alignment_bounding_boxes,
- alignment_images,
- ):
- if len(landmarks_input_bounding_boxes) == 0:
- return [], []
-
- relative_landmarks = self.engine.predict_landmarks(landmarks_input_images)
- relative_landmarks = relative_landmarks.reshape(-1, 5, 2)
- absolute_landmarks = []
- recognition_input_images = []
-
- for i, landmarks in enumerate(relative_landmarks):
- landmarks_input_bounding_box = landmarks_input_bounding_boxes[i]
- alignment_bounding_box = alignment_bounding_boxes[i]
- alignment_image = alignment_images[i]
- landmarks = utils.normalize(landmarks_input_bounding_box, landmarks)
- recognition_input_image = utils.crop_and_align(
- alignment_image,
- landmarks - alignment_bounding_box[:2],
- self.engine.fr_input_shape,
- )
-
- absolute_landmarks.append(landmarks)
- recognition_input_images.append(recognition_input_image)
-
- return absolute_landmarks, recognition_input_images
-
- def compute_embeddings(self, recognition_input_images):
- if len(recognition_input_images) == 0:
- return []
-
- return self.engine.predict_embeddings(recognition_input_images)
-
- def get_attributes(self, recognition_input_images):
- if len(recognition_input_images) == 0:
- return [], []
-
- return self.engine.predict_attributes(recognition_input_images)
-
- def get_fr_input_shape(self):
- return self.engine.fr_input_shape
-
- def get_fr_output_shape(self):
- return self.engine.fr_output_shape
-
- def check_for_mask(self, landmarks_input_images):
- if len(landmarks_input_images) == 0:
- return []
-
- return self.engine.check_for_masks(landmarks_input_images)
-
- def get_qualities(self, landmarks_input_images):
- if len(landmarks_input_images) == 0:
- return [], []
-
- qualities, acceptabilities = self.engine.get_qualities(landmarks_input_images)
- qualities = np.clip(qualities, 0, 1).tolist()
- acceptabilities = np.clip(acceptabilities, 0, 1).tolist()
- return qualities, acceptabilities
diff --git a/modules/paravision/recognition/_utils.py b/modules/paravision/recognition/_utils.py
deleted file mode 100644
index cd165c7fb..000000000
--- a/modules/paravision/recognition/_utils.py
+++ /dev/null
@@ -1,310 +0,0 @@
-import json
-import cv2
-import importlib
-import numpy as np
-
-from os import walk, path
-
-from .engine import Engine
-from .exceptions import ModelLoadingException, InternalErrorException
-from .types import Face
-
-OPENVINO_EXT = "xml"
-TENSORRT_EXT = "onnx"
-MODELS_DIRECTORY = "recognition"
-
-KEYS = {
- "acceptabilities": "acceptability",
- "bounding_boxes": "bounding_box",
- "confidences": "score",
- "recognition_input_images": "recognition_input_image",
- "embeddings": "embedding",
- "landmarks_input_images": "landmarks_input_image",
- "mask_input_images": "mask_input_image",
- "landmarks_input_bounding_boxes": "landmarks_input_bounding_box",
- "alignment_bounding_boxes": "alignment_bounding_box",
- "alignment_images": "alignment_image",
- "qualities": "quality",
-}
-
-_SQUARE_TO_POINTS = [
- [38.2946, 51.6963],
- [73.5318, 51.5014],
- [56.0252, 71.7366],
- [41.5493, 92.3655],
- [70.7299, 92.2041],
-]
-
-
-def model_location():
- try:
- paravision_models = importlib.import_module("paravision_models")
-
- return paravision_models.location()
- except ModuleNotFoundError as err:
- raise ModelLoadingException(
- "You need to install Paravision Models package"
- ) from err
-
-
-def match_engine():
- try:
- paravision_models = importlib.import_module("paravision_models")
-
- return paravision_models.engine()
- except ModuleNotFoundError as err:
- raise ModelLoadingException(
- "You need to install Paravision Models package"
- ) from err
-
-
-def match_engine_given_path(models_dir):
- (_, _, filenames) = next(walk(path.join(models_dir, MODELS_DIRECTORY)))
-
- if any(OPENVINO_EXT in f_name for f_name in filenames):
- return Engine.OPENVINO
-
- if any(TENSORRT_EXT in f_name for f_name in filenames):
- return Engine.TENSORRT
-
- raise ModelLoadingException(
- "No compatible models found. Please ensure that your model path is correct."
- )
-
-
-def mask_model_location():
- try:
- mask = importlib.import_module("paravision_models.mask")
-
- return mask.location()
- except ModuleNotFoundError as err:
- raise ModelLoadingException(
- "You need to install Paravision Mask Model package"
- ) from err
-
-
-def read_spec_value(model_loc, key):
- try:
- with open(path.join(model_loc, "spec.json"), "r", encoding="utf-8") as f:
- spec = json.load(f)
-
- return spec[key]
- except (FileNotFoundError, KeyError) as err:
- raise ModelLoadingException(
- "Invalid spec file. Please verify the models are installed correctly."
- ) from err
-
-
-def build_faces(graph_dict):
- faces = []
- for values in zip(*graph_dict.values()):
- face_dict = {KEYS.get(k, k): v for k, v in zip(graph_dict.keys(), values)}
- face_dict["bounding_box"].score = face_dict.get("score", None)
- face = Face(face_dict["bounding_box"])
- face_dict.pop("bounding_box")
- face_dict.pop("score", None)
- for k, v in face_dict.items():
- setattr(face, k, v)
- faces.append(face)
-
- return faces
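# A minimal sketch (illustrative, not from the original module) of the zip
# mechanic build_faces relies on: parallel plural-keyed lists become one
# singular-keyed dict per face, with keys renamed via the KEYS mapping above.
graph = {"confidences": [0.99, 0.87], "qualities": [0.80, 0.61]}
per_face = [
    {KEYS.get(k, k): v for k, v in zip(graph.keys(), values)}
    for values in zip(*graph.values())
]
# per_face == [{"score": 0.99, "quality": 0.80}, {"score": 0.87, "quality": 0.61}]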
-
-
-def read_fd_input_shape(model_loc, fd_model_type):
- if fd_model_type == "streaming":
- return read_spec_value(model_loc, "fd_streaming_input_shape")
- return read_spec_value(model_loc, "fd_input_shape")
-
-
-def read_lm_input_shape(model_loc):
- return read_spec_value(model_loc, "lm_input_shape")
-
-
-def read_fr_input_shape(model_loc):
- return read_spec_value(model_loc, "fr_input_shape")
-
-
-def read_fr_output_shape(model_loc):
- return read_spec_value(model_loc, "embedding_size")
-
-
-def read_at_input_shape(model_loc):
- return read_spec_value(model_loc, "at_input_shape")
-
-
-def read_em_input_shape(model_loc):
- return read_spec_value(model_loc, "em_input_shape")
-
-
-def read_md_input_shape(model_loc):
- return read_spec_value(model_loc, "md_input_shape")
-
-
-def resize(np_img, height, width):
- return cv2.resize(np_img, (width, height))
-
-
-def expand_bb(bbox, p=1.0):
- """Takes a bounding box and expand by a factor of 1 + p
-
- Args:
- bb: bounding box in the format of [x1, y1, x2, y2]
- p: additive factor
- """
- x1, y1, x2, y2 = bbox
-
- dx = (x2 - x1) * p / 2
- dy = (y2 - y1) * p / 2
-
- x1 -= dx
- y1 -= dy
- x2 += dx
- y2 += dy
-
- return x1, y1, x2, y2
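# Worked example (sketch): with p=1.0 each side length doubles (a factor of
# 1 + p), expanding symmetrically around the box center.
assert expand_bb((0, 0, 100, 60), p=1.0) == (-50.0, -30.0, 150.0, 90.0)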
-
-
-def restrict_bbox_to_edges(h, w, bbox):
- x1, y1, x2, y2 = bbox
-
- x1 = max(x1, 0)
- y1 = max(y1, 0)
- x2 = min(x2, w)
- y2 = min(y2, h)
-
- return x1, y1, x2, y2
-
-
-def maybe_pad(crop_img, exp_bbox, h, w):
- x1, y1, x2, y2 = exp_bbox
-
- pc1 = max(0 - x1, 0)
- pc2 = max(0, x2 - w)
- pr1 = max(0 - y1, 0)
- pr2 = max(0, y2 - h)
- pad = np.rint(np.array([(pr1, pr2), (pc1, pc2), (0, 0)])).astype(np.int32)
- crop_pad_img = np.pad(crop_img, pad, mode="constant")
-
- return crop_pad_img
-
-
-def square(bb):
- x1, y1, x2, y2 = bb
- padding = ((x2 - x1) - (y2 - y1)) / 2
-
- if padding < 0:
- x1 += padding
- x2 -= padding
- elif padding > 0:
- y1 -= padding
- y2 += padding
-
- return x1, y1, x2, y2
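# Worked example (sketch): square() pads the shorter axis so the box becomes
# square while keeping its center; a 100x60 box gains 20px above and below.
assert square((0, 0, 100, 60)) == (0, -20.0, 100, 80.0)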
-
-
-def crop(np_img, bb, h, w):
- """Simple crop function in numpy
-
- Args:
- np_img: H x W x C image
- bb: list or tuple of format (x1, y1, x2, y2)
-
- Returns:
- cropped numpy image
- """
- x1, y1, x2, y2 = bb
-
- if x1 >= x2 or y1 >= y2:
- raise InternalErrorException("Invalid bounding box for image cropping.")
-
- x1 = max(x1, 0)
- y1 = max(y1, 0)
- x2 = min(x2, w)
- y2 = min(y2, h)
-
- x1, y1, x2, y2 = np.rint([x1, y1, x2, y2]).astype(np.int32)
-
- return np_img[y1:y2, x1:x2, :]
-
-
-def compute_transform(src_points, dst_points):
- """estimate the rigid transform needed to transform src_points into
- dst_points
- """
- points1 = np.asarray(src_points)
- points2 = np.asarray(dst_points)
-
- # zero-mean
- center1 = np.expand_dims(np.mean(points1, axis=0), axis=0)
- center2 = np.expand_dims(np.mean(points2, axis=0), axis=0)
- points1 -= center1
- points2 -= center2
-
- std1 = np.std(points1)
- std2 = np.std(points2)
- points1 /= std1
- points2 /= std2
-
- U, _, V = np.linalg.svd(points1.T.dot(points2))
- R = (U.dot(V)).T
- trans = np.hstack(
- ((std2 / std1) * R, center2.T - ((std2 / std1) * R).dot(center1.T))
- )
-
- return trans
-
-
-def crop_and_align(np_img, from_points, img_shape):
- h, w = img_shape
- trans = compute_transform(from_points, _SQUARE_TO_POINTS)
-
- return cv2.warpAffine(np_img, trans, (w, h))
-
-
-def normalize(exp_bbox, lmks):
- x1, y1, x2, y2 = exp_bbox
- return lmks * [x2 - x1, y2 - y1] + [x1, y1]
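# Worked example (sketch): landmarks predicted in bbox-relative [0, 1]
# coordinates are scaled by the box size and shifted by its origin.
normalize((10, 20, 110, 220), np.array([[0.5, 0.5]]))  # -> array([[ 60., 120.]])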
-
-
-def expand_and_crop(np_img, p, bbox, original_size):
- h, w = original_size
-
- exp_bbox = expand_bb(bbox, p)
- exp_edge_restricted_bbox = restrict_bbox_to_edges(h, w, exp_bbox)
-
- crop_img = crop(np_img, exp_edge_restricted_bbox, h, w)
-
- return exp_bbox, exp_edge_restricted_bbox, crop_img
-
-
-def convert_to_absolute_coordinates(bbox, resized_size, original_size, fd_input_shape):
- h, w = original_size
- ratio = fd_input_shape / np.asarray(resized_size)
-
- return (
- bbox
- * np.asarray([w, h, w, h])
- * np.asarray([ratio[1], ratio[0], ratio[1], ratio[0]])
- )
-
-
-def sigmoid_transform(value, weight, bias):
- return 1 / (1 + np.exp(-(weight * value + bias)))
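# Worked example (sketch): sigmoid_transform squashes any real value into
# (0, 1). With weight=2.1 and bias=-5.3 (the standard match-score constants
# defined in sdk.py), a similarity of 3.0 maps to
# 1 / (1 + exp(-(2.1 * 3.0 - 5.3))) = 1 / (1 + exp(-1.0)) ~= 0.731.
round(float(sigmoid_transform(3.0, 2.1, -5.3)), 3)  # 0.731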
-
-
-def get_model_types(settings):
- fd_model_type = settings.get("detection_model", "default")
- lm_model_type = "default"
- ql_model_type = "default"
- fr_model_type = "default"
- at_model_type = "default"
- md_model_type = "default"
-
- return (
- fd_model_type,
- lm_model_type,
- ql_model_type,
- fr_model_type,
- at_model_type,
- md_model_type,
- )
diff --git a/modules/paravision/recognition/engine.py b/modules/paravision/recognition/engine.py
deleted file mode 100644
index 3d0a597a7..000000000
--- a/modules/paravision/recognition/engine.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from enum import Enum
-
-
-class Engine(str, Enum):
- OPENVINO = "openvino"
- TENSORRT = "tensorrt"
- AUTO = "auto"
-
- @staticmethod
- def all():
- return [Engine.OPENVINO, Engine.TENSORRT, Engine.AUTO]
diff --git a/modules/paravision/recognition/exceptions.py b/modules/paravision/recognition/exceptions.py
deleted file mode 100644
index d6f7511ee..000000000
--- a/modules/paravision/recognition/exceptions.py
+++ /dev/null
@@ -1,15 +0,0 @@
-class ParavisionException(Exception):
- def __init__(self, message):
- self.message = message
-
-
-class ModelLoadingException(ParavisionException):
- pass
-
-
-class InvalidInputException(ParavisionException):
- pass
-
-
-class InternalErrorException(ParavisionException):
- pass
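# A minimal usage sketch (not part of the original file): every SDK error
# derives from ParavisionException, so callers can catch the whole family
# with a single except clause.
try:
    raise ModelLoadingException("models not installed")
except ParavisionException as err:
    print(err.message)  # "models not installed"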
diff --git a/modules/paravision/recognition/exceptions/__init__.pyi b/modules/paravision/recognition/exceptions/__init__.pyi
new file mode 100644
index 000000000..bf8bdc384
--- /dev/null
+++ b/modules/paravision/recognition/exceptions/__init__.pyi
@@ -0,0 +1,28 @@
+from __future__ import annotations
+import paravision.recognition.exceptions
+import typing
+
+__all__ = [
+ "InternalErrorException",
+ "InvalidInputException",
+ "ModelLoadingException",
+ "ParavisionException"
+]
+
+
+class ParavisionException(Exception, BaseException):
+ pass
+
+
+class InvalidInputException(ParavisionException, Exception, BaseException):
+ pass
+
+
+class ModelLoadingException(ParavisionException, Exception, BaseException):
+ pass
+
+
+class InternalErrorException(ParavisionException, Exception, BaseException):
+ pass
+
+
diff --git a/modules/paravision/recognition/openvino/__pycache__/__init__.cpython-310.pyc b/modules/paravision/recognition/openvino/__pycache__/__init__.cpython-310.pyc
deleted file mode 100644
index 4a313b9c9..000000000
Binary files a/modules/paravision/recognition/openvino/__pycache__/__init__.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/openvino/__pycache__/__init__.cpython-36.pyc b/modules/paravision/recognition/openvino/__pycache__/__init__.cpython-36.pyc
deleted file mode 100644
index 00d040814..000000000
Binary files a/modules/paravision/recognition/openvino/__pycache__/__init__.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/openvino/__pycache__/engine.cpython-310.pyc b/modules/paravision/recognition/openvino/__pycache__/engine.cpython-310.pyc
deleted file mode 100644
index a2f2d0a93..000000000
Binary files a/modules/paravision/recognition/openvino/__pycache__/engine.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/openvino/__pycache__/engine.cpython-36.pyc b/modules/paravision/recognition/openvino/__pycache__/engine.cpython-36.pyc
deleted file mode 100644
index 5efa4a119..000000000
Binary files a/modules/paravision/recognition/openvino/__pycache__/engine.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/openvino/engine.py b/modules/paravision/recognition/openvino/engine.py
deleted file mode 100644
index c19cc9559..000000000
--- a/modules/paravision/recognition/openvino/engine.py
+++ /dev/null
@@ -1,245 +0,0 @@
-import multiprocessing
-import numpy as np
-import os
-
-from openvino.inference_engine import IECore
-
-from .. import _utils as utils
-
-UNIT_LOWER_LIMIT = 0
-UNIT_UPPER_LIMIT = 1
-
-FD_NAME = "detection"
-LM_NAME = "landmarks"
-QL_NAME = "quality"
-FR_NAME = "recognition"
-AT_NAME = "attributes"
-MD_NAME = "mask"
-
-BIN_EXT = ".bin"
-XML_EXT = ".xml"
-
-
-class Engine:
- def __init__(self, models_dir, settings):
- ie_core = IECore()
- num_threads = multiprocessing.cpu_count()
- try:
- num_threads = min(
- num_threads, max(int(os.getenv("PV_OPENVINO_THREADS_LIMIT")), 1)
- )
- except (TypeError, ValueError):
- pass
- ie_core.set_config({"CPU_THREADS_NUM": str(num_threads)}, "CPU")
-
- (
- fd_model_type,
- lm_model_type,
- ql_model_type,
- fr_model_type,
- at_model_type,
- md_model_type,
- ) = utils.get_model_types(settings)
-
- fd_net = ie_core.read_network(
- model=os.path.join(models_dir, FD_NAME, fd_model_type, FD_NAME + XML_EXT),
- weights=os.path.join(models_dir, FD_NAME, fd_model_type, FD_NAME + BIN_EXT),
- )
-
- self.fd_input_name = next(iter(fd_net.input_info))
- self.fd_input_shape = utils.read_fd_input_shape(models_dir, fd_model_type)
- self.fd_bboxes_name = "bboxes"
- self.fd_scores_name = "scores"
- self.fd_select_idxs_name = "selected_indices"
- self.fd_net = ie_core.load_network(network=fd_net, device_name="CPU")
-
- lm_net = ie_core.read_network(
- model=os.path.join(models_dir, LM_NAME, lm_model_type, LM_NAME + XML_EXT),
- weights=os.path.join(models_dir, LM_NAME, lm_model_type, LM_NAME + BIN_EXT),
- )
-
- self.lm_input_name = next(iter(lm_net.input_info))
- self.lm_input_shape = utils.read_lm_input_shape(models_dir)
- self.lm_landmarks_name = "landmarks"
- self.lm_net = ie_core.load_network(network=lm_net, device_name="CPU")
-
- ql_net = ie_core.read_network(
- model=os.path.join(models_dir, QL_NAME, ql_model_type, QL_NAME + XML_EXT),
- weights=os.path.join(models_dir, QL_NAME, ql_model_type, QL_NAME + BIN_EXT),
- )
-
- self.ql_input_name = next(iter(ql_net.input_info))
- self.ql_input_shape = utils.read_lm_input_shape(models_dir)
- self.ql_qualities_name = "qualities"
- self.ql_acceptabilities_name = "acceptabilities"
- self.ql_net = ie_core.load_network(network=ql_net, device_name="CPU")
-
- fr_net = ie_core.read_network(
- model=os.path.join(models_dir, FR_NAME, fr_model_type, FR_NAME + XML_EXT),
- weights=os.path.join(models_dir, FR_NAME, fr_model_type, FR_NAME + BIN_EXT),
- )
- self.fr_input_name = next(iter(fr_net.input_info))
- self.fr_input_shape = utils.read_fr_input_shape(models_dir)
- self.fr_output_name = next(iter(fr_net.outputs))
- self.fr_output_shape = utils.read_fr_output_shape(models_dir)
- self.fr_net = ie_core.load_network(network=fr_net, device_name="CPU")
-
- at_net = ie_core.read_network(
- model=os.path.join(models_dir, AT_NAME, at_model_type, AT_NAME + XML_EXT),
- weights=os.path.join(models_dir, AT_NAME, at_model_type, AT_NAME + BIN_EXT),
- )
- self.at_input_name = next(iter(at_net.input_info))
- self.at_input_shape = utils.read_at_input_shape(models_dir)
- self.at_net = ie_core.load_network(network=at_net, device_name="CPU")
-
- if "mask" in settings:
- md_model_path = settings["mask"]["models_dir"]
- md_net = ie_core.read_network(
- model=os.path.join(md_model_path, md_model_type, MD_NAME + XML_EXT),
- weights=os.path.join(md_model_path, md_model_type, MD_NAME + BIN_EXT),
- )
- self.md_input_name = next(iter(md_net.input_info))
- self.md_input_shape = md_net.input_info[
- self.md_input_name
- ].input_data.shape[2:]
- self.md_net = ie_core.load_network(network=md_net, device_name="CPU")
- self.mask_enabled = True
- else:
- self.mask_enabled = False
-
- def predict_bounding_boxes(self, np_imgs):
- """
- Args:
- np_imgs: (list) list of images loaded in numpy, of format (1, H, W, C)
-
- Returns:
-            bboxes: (list) list of bboxes for each image, each in the
-                    order [x1, y1, x2, y2], scaled between 0 and 1
-            confs: (list) list of confidence scores of the faces for
-                    each image
-            img_idxs: (list) index of the source image for each face
- """
- all_bboxes, all_scores, all_face_counts = [], [], []
- np_imgs = np.transpose(np_imgs, (0, 3, 1, 2))
-
- for np_img in np_imgs:
- ie_out = self.fd_net.infer(inputs={self.fd_input_name: np_img})
-
- bboxes = ie_out[self.fd_bboxes_name]
- scores = ie_out[self.fd_scores_name]
- select_idxs = ie_out[self.fd_select_idxs_name]
-
- # keep select_idxs until we see -1
- i = 0
- for idx in select_idxs[:, 0]:
- if idx == -1:
- break
- i += 1
-
- select_idxs = select_idxs[:i]
-
- # filter bboxes and scores based on select_idxs
- for batch_idx, class_idx, idx in select_idxs:
- all_bboxes.append(bboxes[batch_idx][idx])
- all_scores.append(scores[batch_idx][class_idx][idx].item())
-
- all_face_counts.append(len(select_idxs))
-
- img_idxs = []
-
- for img, num in enumerate(all_face_counts):
- img_idxs += [img] * num
-
- return all_bboxes, all_scores, img_idxs
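# A minimal sketch (illustrative) of the -1 sentinel handling above: the NMS
# output pads selected_indices with -1 rows, so only the triplets before the
# first -1 are real (batch_idx, class_idx, box_idx) detections.
import numpy as np
select_idxs = np.array([[0, 0, 3], [0, 0, 7], [-1, -1, -1], [-1, -1, -1]])
rows = select_idxs[:, 0].tolist()
keep = select_idxs[: rows.index(-1) if -1 in rows else len(rows)]
# keep -> [[0, 0, 3], [0, 0, 7]], the two real detections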
-
- def predict_landmarks(self, np_imgs):
- """
- Args:
-            np_imgs: (list) list of images loaded in numpy of format (1, H, W, C)
-        Returns:
-            lmks: (numpy array) landmarks in the shape of (N, 5, 2)
- """
- np_imgs = np.transpose(np_imgs, (0, 3, 1, 2))
- landmarks = []
-
- for np_img in np_imgs:
- ie_out = self.lm_net.infer(inputs={self.lm_input_name: np_img})
- lmks = np.squeeze(ie_out[self.lm_landmarks_name])
- landmarks.append(lmks)
-
- return np.asarray(landmarks)
-
- def get_qualities(self, np_imgs):
- """
- Args:
-            np_imgs: (list) list of images loaded in numpy of format (1, H, W, C)
-        Returns:
-            qualities: (numpy array) quality values between 0 and 1
-            acceptabilities: (numpy array) acceptability values between 0 and 1
- """
- np_imgs = np.transpose(np_imgs, (0, 3, 1, 2))
- qualities, acceptabilities = [], []
-
- for np_img in np_imgs:
- ie_out = self.ql_net.infer(inputs={self.ql_input_name: np_img})
-
- quality = np.squeeze(ie_out[self.ql_qualities_name])
- qualities.append(quality)
-
- acceptability = np.squeeze(ie_out[self.ql_acceptabilities_name])
- acceptabilities.append(acceptability)
-
- return (
- np.clip(qualities, UNIT_LOWER_LIMIT, UNIT_UPPER_LIMIT),
- np.clip(acceptabilities, UNIT_LOWER_LIMIT, UNIT_UPPER_LIMIT),
- )
-
- def predict_embeddings(self, np_imgs):
- """
- Args:
-            np_imgs: (list) list of images loaded in numpy of format (1, H, W, C)
-
- Returns:
- embs: (numpy array) array of embedding arrays
- """
- np_imgs = np.transpose(np_imgs, (0, 3, 1, 2))
- embeddings = []
-
- for np_img in np_imgs:
- ie_out = self.fr_net.infer(inputs={self.fr_input_name: np_img})
- embeddings.append(np.squeeze(ie_out[self.fr_output_name]))
-
- return np.asarray(embeddings)
-
- def predict_attributes(self, np_imgs):
- """
- Args:
-            np_imgs: (list) list of images loaded in numpy of format (1, H, W, C)
-
- Returns:
- ages: (numpy array) age probabilities in the shape of (N, 1, 7)
- genders: (numpy array) gender probabilities in the shape of (N, 1, 2)
- """
- np_imgs = np.transpose(np_imgs, (0, 3, 1, 2))
- ages, genders = [], []
-
- for np_img in np_imgs:
- ie_out = self.at_net.infer(inputs={self.at_input_name: np_img})
- ages.append(ie_out["age_probs"][0])
- genders.append(ie_out["gender_probs"][0])
-
- return ages, genders
-
- def check_for_masks(self, np_imgs):
- """
- Args:
-            np_imgs: (list) list of images loaded in numpy of format (1, H, W, C)
-
- Returns:
- mask_probabilities: (numpy array) mask probabilities in the shape of (N, 1, 4)
- """
- np_imgs = np.transpose(np_imgs, (0, 3, 1, 2))
- mask_probabilities = []
- for np_img in np_imgs:
- ie_out = self.md_net.infer(inputs={self.md_input_name: np_img})
- mask_probabilities.append(list(ie_out.values())[0][0][0])
- return mask_probabilities
diff --git a/modules/paravision/recognition/tests/__init__.py b/modules/paravision/recognition/py.typed
similarity index 100%
rename from modules/paravision/recognition/tests/__init__.py
rename to modules/paravision/recognition/py.typed
diff --git a/modules/paravision/recognition/sdk.py b/modules/paravision/recognition/sdk.py
deleted file mode 100644
index eec08f89f..000000000
--- a/modules/paravision/recognition/sdk.py
+++ /dev/null
@@ -1,471 +0,0 @@
-"""sdk: Instantiate the Paravision model."""
-from typing import List, Optional, Sequence
-import numpy as np
-import warnings
-import os
-
-from ._internal import SplitGraph
-from . import _utils as utils
-
-from .types import (
- BoundingBox,
- Face,
- Embedding,
- InferenceResult,
- ImageInferenceData,
- Landmarks,
- ScoringMode,
-)
-from .exceptions import InvalidInputException, InternalErrorException
-from .engine import Engine
-
-ERR_INVALID_EMB_MODE = "Invalid embedding scoring mode"
-ERR_INVALID_EMB_PREPARED_IMAGE = "Invalid prepared image for embedding"
-
-MATCH_SCORE_SCALE = 1000
-ENHANCED_MATCH_SCORE_WEIGHT = 2.3
-ENHANCED_MATCH_SCORE_BIAS = -0.5
-STANDARD_MATCH_SCORE_WEIGHT = 2.1
-STANDARD_MATCH_SCORE_BIAS = -5.3
-
-
-class SDK:
- """
- SDK()
-
-    An SDK object contains an instance of the Paravision model and its
-    associated resources.
-
-    SDK objects are long-lived and do not need to be re-instantiated between
- method calls.
- """
-
- def __init__(
- self,
- models_dir: Optional[str] = None,
- settings: Optional[dict] = None,
- engine: Engine = Engine.AUTO,
- ):
- """Create a SDK instance."""
-
- if settings is None:
- settings = {}
-
- if models_dir is None:
- models_dir = str(utils.model_location())
- if engine == Engine.AUTO:
- engine = utils.match_engine()
- elif engine == Engine.AUTO:
- engine = utils.match_engine_given_path(models_dir)
-
- if "attributes" not in settings:
- settings["attributes"] = {"models_dir": models_dir}
-
- if "mask" not in settings:
- if os.path.isdir(os.path.join(models_dir, "mask")):
- settings["mask"] = {"models_dir": os.path.join(models_dir, "mask")}
- else:
- try:
- settings["mask"] = {"models_dir": utils.mask_model_location()}
- except Exception:
-                    # TODO: temporary solution to silence SonarCloud; update when logging is added.
- settings.pop("mask", None)
-
- self._graph = SplitGraph(models_dir, settings, engine=engine)
- self._weight = utils.read_spec_value(models_dir, "weight")
- self._bias = utils.read_spec_value(models_dir, "bias")
- self._scoring_mode = settings.get("scoring_mode", ScoringMode.StandardEmbedding)
-
- def get_faces(
- self,
- imgs: Sequence[np.ndarray],
- qualities: bool = False,
- landmarks: bool = False,
- embeddings: bool = False,
- ) -> InferenceResult:
- """
- Detect faces in the image.
-
- Includes bounding boxes, landmarks, and [optionally] image quality
- details.
-
- Accepts a list of NumPy arrays (images).
-
- Returns InferenceResult object.
- """
- options = []
-
- if landmarks is True:
- options.append("find_landmarks")
-
- if embeddings is True:
- options.append("compute_embeddings")
-
- if qualities is True:
- options.append("get_qualities")
-
- outputs, img_idxs = self._graph.run(imgs, self._scoring_mode, options)
-
- faces = utils.build_faces(outputs)
-
- image_inferences = []
- for img in imgs:
- height, width = img.shape[:2]
- image_inferences.append(ImageInferenceData(width, height))
-
- for img_idx, face in zip(img_idxs, faces):
- image_inferences[img_idx].faces.append(face)
-
- return InferenceResult(image_inferences)
-
- def get_qualities(self, faces: Sequence[Face]) -> None:
- """
- Get qualities for faces in the image.
-
- Accepts a list of Face objects.
-
- No return values. Updates the face objects in place with qualities.
- """
- if len(faces) == 0:
- return
-
- imgs = [face.landmarks_input_image for face in faces]
- qualities, acceptabilities = self._graph.get_qualities(imgs)
-
- for face, quality, acceptability in zip(faces, qualities, acceptabilities):
- face.quality = quality
- face.acceptability = acceptability
-
- def get_masks(self, faces: Sequence[Face]) -> None:
- """
- Deprecated: This will be removed in the next major release. An Attributes SDK
- will be provided in the future to replace functionality.
-
- Get the mask probabilities for faces.
-
- Accepts a list of faces.
-
- No return values. Updates the face objects in place with mask probabilities.
- """
- warnings.warn(
- """get_masks is deprecated and will be removed in the next major release.
- An Attributes SDK will be provided in the future to replace functionality.""",
- DeprecationWarning,
- )
-
- if len(faces) == 0:
- return
-
- mask_input_images = []
- for face in faces:
- if face.landmarks_input_image is None:
- raise InvalidInputException(
- "Face.landmarks_input_image is needed but is None"
- )
- mask_input_images.append(face.landmarks_input_image)
-
- probability = self._graph.check_for_mask(mask_input_images)
-
- for i, face in enumerate(faces):
- face.mask = float(probability[i])
-
- def get_bounding_boxes(self, imgs: Sequence[np.ndarray]) -> InferenceResult:
- """
- Detect bounding boxes of faces in the image, returning a list of Faces.
-
- Accepts a list of NumPy arrays (images).
-
- Returns InferenceResult object.
- """
- return self.get_faces(imgs)
-
- def get_landmarks_from_bounding_boxes(
- self, img: np.ndarray, bboxes: Sequence[BoundingBox]
- ) -> InferenceResult:
- outputs = self._graph.run_from_landmarks(img, bboxes)
-
- faces = utils.build_faces(outputs)
- height, width = img.shape[:2]
-
- image_inference = ImageInferenceData(width, height)
- image_inference.faces.extend(faces)
-
- return InferenceResult([image_inference])
-
- def get_landmarks(self, faces: Sequence[Face]):
- """
- Get the landmarks for faces.
-
- Accepts a list of faces.
-
- No return values. Updates the face objects in place with landmark values.
- """
- if len(faces) == 0:
- return
-
- landmarks_input_bounding_boxes = []
- landmarks_input_images = []
- alignment_images = []
- alignment_bounding_boxes = []
-
- for face in faces:
- if face.landmarks_input_image is None:
- raise InvalidInputException("Face.landmarks_input_image is None.")
- if face.landmarks_input_bounding_box is None:
- raise InvalidInputException(
- "Face.landmarks_input_bounding_box is None."
- )
- if face.alignment_image is None:
- raise InvalidInputException("Face.alignment_image is None.")
- if face.alignment_bounding_box is None:
- raise InvalidInputException("Face.alignment_bounding_box is None.")
-
- landmarks_input_images.append(face.landmarks_input_image)
- landmarks_input_bounding_boxes.append(face.landmarks_input_bounding_box)
- alignment_images.append(face.alignment_image)
- alignment_bounding_boxes.append(face.alignment_bounding_box)
-
- landmarks, recognition_input_images = self._graph.find_landmarks(
- landmarks_input_bounding_boxes,
- landmarks_input_images,
- alignment_bounding_boxes,
- alignment_images,
- )
-
- for i, face in enumerate(faces):
- face.landmarks = Landmarks(*landmarks[i])
- face.recognition_input_image = recognition_input_images[i]
-
- def get_embeddings(self, faces: Sequence[Face]):
- """
- Get embeddings for faces.
-
- Accepts a list of Face objects.
-
- No return values. Updates the face objects in place with embeddings.
- """
- if len(faces) == 0:
- return
-
- recognition_input_images = []
- for face in faces:
- if face.recognition_input_image is None:
- raise InvalidInputException("Face.recognition_input_image is None.")
- recognition_input_images.append(face.recognition_input_image)
-
- embeddings = self._graph.compute_embeddings(recognition_input_images)
-
- for i, face in enumerate(faces):
- face.embedding = Embedding(embeddings[i], self._scoring_mode)
-
- def get_embeddings_from_landmarks(
- self, image: np.ndarray, landmarks: Sequence[Landmarks]
- ) -> List[Embedding]:
- recognition_input_images = [
- utils.crop_and_align(
- image, landmark.astuple(), self._graph.engine.fr_input_shape
- )
- for landmark in landmarks
- ]
-
- return [
- Embedding(data, self._scoring_mode)
- for data in self._graph.compute_embeddings(recognition_input_images)
- ]
-
- def get_embedding_from_prepared_image(
- self, prepared_image: np.ndarray
- ) -> Embedding:
- """
-        Compute an embedding from the prepared image, i.e. the recognition_input_image.
-
- Accepts one prepared image.
-
- Returns embedding.
- """
-
- if prepared_image is None:
- raise InvalidInputException(ERR_INVALID_EMB_PREPARED_IMAGE)
-
- embeddings = self._graph.compute_embeddings([prepared_image])
-
- return Embedding(embeddings[0], self._scoring_mode)
-
- def get_attributes(self, faces: Sequence[Face]):
- """
- Deprecated: This will be removed in the next major release. An Attributes SDK
- will be provided in the future to replace functionality.
-
- Computes age and gender attributes for faces.
-
- Accepts a list of Face objects.
-
- No return values. Updates the face objects in place with age and gender values.
- """
- warnings.warn(
- """get_attributes is deprecated and will be removed in the next major release.
- An Attributes SDK will be provided in the future to replace functionality.""",
- DeprecationWarning,
- )
-
- if len(faces) == 0:
- return
-
- recognition_input_images = []
- for face in faces:
- if face.recognition_input_image is None:
- raise InvalidInputException("Face.recognition_input_image is None.")
- recognition_input_images.append(face.recognition_input_image)
-
- ages, genders = self._graph.get_attributes(recognition_input_images)
-
- for i, face in enumerate(faces):
- face.ages = ages[i]
- face.genders = genders[i]
-
- @staticmethod
- def _get_standard_score(emb1: Embedding, emb2: Embedding) -> float:
- """
-        Compute the similarity score of two face embeddings based on the Euclidean
-        distance between them. A larger number indicates greater similarity;
-        a lower number indicates a greater difference between the two embeddings.
-
-        Accepts 2 embedding objects. Assumes the scoring mode of the embeddings is standard.
-
-        Returns a float in [0, 4]. If either embedding is not in standard scoring mode,
-        an InvalidInputException is raised.
- """
- if (
- emb1.scoring_mode != ScoringMode.StandardEmbedding
- or emb1.scoring_mode != emb2.scoring_mode
- ):
- raise InvalidInputException(ERR_INVALID_EMB_MODE)
-
- score = 4 - np.sum((emb1.data - emb2.data) ** 2)
- return float(np.clip(score, 0, 4))
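# Worked example (sketch, assuming unit-norm embeddings, as the [0, 4] range
# implies): for unit vectors ||e1 - e2||^2 = 2 - 2*<e1, e2> lies in [0, 4],
# so identical embeddings score 4.0 and diametrically opposed ones score 0.0.
import numpy as np
e = np.array([1.0, 0.0])
float(np.clip(4 - np.sum((e - e) ** 2), 0, 4))  # 4.0 (identical)
float(np.clip(4 - np.sum((e + e) ** 2), 0, 4))  # 0.0 (e vs. -e)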
-
- @staticmethod
- def _get_enhanced_score(emb1: Embedding, emb2: Embedding) -> float:
- """
-        Compute a quality-aware similarity score between two face embeddings. A larger
-        number indicates greater similarity; a lower number indicates a greater
-        difference between the two embeddings.
-
-        Accepts 2 embedding objects.
-
-        Returns a float in [0, 2]. If either embedding is not in enhanced scoring mode,
-        an InvalidInputException is raised.
- """
- if (
- emb1.scoring_mode != ScoringMode.EnhancedEmbedding
- or emb1.scoring_mode != emb2.scoring_mode
- ):
- raise InvalidInputException(ERR_INVALID_EMB_MODE)
-
- base_emb1, uncertainty1 = emb1.data[:-1], emb1.data[-1]
- base_emb2, uncertainty2 = emb2.data[:-1], emb2.data[-1]
-
- total_uncertainty = uncertainty1 + uncertainty2
- if total_uncertainty < 0:
- raise InternalErrorException("Uncertainty values cannot be negative.")
-
- attention = 2 * (1 - base_emb1 @ base_emb2) / (1e-10 + total_uncertainty)
- dist = attention + np.log(1e-10 + total_uncertainty)
-
- score = np.exp(-dist)
- return float(np.clip(score, 0, 2))
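# A standalone restatement (sketch) of the quality-aware score above, where
# u1 and u2 are the uncertainty scalars stored as the last element of each
# enhanced embedding vector.
import numpy as np

def enhanced_score_sketch(emb1, emb2, eps=1e-10):
    b1, u1 = emb1[:-1], emb1[-1]  # base embedding + trailing uncertainty
    b2, u2 = emb2[:-1], emb2[-1]
    attention = 2 * (1 - b1 @ b2) / (eps + u1 + u2)
    dist = attention + np.log(eps + u1 + u2)  # low uncertainty -> sharper score
    return float(np.clip(np.exp(-dist), 0, 2))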
-
- @staticmethod
- def get_similarity(emb1: Embedding, emb2: Embedding) -> float:
-
- """
-        Compute the similarity score of two face embeddings. A larger number indicates
-        greater similarity; a lower number indicates a greater difference between
-        the two embeddings.
-
-        Accepts 2 embedding objects.
-
-        Returns a float in [0, 2] for enhanced mode or [0, 4] for standard mode.
-        Raises InvalidInputException if either embedding is None, the embeddings
-        differ in size, or their scoring_mode values differ.
- """
- if not (
- isinstance(emb1, Embedding)
- and isinstance(emb2, Embedding)
- and len(emb1.data) == len(emb2.data)
- ):
- raise InvalidInputException("Invalid input embedding")
-
- if emb1.scoring_mode != emb2.scoring_mode:
- raise InvalidInputException("Scoring mode mismatch for input embeddings")
-
- if emb1.scoring_mode == ScoringMode.EnhancedEmbedding:
- score = SDK._get_enhanced_score(emb1, emb2)
- elif emb1.scoring_mode == ScoringMode.StandardEmbedding:
- score = SDK._get_standard_score(emb1, emb2)
- else:
- raise InvalidInputException(ERR_INVALID_EMB_MODE)
-
- return score
-
- @staticmethod
- def get_match_score(emb1: Embedding, emb2: Embedding) -> int:
- """
-        Compute the match score of two face embeddings. A larger number indicates
-        greater similarity; a lower number indicates a greater difference between
-        the two embeddings.
-
-        Accepts 2 embedding objects.
-
-        Returns an int in [0, 1000]. Raises InvalidInputException if either
-        embedding is None, the embeddings differ in size, or their scoring_mode
-        values differ.
- """
- similarity = SDK.get_similarity(emb1, emb2)
- match_score = -1
-
- if emb1.scoring_mode == ScoringMode.EnhancedEmbedding:
- match_score = round(
- utils.sigmoid_transform(
- similarity, ENHANCED_MATCH_SCORE_WEIGHT, ENHANCED_MATCH_SCORE_BIAS
- )
- * MATCH_SCORE_SCALE
- )
- elif emb1.scoring_mode == ScoringMode.StandardEmbedding:
- match_score = round(
- utils.sigmoid_transform(
- similarity, STANDARD_MATCH_SCORE_WEIGHT, STANDARD_MATCH_SCORE_BIAS
- )
- * MATCH_SCORE_SCALE
- )
- else:
- raise InvalidInputException(ERR_INVALID_EMB_MODE)
-
- return int(np.clip(match_score, 0, 1000))
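# Worked example (sketch, standard mode): the similarity in [0, 4] passes
# through sigmoid_transform with weight 2.1 and bias -5.3, is scaled by 1000,
# rounded, and clipped to [0, 1000]:
#   similarity 4.0 -> round(1000 / (1 + exp(-(2.1 * 4.0 - 5.3)))) = 957
#   similarity 0.0 -> round(1000 / (1 + exp(5.3)))                = 5
# So even a perfect standard-mode match lands near 957, not 1000.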
-
- def get_confidence(self, emb1: Embedding, emb2: Embedding) -> float:
- """
- Deprecated: This will be removed in the next major release. Use the
- get_match_score or get_similarity functions instead.
-
- Compute the probability of two faces being the same using the standard mode.
-
- Accepts 2 embedding objects.
-
-        Returns a float in [0, 1]. Raises InvalidInputException if either
-        embedding is None, the embeddings differ in size, or their scoring_mode
-        values differ.
- """
- warnings.warn(
- """get_confidence is deprecated and will be removed in the next major release.
- Use the get_match_score or get_similarity functions instead.""",
- DeprecationWarning,
- )
-
- if emb1 is not None and emb1.scoring_mode == ScoringMode.EnhancedEmbedding:
- emb1 = Embedding(emb1.data, ScoringMode.StandardEmbedding)
- if emb2 is not None and emb2.scoring_mode == ScoringMode.EnhancedEmbedding:
- emb2 = Embedding(emb2.data, ScoringMode.StandardEmbedding)
-
- score = self.get_similarity(emb1, emb2)
- return float(utils.sigmoid_transform(score, self._weight, self._bias))
diff --git a/modules/paravision/recognition/sdk/__init__.pyi b/modules/paravision/recognition/sdk/__init__.pyi
new file mode 100644
index 000000000..d1da51a5a
--- /dev/null
+++ b/modules/paravision/recognition/sdk/__init__.pyi
@@ -0,0 +1,219 @@
+from __future__ import annotations
+import paravision.recognition.sdk
+import typing
+import numpy
+import paravision.recognition.types
+_Shape = typing.Tuple[int, ...]
+
+__all__ = [
+ "Metadata",
+ "SDK"
+]
+
+
+class Metadata():
+ def __init__(self) -> None: ...
+
+ @property
+ def embedding_size(self) -> int:
+ """
+ The embedding size of the Recognition models being used.
+
+ :type: int
+ """
+ @embedding_size.setter
+ def embedding_size(self, arg0: int) -> None:
+ """
+ The embedding size of the Recognition models being used.
+ """
+ @property
+ def engine(self) -> str:
+ """
+ The engine or accelerator of the Recognition SDK instance being used.
+
+ :type: str
+ """
+ @engine.setter
+ def engine(self, arg0: str) -> None:
+ """
+ The engine or accelerator of the Recognition SDK instance being used.
+ """
+ @property
+ def engine_version(self) -> str:
+ """
+ The version of the engine or accelerator being used.
+
+ :type: str
+ """
+ @engine_version.setter
+ def engine_version(self, arg0: str) -> None:
+ """
+ The version of the engine or accelerator being used.
+ """
+ @property
+ def generation(self) -> int:
+ """
+ The generation of the Recognition SDK.
+
+ :type: int
+ """
+ @generation.setter
+ def generation(self, arg0: int) -> None:
+ """
+ The generation of the Recognition SDK.
+ """
+ @property
+ def model(self) -> str:
+ """
+ The name of the Recognition models.
+
+ :type: str
+ """
+ @model.setter
+ def model(self, arg0: str) -> None:
+ """
+ The name of the Recognition models.
+ """
+ @property
+ def model_version(self) -> str:
+ """
+ The version of the Recognition models.
+
+ :type: str
+ """
+ @model_version.setter
+ def model_version(self, arg0: str) -> None:
+ """
+ The version of the Recognition models.
+ """
+ @property
+ def sdk_version(self) -> str:
+ """
+ The version of the Recognition SDK.
+
+ :type: str
+ """
+ @sdk_version.setter
+ def sdk_version(self, arg0: str) -> None:
+ """
+ The version of the Recognition SDK.
+ """
+ pass
+
+
+class SDK():
+ """
+ SDK()
+
+    An SDK object contains an instance of the Paravision model and its
+    associated resources.
+
+    SDK objects are long-lived and do not need to be re-instantiated between
+ method calls.
+ """
+ def __init__(self, models_dir: typing.Optional[str] = None, settings: typing.Optional[paravision.recognition.types.Settings] = None) -> None:
+ """
+ Create a new SDK instance with settings as a struct
+ """
+
+ @typing.overload
+ def get_bounding_boxes(self, imgs: list[numpy.ndarray], image_source: paravision.recognition.types.ImageSource = ImageSource.UNKNOWN) -> paravision.recognition.types.InferenceResult:
+ """
+ Detect bounding boxes of faces in the image, returning a list of Faces.
+ """
+
+ @typing.overload
+ def get_bounding_boxes(self, imgs: list[paravision.recognition.types.Image], detection_model: paravision.recognition.types.ImageSource = '') -> paravision.recognition.types.InferenceResult:
+ """
+ Accepts a list of NumPy arrays (images).
+ """
+
+ @typing.overload
+ def get_embedding_from_prepared_image(self, prepared_image: numpy.ndarray) -> paravision.recognition.types.Embedding:
+ """
+ Get the embedding for a prepared image.
+ """
+
+ @typing.overload
+ def get_embedding_from_prepared_image(self, prepared_image: paravision.recognition.types.Image) -> paravision.recognition.types.Embedding:
+ """
+ Accepts one prepared image (numpy array).
+ """
+
+ def get_embeddings(self, faces: list[paravision.recognition.types.Face]) -> None:
+ """
+ Get the embeddings for faces.
+ """
+
+ @typing.overload
+ def get_embeddings_from_landmarks(self, image: numpy.ndarray, landmarks: list[paravision.recognition.types.Landmarks]) -> list[paravision.recognition.types.Embedding]:
+ """
+ Get the embeddings for faces.
+ """
+
+ @typing.overload
+ def get_embeddings_from_landmarks(self, image: paravision.recognition.types.Image, landmarks: list[paravision.recognition.types.Landmarks]) -> list[paravision.recognition.types.Embedding]:
+ """
+ Accepts a NumPy array (image) and a list of landmarks.
+ """
+
+ @typing.overload
+ def get_faces(self, imgs: list[numpy.ndarray], qualities: bool = False, landmarks: bool = False, embeddings: bool = False, image_source: paravision.recognition.types.ImageSource = ImageSource.UNKNOWN) -> paravision.recognition.types.InferenceResult:
+ """
+ Detect faces in the image.
+ """
+
+ @typing.overload
+ def get_faces(self, imgs: list[paravision.recognition.types.Image], qualities: bool = False, landmarks: bool = False, embeddings: bool = False, image_source: paravision.recognition.types.ImageSource = ImageSource.UNKNOWN) -> paravision.recognition.types.InferenceResult:
+ """
+ Includes bounding boxes, landmarks, and [optionally] image quality
+ details.
+ """
+
+ def get_landmarks(self, faces: list[paravision.recognition.types.Face]) -> None:
+ """
+ Get the landmarks for faces.
+ """
+
+ @typing.overload
+ def get_landmarks_from_bounding_boxes(self, img: numpy.ndarray, bboxes: list[paravision.recognition.types.BoundingBox]) -> paravision.recognition.types.InferenceResult:
+ """
+ Get the landmarks from a bounding box.
+ """
+
+ @typing.overload
+ def get_landmarks_from_bounding_boxes(self, img: paravision.recognition.types.Image, bboxes: list[paravision.recognition.types.BoundingBox]) -> paravision.recognition.types.InferenceResult:
+ """
+ Accepts a NumPy array (image) and a list of bounding boxes.
+ """
+
+ @staticmethod
+ def get_match_score(emb1: paravision.recognition.types.Embedding, emb2: paravision.recognition.types.Embedding, scoring_mode: paravision.recognition.types.ScoringMode = ScoringMode.EnhancedEmbedding) -> int:
+ """
+    Compute the match score of two face embeddings. A larger number indicates
+    greater similarity; a lower number indicates a greater difference between
+    the two embeddings.
+ """
+
+ @staticmethod
+ def get_metadata(models_dir: typing.Optional[str] = None) -> Metadata:
+ """
+ Returns metadata for SDK and model info.
+ """
+
+ def get_qualities(self, faces: list[paravision.recognition.types.Face]) -> None:
+ """
+ Get the quality of the faces in the image.
+ """
+
+ @staticmethod
+ def get_similarity(emb1: paravision.recognition.types.Embedding, emb2: paravision.recognition.types.Embedding, scoring_mode: paravision.recognition.types.ScoringMode = ScoringMode.EnhancedEmbedding) -> float:
+ """
+    Compute the similarity score of two face embeddings. A larger number indicates
+    greater similarity; a lower number indicates a greater difference between
+    the two embeddings.
+ """
+
+ pass
+
+
diff --git a/modules/paravision/recognition/tensorrt/__pycache__/__init__.cpython-36.pyc b/modules/paravision/recognition/tensorrt/__pycache__/__init__.cpython-36.pyc
deleted file mode 100644
index 40e01976f..000000000
Binary files a/modules/paravision/recognition/tensorrt/__pycache__/__init__.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tensorrt/__pycache__/builder.cpython-36.pyc b/modules/paravision/recognition/tensorrt/__pycache__/builder.cpython-36.pyc
deleted file mode 100644
index 4bfa2d24c..000000000
Binary files a/modules/paravision/recognition/tensorrt/__pycache__/builder.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tensorrt/__pycache__/engine.cpython-36.pyc b/modules/paravision/recognition/tensorrt/__pycache__/engine.cpython-36.pyc
deleted file mode 100644
index 18cd6ff59..000000000
Binary files a/modules/paravision/recognition/tensorrt/__pycache__/engine.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tensorrt/__pycache__/utils.cpython-36.pyc b/modules/paravision/recognition/tensorrt/__pycache__/utils.cpython-36.pyc
deleted file mode 100644
index 613834324..000000000
Binary files a/modules/paravision/recognition/tensorrt/__pycache__/utils.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tensorrt/builder.py b/modules/paravision/recognition/tensorrt/builder.py
deleted file mode 100644
index b473555fe..000000000
--- a/modules/paravision/recognition/tensorrt/builder.py
+++ /dev/null
@@ -1,142 +0,0 @@
-import tensorrt as trt
-import os
-from pathlib import Path
-
-from ..exceptions import (
- InvalidInputException,
- ModelLoadingException,
-)
-from contextlib import ExitStack
-
-LOGGER = trt.Logger(trt.Logger.Severity.ERROR)
-
-DEFAULT_DETECTION_MAX_BATCH_SIZE = 1
-DEFAULT_QUALITY_MAX_BATCH_SIZE = 4
-DEFAULT_LANDMARKS_MAX_BATCH_SIZE = 4
-DEFAULT_RECOGNITION_MAX_BATCH_SIZE = 4
-DEFAULT_ATTRIBUTES_MAX_BATCH_SIZE = 4
-DEFAULT_MASK_MAX_BATCH_SIZE = 4
-NUM_CHANNELS_RGB = 3
-MAX_WORKSPACE_SIZE = 1 << 28
-
-trt.init_libnvinfer_plugins(LOGGER, "")
-
-PLUGIN_CREATORS = trt.get_plugin_registry().plugin_creator_list
-
-
-def _get_max_batch_size(name, settings):
- if name == "detection":
- # batching is not enabled for detection yet
- return DEFAULT_DETECTION_MAX_BATCH_SIZE
-
- if name == "landmarks":
- size = settings.get(
- "landmarks_max_batch_size", DEFAULT_LANDMARKS_MAX_BATCH_SIZE
- )
- elif name == "recognition":
- size = settings.get(
- "recognition_max_batch_size", DEFAULT_RECOGNITION_MAX_BATCH_SIZE
- )
- elif name == "attributes":
- size = settings.get(
- "attributes_max_batch_size", DEFAULT_ATTRIBUTES_MAX_BATCH_SIZE
- )
- elif name == "mask":
- size = settings.get("mask_max_batch_size", DEFAULT_MASK_MAX_BATCH_SIZE)
- elif name == "quality":
- size = settings.get("quality_max_batch_size", DEFAULT_QUALITY_MAX_BATCH_SIZE)
- else:
- raise InvalidInputException("Batch size is not specified")
-
- return size
-
-
-def build_engine(name, models_dir, models_type, engine_path, settings, shape):
- if name == "mask":
- model_file = os.path.join(models_dir, models_type, f"{name}.onnx")
- else:
- model_file = os.path.join(models_dir, name, models_type, f"{name}.onnx")
-
- batch_size = _get_max_batch_size(name, settings)
-
- trt_version = int(trt.__version__.split(".")[0])
- if trt_version >= 8:
- # -1 indicates dynamic batching. Does not work for detection model currently
- input_shape = [
- batch_size if name == "detection" else -1,
- NUM_CHANNELS_RGB,
- ] + list(shape)
- net_flags = 1 << (int)(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH)
- else:
- raise ModelLoadingException(
- "TensorRT version 8 or higher required to build engine"
- )
-
- if not os.path.isfile(model_file):
- return None
-
- with ExitStack() as stack:
- builder = stack.enter_context(trt.Builder(LOGGER))
- config = stack.enter_context(builder.create_builder_config())
- network = stack.enter_context(builder.create_network(net_flags))
- parser = stack.enter_context(trt.OnnxParser(network, LOGGER))
-
- success = parser.parse_from_file(model_file)
- if not success:
- raise ModelLoadingException(f"Cannot parse {name} model.")
-
- builder.max_batch_size = batch_size
- config.max_workspace_size = MAX_WORKSPACE_SIZE
-
- profile = _create_opt_profile(builder, network, batch_size)
- config.add_optimization_profile(profile)
-
- network.get_input(0).shape = input_shape
- serialized_engine = builder.build_serialized_network(network, config)
- if serialized_engine is None:
- raise ModelLoadingException(f"Cannot serialize {name} engine.")
-
- engine_dir = Path(engine_path).parent
- engine_dir.mkdir(parents=True, exist_ok=True)
- with open(engine_path, "wb") as f:
- f.write(serialized_engine)
-
- return serialized_engine
-
-
-def _create_opt_profile(builder, network, max_batch_size):
- profile = builder.create_optimization_profile()
-
- if network.num_inputs <= 0:
- return profile
-
- input_ = network.get_input(0)
-
- min_shape = trt.Dims(input_.shape)
- min_shape[0] = 1
-
- opt_shape = trt.Dims(input_.shape)
- opt_shape[0] = max_batch_size
-
- max_shape = trt.Dims(input_.shape)
- max_shape[0] = max_batch_size
-
- profile.set_shape(input_.name, min_shape, opt_shape, max_shape)
-
- return profile
-
-
-def load_engine(name, engine_path, models_dir, models_type, settings, input_shape):
- if not os.path.isfile(engine_path):
- serialized_engine = build_engine(
- name, models_dir, models_type, engine_path, settings, input_shape
- )
- else:
- with open(engine_path, "rb") as f:
- serialized_engine = f.read()
-
- if not serialized_engine:
- raise ModelLoadingException(f"Cannot build {name} engine.")
-
- runtime = trt.Runtime(LOGGER)
- return runtime.deserialize_cuda_engine(serialized_engine)
diff --git a/modules/paravision/recognition/tensorrt/engine.py b/modules/paravision/recognition/tensorrt/engine.py
deleted file mode 100644
index 1d9fe0e3d..000000000
--- a/modules/paravision/recognition/tensorrt/engine.py
+++ /dev/null
@@ -1,462 +0,0 @@
-import os
-import importlib
-
-import numpy as np
-import pycuda.driver as cuda
-import pycuda.autoinit # noqa
-
-from .. import _utils as utils
-from ..exceptions import (
- ModelLoadingException,
-)
-from . import utils as trt_utils
-from .builder import load_engine
-
-QUALITIES_QUALITIES_NAME = "qualities"
-QUALITIES_ACCEPTABILITIES_NAME = "acceptabilities"
-LANDMARKS_LANDMARKS_NAME = "landmarks"
-
-ATTRIBUTES_AGES_NAME = "age_probs"
-ATTRIBUTES_GENDERS_NAME = "gender_probs"
-
-UNIT_LOWER_LIMIT = 0
-UNIT_UPPER_LIMIT = 1
-
-ERR_ENGINE_UNINITIALIZED = "The engine is not initialized."
-ERR_MASK_MODEL_NOT_LOADED = "Mask model not loaded."
-
-FD_NAME = "detection"
-LM_NAME = "landmarks"
-QL_NAME = "quality"
-FR_NAME = "recognition"
-AT_NAME = "attributes"
-MD_NAME = "mask"
-
-ENGINE_EXT = ".engine"
-
-
-class Engine:
- def __init__(self, models_dir, settings):
- engine_dirpath = models_dir
-
- try:
- paravision_models = importlib.import_module("paravision_models")
-
- if paravision_models.location() == models_dir:
- engine_dirpath = paravision_models.TRT_ENGINE_PATH
- except (ModuleNotFoundError, AttributeError):
- pass
-
- self.stream = cuda.Stream()
-
- (
- fd_model_type,
- lm_model_type,
- ql_model_type,
- fr_model_type,
- at_model_type,
- md_model_type,
- ) = utils.get_model_types(settings)
-
- self.fd_input_shape = utils.read_fd_input_shape(models_dir, fd_model_type)
-
- fd_engine_path = os.path.join(
- engine_dirpath, FD_NAME, fd_model_type, FD_NAME + ENGINE_EXT
- )
- self.fd_engine = load_engine(
- FD_NAME,
- fd_engine_path,
- models_dir,
- fd_model_type,
- settings,
- self.fd_input_shape,
- )
- if self.fd_engine:
- self.fd_context = self.fd_engine.create_execution_context()
- (
- self.fd_inputs,
- self.fd_outputs,
- self.fd_data,
- self.fd_bindings,
- ) = trt_utils.allocate_buffers(self.fd_engine)
-
- self.lm_input_shape = utils.read_lm_input_shape(models_dir)
- lm_engine_path = os.path.join(
- engine_dirpath, LM_NAME, lm_model_type, LM_NAME + ENGINE_EXT
- )
- self.lm_engine = load_engine(
- LM_NAME,
- lm_engine_path,
- models_dir,
- lm_model_type,
- settings,
- self.lm_input_shape,
- )
- if self.lm_engine:
- self.lm_context = self.lm_engine.create_execution_context()
- (
- self.lm_inputs,
- self.lm_outputs,
- self.lm_data,
- self.lm_bindings,
- ) = trt_utils.allocate_buffers(self.lm_engine)
-
- self.ql_input_shape = utils.read_lm_input_shape(models_dir)
- ql_engine_path = os.path.join(
- engine_dirpath, QL_NAME, ql_model_type, QL_NAME + ENGINE_EXT
- )
- self.ql_engine = load_engine(
- QL_NAME,
- ql_engine_path,
- models_dir,
- ql_model_type,
- settings,
- self.ql_input_shape,
- )
- if self.ql_engine:
- self.ql_context = self.ql_engine.create_execution_context()
- (
- self.ql_inputs,
- self.ql_outputs,
- self.ql_data,
- self.ql_bindings,
- ) = trt_utils.allocate_buffers(self.ql_engine)
-
- self.fr_input_shape = utils.read_fr_input_shape(models_dir)
- fr_engine_path = os.path.join(
- engine_dirpath, FR_NAME, fr_model_type, FR_NAME + ENGINE_EXT
- )
- self.fr_engine = load_engine(
- FR_NAME,
- fr_engine_path,
- models_dir,
- fr_model_type,
- settings,
- self.fr_input_shape,
- )
- if self.fr_engine:
- self.fr_context = self.fr_engine.create_execution_context()
- (
- self.fr_inputs,
- self.fr_outputs,
- self.fr_data,
- self.fr_bindings,
- ) = trt_utils.allocate_buffers(self.fr_engine)
- self.fr_output_shape = utils.read_fr_output_shape(models_dir)
-
- self.at_input_shape = utils.read_at_input_shape(models_dir)
- at_engine_path = os.path.join(
- engine_dirpath, AT_NAME, at_model_type, AT_NAME + ENGINE_EXT
- )
- self.at_engine = load_engine(
- AT_NAME,
- at_engine_path,
- models_dir,
- at_model_type,
- settings,
- self.at_input_shape,
- )
- if self.at_engine:
- self.at_context = self.at_engine.create_execution_context()
- (
- self.at_inputs,
- self.at_outputs,
- self.at_data,
- self.at_bindings,
- ) = trt_utils.allocate_buffers(self.at_engine)
-
- # Mask input image is prepared separately as the shape can deviate from landmark input images.
- if "mask" in settings:
- md_model_path = settings["mask"]["models_dir"]
- md_engine_path = os.path.join(
- md_model_path, md_model_type, MD_NAME + ENGINE_EXT
- )
-
- self.md_input_shape = utils.read_md_input_shape(models_dir)
- self.md_engine = load_engine(
- MD_NAME,
- md_engine_path,
- md_model_path,
- md_model_type,
- settings,
- self.md_input_shape,
- )
- if self.md_engine:
- self.md_context = self.md_engine.create_execution_context()
- (
- self.md_inputs,
- self.md_outputs,
- self.md_data,
- self.md_bindings,
- ) = trt_utils.allocate_buffers(self.md_engine)
- self.mask_enabled = True
- else:
- self.mask_enabled = False
-
- def predict_bounding_boxes(self, np_imgs):
- """
- Args:
- np_imgs: (list) list of images loaded in numpy, of format (1, H, W, C)
-
- Returns:
-            bboxes: (list) list of bboxes for each image, each in the
-                    order [x1, y1, x2, y2], scaled between 0 and 1
-            confs: (list) list of confidence scores of the faces for
-                    each image
-            img_idxs: (list) index of the source image for each face
- """
- if not self.fd_engine:
- raise ModelLoadingException(ERR_ENGINE_UNINITIALIZED)
-
- max_batch_size = self.fd_engine.max_batch_size
- bboxes, confidences, img_idxs = [], [], []
-
- for i in range(0, len(np_imgs), max_batch_size):
- batch = np_imgs[i : min(len(np_imgs), i + max_batch_size)]
- (
- bboxes_batch,
- confidences_batch,
- img_idxs_batch,
- ) = self._batch_predict_bounding_boxes(batch)
-
- bboxes.extend(bboxes_batch)
- confidences.extend(confidences_batch)
- img_idxs.extend(img_idxs_batch + i)
-
- bboxes = np.asarray(bboxes).reshape(-1, 4)
- confidences = np.asarray(confidences).reshape(-1)
-
- return bboxes, confidences, img_idxs
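# A minimal sketch (illustrative) of the chunking pattern used throughout
# this engine: inputs are fed to the fixed-capacity TensorRT context in
# slices of at most max_batch_size, and per-batch image indices are offset
# by the slice start (the `+ i` above) so they stay global.
def chunks(items, max_batch_size):
    for start in range(0, len(items), max_batch_size):
        yield start, items[start : start + max_batch_size]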
-
- def _batch_predict_bounding_boxes(self, np_imgs):
- np_imgs = np.transpose(np.asarray(np_imgs), [0, 3, 1, 2]).astype(np.float32)
- batch_size = len(np_imgs)
-
- results = trt_utils.do_inference(
- self.fd_context,
- bindings=self.fd_bindings,
- inputs=self.fd_inputs,
- input_data=np_imgs,
- outputs=self.fd_outputs,
- output_data=self.fd_data,
- stream=self.stream,
- batch_size=batch_size,
- )
-
- num_detections = int(results[0])
- bboxes = results[1].reshape(-1, 4)[:num_detections]
- scores = results[2][:num_detections].tolist()
- indexes = results[3][:num_detections].astype(np.int32)
- return bboxes, scores, indexes
-
- def predict_landmarks(self, np_imgs):
- """
- Args:
-            np_imgs: (list) list of images loaded in numpy of format (1, H, W, C)
-        Returns:
-            lmks: (numpy array) landmarks in the shape of (N, 5, 2)
- """
- if not self.lm_engine:
- raise ModelLoadingException(ERR_ENGINE_UNINITIALIZED)
-
- max_batch_size = self.lm_engine.max_batch_size
- lmks = []
-
- for i in range(0, len(np_imgs), max_batch_size):
- batch = np_imgs[i : min(len(np_imgs), i + max_batch_size)]
- lmks_batch = self._batch_predict_landmarks(batch)
-
- lmks.extend(lmks_batch)
-
- return np.asarray(lmks)
-
- def _batch_predict_landmarks(self, np_imgs):
- np_imgs = np.transpose(np_imgs, [0, 3, 1, 2]).astype(np.float32)
- batch_size = len(np_imgs)
- results = trt_utils.do_inference(
- self.lm_context,
- bindings=self.lm_bindings,
- inputs=self.lm_inputs,
- input_data=np_imgs,
- outputs=self.lm_outputs,
- output_data=self.lm_data,
- stream=self.stream,
- batch_size=batch_size,
- )
-
-        # Because we pre-allocate the buffer to accommodate the max batch size,
-        # the trailing elements of the results will be 0 unless we are finding
-        # landmarks for exactly max_batch_size faces, so we explicitly grab
-        # only the elements we want.
- landmarks = results[self.lm_engine[LANDMARKS_LANDMARKS_NAME] - 1].reshape(
- -1, 10
- )[:batch_size]
-
- return landmarks
-
- def predict_embeddings(self, np_imgs):
- """
- Args:
- np_imgs: (list) list of images loaded in numpy of format (1, H, W, C)
-
- Returns:
- embs: (numpy array) array of embedding arrays
- """
- if not self.fr_engine:
- raise ModelLoadingException(ERR_ENGINE_UNINITIALIZED)
-
- max_batch_size = self.fr_engine.max_batch_size
- batch_size = len(np_imgs)
-
- embeddings = []
-
- for i in range(0, batch_size, max_batch_size):
- batch = np_imgs[i : min(batch_size, i + max_batch_size)]
- embs = self._batch_predict_embeddings(batch)
-
- embeddings.extend(embs)
-
- return np.asarray(embeddings).reshape(batch_size, -1)
-
- def _batch_predict_embeddings(self, np_imgs):
- np_imgs = np.transpose(np_imgs, [0, 3, 1, 2]).astype(np.float32)
- batch_size = len(np_imgs)
- results = trt_utils.do_inference(
- self.fr_context,
- bindings=self.fr_bindings,
- inputs=self.fr_inputs,
- input_data=np_imgs,
- outputs=self.fr_outputs,
- output_data=self.fr_data,
- stream=self.stream,
- batch_size=batch_size,
- )
-
- return results[0]
-
- def predict_attributes(self, np_imgs):
- if not self.at_engine:
- raise ModelLoadingException(ERR_ENGINE_UNINITIALIZED)
-
- max_batch_size = self.at_engine.max_batch_size
- batch_size = len(np_imgs)
- all_ages, all_genders = [], []
-
- for i in range(0, batch_size, max_batch_size):
- batch = np_imgs[i : min(batch_size, i + max_batch_size)]
- ages, genders = self._batch_predict_attributes(batch)
- all_ages.extend(ages)
- all_genders.extend(genders)
-
- return all_ages, all_genders
-
- def _batch_predict_attributes(self, np_imgs):
- """
- Args:
- np_img: (numpy array) img loaded in numpy of format (1, H, W, C)
-
- Returns:
- age_probs: (numpy array) age probabilities in the shape of (N, 1, 7)
- gender_probs: (numpy array) gender probabilities in the shape of (N, 1, 2)
- """
- np_imgs = np.transpose(np_imgs, [0, 3, 1, 2]).astype(np.float32)
- batch_size = len(np_imgs)
- results = trt_utils.do_inference(
- self.at_context,
- bindings=self.at_bindings,
- inputs=self.at_inputs,
- input_data=np_imgs,
- outputs=self.at_outputs,
- output_data=self.at_data,
- batch_size=batch_size,
- stream=self.stream,
- )
-
- ages = results[self.at_engine[ATTRIBUTES_AGES_NAME] - 1].reshape(-1, 7)[
- :batch_size
- ]
- genders = results[self.at_engine[ATTRIBUTES_GENDERS_NAME] - 1].reshape(-1, 2)[
- :batch_size
- ]
-
- return [ages, genders]
-
- def get_qualities(self, np_imgs):
- """
- Args:
- np_imgs: (list) imgs loaded in numpy of format (1, H, W, C)
- Returns:
- qualities: (numpy array) qualities values between 0 and 1
- """
- if not self.ql_engine:
- raise ModelLoadingException(ERR_ENGINE_UNINITIALIZED)
-
- max_batch_size = self.ql_engine.max_batch_size
- qualities, acceptabilities = [], []
-
- for i in range(0, len(np_imgs), max_batch_size):
- batch = np_imgs[i : min(len(np_imgs), i + max_batch_size)]
- qualities_batch, acceptabilities_batch = self._batch_get_qualities(batch)
- qualities.extend(qualities_batch)
- acceptabilities.extend(acceptabilities_batch)
-
- return (
- np.clip(qualities, UNIT_LOWER_LIMIT, UNIT_UPPER_LIMIT),
- np.clip(acceptabilities, UNIT_LOWER_LIMIT, UNIT_UPPER_LIMIT),
- )
-
- def _batch_get_qualities(self, np_imgs):
- np_imgs = np.transpose(np_imgs, [0, 3, 1, 2]).astype(np.float32)
- batch_size = len(np_imgs)
- results = trt_utils.do_inference(
- self.ql_context,
- bindings=self.ql_bindings,
- inputs=self.ql_inputs,
- input_data=np_imgs,
- outputs=self.ql_outputs,
- output_data=self.ql_data,
- stream=self.stream,
- batch_size=batch_size,
- )
-
- qualities = results[self.ql_engine[QUALITIES_QUALITIES_NAME] - 1][:batch_size]
- acceptabilities = results[self.ql_engine[QUALITIES_ACCEPTABILTIES_NAME] - 1][
- :batch_size
- ]
-
- return qualities, acceptabilities
-
- def check_for_masks(self, np_imgs):
- if not self.md_engine:
- raise ModelLoadingException(ERR_MASK_MODEL_NOT_LOADED)
-
- max_batch_size = self.md_engine.max_batch_size
- batch_size = len(np_imgs)
- mask_probabilities = []
-
- for i in range(0, batch_size, max_batch_size):
- batch = np_imgs[i : min(batch_size, i + max_batch_size)]
- mask_probabilities.extend(self._batch_check_for_masks(batch))
-
- return np.asarray(mask_probabilities)
-
- def _batch_check_for_masks(self, np_imgs):
- """
- Args:
- np_imgs: (list) imgs loaded in numpy of format (1, H, W, C)
- Returns:
- mask_probs: (numpy array) mask probabilities in the shape of (N, 1, 1)
- """
- np_imgs = np.transpose(np_imgs, [0, 3, 1, 2]).astype(np.float32)
- results = trt_utils.do_inference(
- self.md_context,
- bindings=self.md_bindings,
- inputs=self.md_inputs,
- input_data=np_imgs,
- outputs=self.md_outputs,
- output_data=self.md_data,
- stream=self.stream,
- batch_size=len(np_imgs),
- )
- return results[0]
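
Every predict_* method above follows the same pattern: chunk the input list into engine-sized batches, transpose NHWC to NCHW, run trt_utils.do_inference, and stitch the per-batch outputs back together. A minimal, self-contained sketch of that pattern (run_batch is a hypothetical stand-in for the do_inference call):

import numpy as np

def predict_in_batches(np_imgs, max_batch_size, run_batch):
    """Chunk np_imgs into engine-sized batches and concatenate the results.

    run_batch stands in for trt_utils.do_inference: it takes an NCHW
    float32 array and returns one result row per image.
    """
    outputs = []
    for i in range(0, len(np_imgs), max_batch_size):
        # The engine is built for a fixed max batch size, so feed at most that many.
        batch = np.asarray(np_imgs[i : i + max_batch_size])
        # Images arrive as (N, H, W, C); the engines expect (N, C, H, W).
        batch = np.transpose(batch, [0, 3, 1, 2]).astype(np.float32)
        outputs.extend(run_batch(batch))
    return np.asarray(outputs)

# Toy run: a stand-in "engine" that returns the per-channel mean of each image.
imgs = [np.random.rand(112, 112, 3) for _ in range(5)]
means = predict_in_batches(imgs, max_batch_size=2, run_batch=lambda b: b.mean(axis=(2, 3)))
assert means.shape == (5, 3)
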
diff --git a/modules/paravision/recognition/tests/__pycache__/__init__.cpython-36.pyc b/modules/paravision/recognition/tests/__pycache__/__init__.cpython-36.pyc
deleted file mode 100644
index 1ee6c213e..000000000
Binary files a/modules/paravision/recognition/tests/__pycache__/__init__.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tests/__pycache__/constants.cpython-36.pyc b/modules/paravision/recognition/tests/__pycache__/constants.cpython-36.pyc
deleted file mode 100644
index a6213c637..000000000
Binary files a/modules/paravision/recognition/tests/__pycache__/constants.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tests/__pycache__/test_sdk_enhanced.cpython-36.pyc b/modules/paravision/recognition/tests/__pycache__/test_sdk_enhanced.cpython-36.pyc
deleted file mode 100644
index a1a288c97..000000000
Binary files a/modules/paravision/recognition/tests/__pycache__/test_sdk_enhanced.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tests/__pycache__/test_sdk_standard.cpython-36.pyc b/modules/paravision/recognition/tests/__pycache__/test_sdk_standard.cpython-36.pyc
deleted file mode 100644
index d2a38af12..000000000
Binary files a/modules/paravision/recognition/tests/__pycache__/test_sdk_standard.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tests/__pycache__/test_transform.cpython-36.pyc b/modules/paravision/recognition/tests/__pycache__/test_transform.cpython-36.pyc
deleted file mode 100644
index 1a6c97789..000000000
Binary files a/modules/paravision/recognition/tests/__pycache__/test_transform.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tests/__pycache__/test_types.cpython-36.pyc b/modules/paravision/recognition/tests/__pycache__/test_types.cpython-36.pyc
deleted file mode 100644
index 03a45c51c..000000000
Binary files a/modules/paravision/recognition/tests/__pycache__/test_types.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tests/__pycache__/utils.cpython-36.pyc b/modules/paravision/recognition/tests/__pycache__/utils.cpython-36.pyc
deleted file mode 100644
index e30ce976a..000000000
Binary files a/modules/paravision/recognition/tests/__pycache__/utils.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision/recognition/tests/constants.py b/modules/paravision/recognition/tests/constants.py
deleted file mode 100644
index 425a20315..000000000
--- a/modules/paravision/recognition/tests/constants.py
+++ /dev/null
@@ -1,36 +0,0 @@
-IMG_NOFACE = "noface.jpg"
-IMG_ONEFACE = "oneface.jpg"
-IMG_MANYFACES = "manyfaces.jpg"
-IMG_ONEFACE_RECO_INPUT_IMG = "recognition_input_image_openvino.png"
-IMG_IDENTITY1_FACE1 = "bhargav.jpg"
-IMG_IDENTITY1_FACE2 = "bhargav-3.jpg"
-
-ERR_MISSING_BBOX = "missing bounding box values"
-ERR_MISSING_SCORE = "missing score value"
-ERR_MISSING_LANDMARKS = "missing landmarks values"
-ERR_MISSING_EMBEDDING = "missing embedding value"
-ERR_MISSING_MASK_PROB = "missing mask probability value"
-ERR_MISSING_FACES = "missing faces"
-
-ERR_UNEXPECTED_LANDMARKS = "unexpected landmarks found"
-ERR_UNEXPECTED_QUALITY = "unexpected quality found"
-ERR_UNEXPECTED_NUM_FACES = "unexpected number of faces found"
-ERR_UNEXPECTED_NUM_INFERENCES = "unexpected number of image inferences found"
-ERR_UNEXPECTED_AGES = "unexpected ages found"
-ERR_UNEXPECTED_GENDERS = "unexpected genders found"
-ERR_UNEXPECTED_AGE = "unexpected age found"
-ERR_UNEXPECTED_GENDER = "unexpected gender found"
-
-ERR_INVALID_MASK_PROB = "invalid mask probability value"
-ERR_INVALID_MPF = "invalid most prominent face"
-ERR_INVALID_SCORING_MODE = "invalid scoring mode"
-ERR_INVALID_EMBEDDING_SIZE = "invalid embedding size"
-ERR_INVALID_AGES = "invalid ages"
-
-ERR_JSON_FACE = "face is not JSON serializable"
-
-MAX_NO_MASK_SCORE = 0.5
-MASK_SCORE = 0.95
-
-EXPECTED_ENHANCED_EMBED_LEN = 257
-EXPECTED_STANDARD_EMBED_LEN = 256
diff --git a/modules/paravision/recognition/tests/test_sdk_enhanced.py b/modules/paravision/recognition/tests/test_sdk_enhanced.py
deleted file mode 100644
index bebcadee2..000000000
--- a/modules/paravision/recognition/tests/test_sdk_enhanced.py
+++ /dev/null
@@ -1,495 +0,0 @@
-import os
-import numpy as np
-from unittest import TestCase
-
-from ..sdk import SDK
-from ..types import ImageInferenceData
-from ..engine import Engine
-from ..utils import load_image
-from ..types import BoundingBox, ScoringMode, Embedding
-from ..exceptions import InvalidInputException
-
-from .utils import is_json_serializable
-from .constants import (
- IMG_NOFACE,
- IMG_ONEFACE,
- IMG_MANYFACES,
- IMG_IDENTITY1_FACE1,
- IMG_IDENTITY1_FACE2,
- IMG_ONEFACE_RECO_INPUT_IMG,
- ERR_MISSING_BBOX,
- ERR_MISSING_SCORE,
- ERR_MISSING_LANDMARKS,
- ERR_MISSING_EMBEDDING,
- ERR_MISSING_MASK_PROB,
- ERR_MISSING_FACES,
- ERR_JSON_FACE,
- ERR_UNEXPECTED_LANDMARKS,
- ERR_UNEXPECTED_QUALITY,
- ERR_UNEXPECTED_NUM_FACES,
- ERR_UNEXPECTED_NUM_INFERENCES,
- ERR_UNEXPECTED_AGES,
- ERR_UNEXPECTED_GENDERS,
- ERR_UNEXPECTED_AGE,
- ERR_UNEXPECTED_GENDER,
- ERR_INVALID_MASK_PROB,
- ERR_INVALID_MPF,
- MAX_NO_MASK_SCORE,
- MASK_SCORE,
- ERR_INVALID_SCORING_MODE,
- ERR_INVALID_EMBEDDING_SIZE,
- ERR_INVALID_AGES,
- EXPECTED_ENHANCED_EMBED_LEN,
-)
-
-ASSETS_PATH = os.path.join(os.path.dirname(__file__), "assets")
-engine_default = None
-scoring_mode = None
-sdk = None
-
-
-class TestSDK(TestCase):
- @classmethod
- def setUpClass(cls):
- global sdk
- global engine_default
- global scoring_mode
-
- engine_default = Engine.OPENVINO
- scoring_mode = ScoringMode.EnhancedEmbedding
-
- sdk = SDK(engine=engine_default, settings={"scoring_mode": scoring_mode})
-
- def setUp(self):
- self.sdk = sdk
-
- def test_load_image_invalid_input(self):
- with self.assertRaises(InvalidInputException):
- load_image("invalid-img.jpg")
-
- def test_empty_case(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_NOFACE))]
- detection_result = self.sdk.get_faces(
- imgs, qualities=True, landmarks=True, embeddings=True
- )
-
- faces = detection_result.faces
- self.assertEqual(len(faces), 0, msg=ERR_UNEXPECTED_NUM_FACES)
-
- image_inferences = detection_result.image_inferences
- self.assertEqual(len(image_inferences), 1, msg=ERR_UNEXPECTED_NUM_INFERENCES)
-
- detection_result = self.sdk.get_bounding_boxes(imgs)
- self.assertEqual(len(detection_result.faces), 0, msg=ERR_UNEXPECTED_NUM_FACES)
-
- def test_get_faces(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_faces(
- imgs, qualities=True, landmarks=True, embeddings=True
- )
-
- faces = detection_result.faces
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- image_inferences = detection_result.image_inferences
- self.assertEqual(len(image_inferences), 1, msg=ERR_UNEXPECTED_NUM_INFERENCES)
-
- self.assert_faces(faces)
-
- def test_get_faces_multiple(self):
- oneface_img = load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))
- noface_img = load_image(os.path.join(ASSETS_PATH, IMG_NOFACE))
- manyface_img = load_image(os.path.join(ASSETS_PATH, IMG_MANYFACES))
-
- imgs = [oneface_img, noface_img, manyface_img]
- detection_result = self.sdk.get_faces(
- imgs, qualities=True, landmarks=True, embeddings=True
- )
-
- faces = detection_result.faces
- self.assertEqual(len(faces), 9, msg=ERR_UNEXPECTED_NUM_FACES)
-
- self.assert_faces(faces)
-
- image_inferences = detection_result.image_inferences
- self.assertEqual(len(image_inferences), 3, msg=ERR_UNEXPECTED_NUM_INFERENCES)
-
- expected_num_faces = [1, 0, 8]
- for i, faces in enumerate(expected_num_faces):
- self.assertEqual(
- len(image_inferences[i].faces),
- faces,
- msg=f"unexpected number of faces found in image inference {i}",
- )
-
- def test_get_attributes(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE1))]
- detection_result = self.sdk.get_faces(imgs, qualities=True, landmarks=True)
- faces = detection_result.faces
- self.assertIsNotNone(faces, msg=ERR_MISSING_FACES)
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- face = faces[0]
- self.assertIsNone(face.ages, msg=ERR_UNEXPECTED_AGES)
- self.assertIsNone(face.genders, msg=ERR_UNEXPECTED_GENDERS)
-
- self.assertIsNone(face.age, msg=ERR_UNEXPECTED_AGE)
- self.assertIsNone(face.gender, msg=ERR_UNEXPECTED_GENDER)
-
- self.sdk.get_attributes(faces)
- self.assertIsNotNone(face.ages, msg="missing ages")
- self.assertIsNotNone(face.genders, msg="missing genders")
-
- self.assertIsNotNone(face.age, msg="missing age")
- self.assertTrue(face.age == "20-30", msg="incorrect age")
- self.assertIsNotNone(face.gender, msg="missing gender")
- self.assertTrue(face.gender == "male", msg="incorrect gender")
-
- self.assertTrue(face.ages[2] > face.ages[0], msg=ERR_INVALID_AGES)
- self.assertTrue(face.ages[2] > face.ages[1], msg=ERR_INVALID_AGES)
- self.assertTrue(face.ages[2] > face.ages[3], msg=ERR_INVALID_AGES)
- self.assertTrue(face.ages[2] > face.ages[4], msg=ERR_INVALID_AGES)
- self.assertTrue(face.ages[2] > face.ages[5], msg=ERR_INVALID_AGES)
- self.assertTrue(face.ages[2] > face.ages[6], msg=ERR_INVALID_AGES)
- self.assertTrue(face.genders[0] > face.genders[1], msg="invalid genders")
-
- self.assertTrue(is_json_serializable(face.asdict()), msg=ERR_JSON_FACE)
-
- def test_get_qualities(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE1))]
- faces = self.sdk.get_faces(imgs).faces
- self.sdk.get_qualities(faces)
- self.assertAlmostEqual(faces[0].quality, 0.925, delta=0.001)
- self.assertAlmostEqual(faces[0].acceptability, 0.999, delta=0.001)
- self.assertTrue(is_json_serializable(faces[0].asdict()), msg=ERR_JSON_FACE)
-
-    def test_get_faces_qualities(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE1))]
- faces = self.sdk.get_faces(imgs, qualities=True).faces
-
- self.assertAlmostEqual(faces[0].quality, 0.925, delta=0.001)
- self.assertTrue(is_json_serializable(faces[0].asdict()), msg=ERR_JSON_FACE)
-
- def test_get_bounding_boxes(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_bounding_boxes(imgs)
- faces = detection_result.faces
-
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- f = faces[0]
- self.assertIsNotNone(f.bounding_box, msg=ERR_MISSING_BBOX)
- self.assertIsNotNone(f.bounding_box.score, msg=ERR_MISSING_SCORE)
- self.assertIsNone(f.landmarks, msg=ERR_UNEXPECTED_LANDMARKS)
- self.assertIsNone(f.quality, msg=ERR_UNEXPECTED_QUALITY)
- self.assertIsNone(f.acceptability, msg="unexpected acceptability")
- self.assertIsNone(f.embedding, msg="unexpected embedding")
- self.assertTrue(is_json_serializable(f.asdict()), msg=ERR_JSON_FACE)
-
- def test_get_landmarks(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_faces(imgs)
- faces = detection_result.faces
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
- self.assertIsNone(faces[0].landmarks, msg=ERR_UNEXPECTED_LANDMARKS)
-
- self.sdk.get_landmarks(faces)
- self.assertIsNotNone(faces[0].landmarks, msg=ERR_MISSING_LANDMARKS)
- self.assertTrue(is_json_serializable(faces[0].asdict()), msg=ERR_JSON_FACE)
-
- def test_get_landmarks_from_bounding_box(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_bounding_boxes(imgs)
- faces = detection_result.faces
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
- self.assertIsNotNone(faces[0].bounding_box, msg=ERR_MISSING_BBOX)
- self.assertIsNone(faces[0].landmarks, msg=ERR_UNEXPECTED_LANDMARKS)
-
- bbox = faces[0].bounding_box
- bounding_box = BoundingBox(
- bbox.origin.x,
- bbox.origin.y,
- bbox.origin.x + bbox.width,
- bbox.origin.y + bbox.height,
- )
- result = self.sdk.get_landmarks_from_bounding_boxes(imgs[0], [bounding_box])
-
- self.assertIsNotNone(result.faces[0].landmarks, msg=ERR_MISSING_LANDMARKS)
- self.assertTrue(
- is_json_serializable(result.faces[0].asdict()), msg=ERR_JSON_FACE
- )
-
- def test_get_embeddings(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_faces(imgs, qualities=True, landmarks=True)
- faces = detection_result.faces
-
- self.sdk.get_embeddings(faces)
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- f = faces[0]
- self.assertIsNotNone(f.bounding_box, msg=ERR_MISSING_BBOX)
- self.assertIsNotNone(f.landmarks, msg=ERR_MISSING_LANDMARKS)
- self.assertIsNotNone(f.embedding, msg=ERR_MISSING_EMBEDDING)
- self.assertEqual(
- f.embedding.scoring_mode,
- ScoringMode.EnhancedEmbedding,
- msg=ERR_INVALID_SCORING_MODE,
- )
- self.assertTrue(is_json_serializable(f.asdict()), msg=ERR_JSON_FACE)
-
- def test_get_embedding_from_landmarks(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_faces(imgs, embeddings=True)
-
- faces = detection_result.faces
- f = faces[0]
- landmarks = f.landmarks
-
- embeddings = self.sdk.get_embeddings_from_landmarks(
- imgs[0], [landmarks, landmarks]
- )
- self.assertEqual(len(embeddings), 2)
- embedding = embeddings[0]
-
- self.assertTrue(embedding.scoring_mode == ScoringMode.EnhancedEmbedding)
- similarity = SDK.get_similarity(f.embedding, embedding)
- self.assertAlmostEqual(similarity, 1.51, delta=0.01)
-
- def test_check_for_mask(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, "woman-wearing-mask.jpg"))]
- detection_result = self.sdk.get_bounding_boxes(imgs)
- faces = detection_result.faces
-
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- self.sdk.get_masks(faces)
- f = faces[0]
- self.assertIsNotNone(f.mask, msg=ERR_MISSING_MASK_PROB)
- self.assertTrue(f.mask >= MASK_SCORE, msg=ERR_INVALID_MASK_PROB)
- self.assertTrue(is_json_serializable(f.asdict()), msg=ERR_JSON_FACE)
-
- def test_check_for_no_mask(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_bounding_boxes(imgs)
- faces = detection_result.faces
-
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- self.sdk.get_masks(faces)
- f = faces[0]
- self.assertIsNotNone(f.mask, msg=ERR_MISSING_MASK_PROB)
- self.assertTrue(f.mask < MAX_NO_MASK_SCORE, msg=ERR_INVALID_MASK_PROB)
- self.assertTrue(is_json_serializable(f.asdict()), msg=ERR_JSON_FACE)
-
- def test_check_for_no_mask_in_many_faces(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_MANYFACES))]
- detection_result = self.sdk.get_bounding_boxes(imgs)
- faces = detection_result.faces
-
- self.assertTrue(len(faces) > 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- self.sdk.get_masks(faces)
-
- for f in faces:
- self.assertIsNotNone(f.mask, msg=ERR_MISSING_MASK_PROB)
- self.assertTrue(f.mask < MAX_NO_MASK_SCORE, msg=ERR_INVALID_MASK_PROB)
-
- def test_get_most_prominent_face_index_oneface(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- infer_result = self.sdk.get_bounding_boxes(imgs)
-
- self.assertTrue(
- len(infer_result.image_inferences) == 1, msg=ERR_UNEXPECTED_NUM_INFERENCES
- )
- self.assertNotEqual(len(infer_result.faces), 0, msg=ERR_UNEXPECTED_NUM_FACES)
-
- infer_image = infer_result.image_inferences[0]
- index = infer_image.most_prominent_face_index()
- self.assertTrue(index == 0, msg=ERR_INVALID_MPF)
-
- def test_get_most_prominent_face_index_manyfaces(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_MANYFACES))]
- infer_result = self.sdk.get_bounding_boxes(imgs)
-
- self.assertTrue(
- len(infer_result.image_inferences) == 1, msg=ERR_UNEXPECTED_NUM_INFERENCES
- )
- self.assertTrue(len(infer_result.faces) > 0, msg=ERR_UNEXPECTED_NUM_FACES)
-
- infer_image = infer_result.image_inferences[0]
- index = infer_image.most_prominent_face_index()
- self.assertTrue(index == 3, msg=ERR_INVALID_MPF)
-
- def test_get_most_prominent_face_index_noface(self):
- infer_image = ImageInferenceData(128, 128)
- index = infer_image.most_prominent_face_index()
- self.assertTrue(index == -1, msg=ERR_INVALID_MPF)
-
- def test_get_most_prominent_face_index_invalid_image_dims(self):
- infer_image = ImageInferenceData(0, 0)
- index = infer_image.most_prominent_face_index()
- self.assertTrue(index == -1, msg=ERR_INVALID_MPF)
-
- def test_scoring_same_image(self):
- img = load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))
- faces = self.sdk.get_faces([img, img], embeddings=True).faces
-
- similarity = SDK.get_similarity(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(similarity, 1.51, delta=0.01)
-
- confidence = self.sdk.get_confidence(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(confidence, 1.0, delta=0.01)
-
- match_score = SDK.get_match_score(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(match_score, 951, delta=2)
-
- def test_scoring_same_identity(self):
- img1 = load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE1))
- img2 = load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE2))
- faces = self.sdk.get_faces([img1, img2], embeddings=True).faces
-
- similarity = SDK.get_similarity(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(similarity, 0.788, delta=0.001)
-
- confidence = self.sdk.get_confidence(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(confidence, 1.0, delta=0.01)
-
- match_score = SDK.get_match_score(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(match_score, 788, delta=2)
-
- def test_scoring_diff_identity(self):
- img1 = load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE1))
- img2 = load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))
- faces = self.sdk.get_faces([img1, img2], embeddings=True).faces
-
- similarity = SDK.get_similarity(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(similarity, 0.05, delta=0.01)
-
- confidence = self.sdk.get_confidence(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(confidence, 0, delta=0.01)
-
- match_score = SDK.get_match_score(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(match_score, 403, delta=2)
-
- def test_get_confidence_invalid_faces(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_MANYFACES))]
- faces = self.sdk.get_faces(imgs).faces
- with self.assertRaises(InvalidInputException):
- self.sdk.get_confidence(faces[0].embedding, faces[1].embedding)
-
- def test_get_similarity_no_embedding(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_MANYFACES))]
- faces = self.sdk.get_faces(imgs).faces
- with self.assertRaises(InvalidInputException):
- SDK.get_similarity(faces[0].embedding, faces[1].embedding)
-
- def test_multi_inference_images(self):
- imgs = [
- load_image(os.path.join(ASSETS_PATH, IMG_MANYFACES)),
- load_image(os.path.join(ASSETS_PATH, IMG_MANYFACES)),
- load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE)),
- ]
- infer_result = self.sdk.get_bounding_boxes(imgs)
-
- self.assertTrue(
- len(infer_result.image_inferences) == 3, msg=ERR_UNEXPECTED_NUM_INFERENCES
- )
- self.assertTrue(
- len(infer_result.image_inferences[0].faces)
- + len(infer_result.image_inferences[1].faces)
- + len(infer_result.image_inferences[2].faces)
- == len(infer_result.faces),
- msg="inference image data mismatches faces len",
- )
-
- def test_inference_image_data(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- infer_result = self.sdk.get_bounding_boxes(imgs)
- faces = infer_result.faces
-
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- self.sdk.get_qualities(faces)
- self.assertAlmostEqual(faces[0].quality, 0.895, delta=0.001)
- self.assertTrue(
- infer_result.image_inferences[0].faces[0].quality == faces[0].quality,
- msg="image inference data and face mismatch",
- )
-
- def test_check_embedding(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- ground_truth = np.load(
- os.path.join(ASSETS_PATH, "oneface_gen5_fast_enhanced_embedding.npy")
- )
-
- detection_result = self.sdk.get_faces(imgs, qualities=True, landmarks=True)
- faces = detection_result.faces
-
- self.sdk.get_embeddings(faces)
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- f = faces[0]
- self.assertEqual(
- len(f.embedding.data), len(ground_truth), msg="Mismatched embedding size"
- )
- self.assertTrue(
- np.allclose(f.embedding.data, ground_truth, rtol=0, atol=35e-4),
- msg="Invalid embedding value",
- )
- self.assertTrue(is_json_serializable(f.asdict()), msg=ERR_JSON_FACE)
-
- def test_get_embedding_from_prepared_image(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_faces(imgs, embeddings=True)
- faces = detection_result.faces
- f = faces[0]
-
- reco_img = load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE_RECO_INPUT_IMG))
- embedding = self.sdk.get_embedding_from_prepared_image(reco_img)
- self.assertTrue(len(embedding.data) == EXPECTED_ENHANCED_EMBED_LEN)
- self.assertTrue(embedding.scoring_mode == scoring_mode)
- self.assertTrue(
- np.allclose(f.embedding.data, embedding.data, rtol=0, atol=0.001),
- msg="Invalid embedding value",
- )
-
- def test_get_embedding_from_prepared_image_none(self):
- with self.assertRaises(InvalidInputException):
- self.sdk.get_embedding_from_prepared_image(None)
-
- def assert_faces(self, faces):
- for f in faces:
- self.assertIsNotNone(f.bounding_box, msg=ERR_MISSING_BBOX)
- self.assertIsNotNone(f.landmarks, msg=ERR_MISSING_LANDMARKS)
- self.assertIsNotNone(f.quality, msg="missing quality")
- self.assertIsNotNone(f.acceptability, msg="missing acceptability")
- self.assertIsNotNone(
- f.recognition_input_image, msg="missing recognition input image"
- )
- self.assertIsNotNone(
- f.landmarks_input_image, msg="missing landmarks input image"
- )
- self.assertIsNotNone(
- f.landmarks_input_bounding_box,
- msg="missing landmarks input bounding box",
- )
- self.assertIsNotNone(f.alignment_image, msg="missing alignment image")
- self.assertIsNotNone(
- f.alignment_bounding_box, msg="missing alignment bounding box"
- )
- self.assertIsNotNone(f.embedding, msg=ERR_MISSING_EMBEDDING)
- self.assertEqual(
- f.embedding.scoring_mode,
- ScoringMode.EnhancedEmbedding,
- msg=ERR_INVALID_SCORING_MODE,
- )
- self.assertTrue(
- len(f.embedding.data) in Embedding.ENHANCED_SIZES,
- msg=ERR_INVALID_EMBEDDING_SIZE,
- )
- self.assertIsNone(f.ages, msg=ERR_UNEXPECTED_AGES)
- self.assertIsNone(f.genders, msg=ERR_UNEXPECTED_GENDERS)
- self.assertIsNone(f.age, msg=ERR_UNEXPECTED_AGE)
- self.assertIsNone(f.gender, msg=ERR_UNEXPECTED_GENDER)
- self.assertTrue(is_json_serializable(f.asdict()), msg=ERR_JSON_FACE)
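
The deleted enhanced-mode tests double as the SDK's de facto usage guide. Condensed, the happy path they exercise looks like this sketch (absolute import paths are inferred from the repo layout; the tests use equivalent relative imports):

from paravision.recognition.sdk import SDK
from paravision.recognition.engine import Engine
from paravision.recognition.types import ScoringMode
from paravision.recognition.utils import load_image

sdk = SDK(engine=Engine.OPENVINO, settings={"scoring_mode": ScoringMode.EnhancedEmbedding})

# One pass: detect faces and compute embeddings for two images.
faces = sdk.get_faces([load_image("a.jpg"), load_image("b.jpg")], embeddings=True).faces

# The tests treat a similarity near 0.788 as same-identity and near 0.05 as
# different-identity; the corresponding match scores land near 788 and 403.
similarity = SDK.get_similarity(faces[0].embedding, faces[1].embedding)
match_score = SDK.get_match_score(faces[0].embedding, faces[1].embedding)
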
diff --git a/modules/paravision/recognition/tests/test_sdk_standard.py b/modules/paravision/recognition/tests/test_sdk_standard.py
deleted file mode 100644
index 72881ec56..000000000
--- a/modules/paravision/recognition/tests/test_sdk_standard.py
+++ /dev/null
@@ -1,141 +0,0 @@
-import os
-import numpy as np
-from unittest import TestCase
-
-from ..sdk import SDK
-from ..engine import Engine
-from ..utils import load_image
-from ..types import ScoringMode, Embedding
-
-from .constants import (
- IMG_ONEFACE,
- IMG_IDENTITY1_FACE1,
- IMG_IDENTITY1_FACE2,
- IMG_ONEFACE_RECO_INPUT_IMG,
- ERR_UNEXPECTED_NUM_FACES,
- ERR_UNEXPECTED_NUM_INFERENCES,
- ERR_MISSING_EMBEDDING,
- ERR_INVALID_SCORING_MODE,
- ERR_INVALID_EMBEDDING_SIZE,
- EXPECTED_STANDARD_EMBED_LEN,
-)
-
-ASSETS_PATH = os.path.join(os.path.dirname(__file__), "assets")
-engine_default = None
-scoring_mode = None
-sdk = None
-
-
-class TestSDK(TestCase):
- @classmethod
- def setUpClass(cls):
- global sdk
- global engine_default
- global scoring_mode
-
- engine_default = Engine.OPENVINO
- scoring_mode = ScoringMode.StandardEmbedding
-
- sdk = SDK(engine=engine_default, settings={"scoring_mode": scoring_mode})
-
- def setUp(self):
- self.sdk = sdk
-
- def test_get_faces_with_embeddings(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_faces(imgs, embeddings=True)
-
- faces = detection_result.faces
- self.assertEqual(len(faces), 1, msg=ERR_UNEXPECTED_NUM_FACES)
-
- image_inferences = detection_result.image_inferences
- self.assertEqual(len(image_inferences), 1, msg=ERR_UNEXPECTED_NUM_INFERENCES)
-
- self.assert_embeddings(faces)
-
- def test_get_embedding_from_landmarks(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_faces(imgs, embeddings=True)
-
- faces = detection_result.faces
- f = faces[0]
- landmarks = f.landmarks
-
- embeddings = self.sdk.get_embeddings_from_landmarks(imgs[0], [landmarks, landmarks])
- self.assertEqual(len(embeddings), 2)
- embedding = embeddings[0]
-
- self.assertTrue(embedding.scoring_mode == ScoringMode.StandardEmbedding)
- similarity = SDK.get_similarity(f.embedding, embedding)
- self.assertAlmostEqual(similarity, 4.0, delta=0.01)
-
- def test_scoring_same_image(self):
- img = load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))
- faces = self.sdk.get_faces([img, img], embeddings=True).faces
-
- similarity = SDK.get_similarity(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(similarity, 4.0, delta=0.01)
-
- confidence = self.sdk.get_confidence(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(confidence, 1.0, delta=0.01)
-
- match_score = SDK.get_match_score(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(match_score, 957, delta=1)
-
- def test_scoring_same_identity(self):
- img1 = load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE1))
- img2 = load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE2))
- faces = self.sdk.get_faces([img1, img2], embeddings=True).faces
-
- similarity = SDK.get_similarity(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(similarity, 3.58, delta=0.01)
-
- confidence = self.sdk.get_confidence(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(confidence, 1.0, delta=0.01)
-
- match_score = SDK.get_match_score(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(match_score, 903, delta=2)
-
- def test_scoring_diff_identity(self):
- img1 = load_image(os.path.join(ASSETS_PATH, IMG_IDENTITY1_FACE1))
- img2 = load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))
- faces = self.sdk.get_faces([img1, img2], embeddings=True).faces
-
- similarity = SDK.get_similarity(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(similarity, 1.85, delta=0.01)
-
- confidence = self.sdk.get_confidence(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(confidence, 0, delta=0.01)
-
- match_score = SDK.get_match_score(faces[0].embedding, faces[1].embedding)
- self.assertAlmostEqual(match_score, 198, delta=2)
-
- def test_get_embedding_from_prepared_image(self):
- imgs = [load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE))]
- detection_result = self.sdk.get_faces(
- imgs, qualities=True, landmarks=True, embeddings=True
- )
- faces = detection_result.faces
- f = faces[0]
-
- reco_img = load_image(os.path.join(ASSETS_PATH, IMG_ONEFACE_RECO_INPUT_IMG))
- embedding = self.sdk.get_embedding_from_prepared_image(reco_img)
- self.assertTrue(len(embedding.data) == EXPECTED_STANDARD_EMBED_LEN)
- self.assertTrue(embedding.scoring_mode == scoring_mode)
- self.assertTrue(
- np.allclose(f.embedding.data, embedding.data, rtol=0, atol=0.001),
- msg="Invalid embedding value",
- )
-
- def assert_embeddings(self, faces):
- for f in faces:
- self.assertIsNotNone(f.embedding, msg=ERR_MISSING_EMBEDDING)
- self.assertEqual(
- f.embedding.scoring_mode,
- ScoringMode.StandardEmbedding,
- msg=ERR_INVALID_SCORING_MODE,
- )
- self.assertTrue(
- len(f.embedding.data) in Embedding.STANDARD_SIZES,
- msg=ERR_INVALID_EMBEDDING_SIZE,
- )
diff --git a/modules/paravision/recognition/tests/test_transform.py b/modules/paravision/recognition/tests/test_transform.py
deleted file mode 100644
index d8e07e460..000000000
--- a/modules/paravision/recognition/tests/test_transform.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import numpy as np
-
-from unittest import TestCase
-
-from .._utils import compute_transform
-
-
-class TestTransform(TestCase):
- def test_transform(self):
- src_points = [
- [146.08132502, 155.9912228],
- [218.04209101, 153.17409003],
- [176.5086686, 207.03067255],
- [153.90101734, 240.53104055],
- [214.96274501, 237.63263655],
- ]
-
- dst_points = [
- [38.2946, 51.6963],
- [73.5318, 51.5014],
- [56.0252, 71.7366],
- [41.5493, 92.3655],
- [70.7299, 92.2041],
- ]
-
- trans = compute_transform(src_points, dst_points)
-
- out = np.asarray(
- [
- [4.79823508e-01, -1.35817363e-02, -2.85523114e01],
- [1.35817363e-02, 4.79823508e-01, -2.59931548e01],
- ]
- )
-
- self.assertTrue(
- (np.isclose(trans.flatten(), out.flatten()).all()),
- msg="The transform wasn't computed sucessfully",
- )
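
compute_transform itself is not part of this diff, but the expected matrix has the [[a, -b, tx], [b, a, ty]] shape of a 2D similarity transform, which can be recovered from landmark pairs with ordinary least squares. A sketch of that math (not necessarily the SDK's implementation):

import numpy as np

def similarity_transform(src, dst):
    """Solve for the 2x3 similarity transform mapping src points onto dst.

    Parameterize the map as (x, y) -> (a*x - b*y + tx, b*x + a*y + ty) and
    solve the resulting linear system in (a, b, tx, ty).
    """
    rows, rhs = [], []
    for (x, y), (u, v) in zip(np.asarray(src, float), np.asarray(dst, float)):
        rows += [[x, -y, 1.0, 0.0], [y, x, 0.0, 1.0]]
        rhs += [u, v]
    (a, b, tx, ty), *_ = np.linalg.lstsq(np.asarray(rows), np.asarray(rhs), rcond=None)
    return np.array([[a, -b, tx], [b, a, ty]])
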
diff --git a/modules/paravision/recognition/tests/test_types.py b/modules/paravision/recognition/tests/test_types.py
deleted file mode 100644
index 09d3a280b..000000000
--- a/modules/paravision/recognition/tests/test_types.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from unittest import TestCase
-
-import numpy as np
-
-from .utils import is_json_serializable
-from ..types import BoundingBox, Face, Embedding, Landmarks, Point, ScoringMode
-
-
-class TestBoundingBox(TestCase):
- def setUp(self):
- self.bb = BoundingBox(1.1, 2.2, 3.3, 4.4)
-
- def test_as_dict(self):
- d = self.bb.asdict()
- props = ["origin", "width", "height"]
-
- for p in props:
- self.assertIn(p, d)
-
- self.assertTrue(is_json_serializable(d))
-
-
-class TestFaceWithStandardScoringMode(TestCase):
- def setUp(self):
- self.face = Face(
- bounding_box=BoundingBox(*np.random.rand(4)),
- )
- self.face.quality = 0.5
- self.face.embedding = Embedding(
- np.random.rand(512), ScoringMode.StandardEmbedding
- )
- landmarks = np.random.rand(5, 2)
- self.face.landmarks = Landmarks(*landmarks)
-
- def test_as_dict(self):
- d = self.face.asdict()
-
- self.assertIn("quality", d)
- self.assertEqual(d["quality"], 0.5)
-
- props = ["bounding_box", "landmarks", "embedding"]
- for p in props:
- self.assertIn(p, d)
- self.assertIsInstance(d[p], dict)
-
- self.assertTrue(is_json_serializable(d))
-
-
-class TestFaceWithEnhancedScoringMode(TestCase):
- def setUp(self):
- self.face = Face(
- bounding_box=BoundingBox(*np.random.rand(4)),
- )
- self.face.embedding = Embedding(
- np.random.rand(513), ScoringMode.EnhancedEmbedding
- )
-
- def test_as_dict(self):
- d = self.face.asdict()
-
- props = ["bounding_box", "embedding"]
- for p in props:
- self.assertIn(p, d)
- self.assertIsInstance(d[p], dict)
-
- self.assertTrue(is_json_serializable(d))
-
-
-class TestLandmarks(TestCase):
- def setUp(self):
- self.landmarks = Landmarks((0, 1), (2, 3), (4, 5), (6, 7), (8, 9))
-
- def test_as_dict(self):
- d = self.landmarks.asdict()
-
- props = ["left_eye", "right_eye", "nose", "left_mouth", "right_mouth"]
- for p in props:
- self.assertIn(p, d)
- self.assertIsInstance(d[p], dict)
-
- self.assertTrue(is_json_serializable(d))
-
-
-class TestPoint(TestCase):
- def setUp(self):
- self.p = Point(1, 2)
-
- def test_as_dict(self):
- d = self.p.asdict()
-
- self.assertIn("x", d)
- self.assertIn("y", d)
- self.assertEqual(d["x"], 1)
- self.assertEqual(d["y"], 2)
- self.assertTrue(is_json_serializable(d))
diff --git a/modules/paravision/recognition/tests/utils.py b/modules/paravision/recognition/tests/utils.py
deleted file mode 100644
index 19a059a17..000000000
--- a/modules/paravision/recognition/tests/utils.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import json
-
-
-def is_json_serializable(x):
- try:
- json.dumps(x)
- return True
- except TypeError:
- return False
diff --git a/modules/paravision/recognition/types.py b/modules/paravision/recognition/types.py
deleted file mode 100644
index 6b32c3c3c..000000000
--- a/modules/paravision/recognition/types.py
+++ /dev/null
@@ -1,624 +0,0 @@
-from typing import Any, Dict, List, Optional, Tuple, Union
-import numpy as np
-from enum import IntEnum
-
-from .exceptions import InvalidInputException
-
-AGE_LABELS = ["2-12", "13-19", "20-30", "31-40", "41-50", "51-60", "60+"]
-GENDER_LABELS = ["male", "female"]
-
-
-class Point:
- """
- A point within an image, represented by x- and y-coordinates.
-
- Attributes
- ----------
- x : float
- The x-coordinate.
- y : float
- The y-coordinate.
- """
-
- def __init__(self, x: float, y: float):
- self._x = x
- self._y = y
-
- @property
- def x(self):
- return self._x
-
- @x.setter
- def x(self, x: float):
- self._x = x
-
- @property
- def y(self):
- return self._y
-
- @y.setter
- def y(self, y: float):
- self._y = y
-
- def __iter__(self):
- yield self.x
- yield self.y
-
- def __repr__(self):
- return f""
-
- def asdict(self):
- """Convert this object to a dictionary"""
- return {"x": self._x, "y": self._y}
-
- def astuple(self):
- """Convert this object to a tuple"""
- return self._x, self._y
-
-
-PointLike = Union[Point, np.ndarray, List[float], Tuple[float, float]]
-
-
-class ScoringMode(IntEnum):
- StandardEmbedding = 1
- EnhancedEmbedding = 2
-
-
-class BoundingBox:
- """
-    A bounding box, represented by origin (top-left point), width, and height.
-
- Attributes
- ----------
- origin : Point
- Point object including coordinates of the top-left corner of the rectangle.
- width : float
- The width of the rectangle.
- height : float
- The height of the rectangle.
-    _score : float
-        The confidence score for a face in the bounding box.
- """
-
- def __init__(self, x1: float, y1: float, x2: float, y2: float):
- self._origin = Point(x1, y1)
- self._width = x2 - x1
- self._height = y2 - y1
- self._score = None
-
- @property
- def origin(self):
- return self._origin
-
- @origin.setter
- def origin(self, origin: Point):
- self._origin = origin
-
- @property
- def width(self):
- return self._width
-
- @width.setter
- def width(self, width: float):
- self._width = width
-
- @property
- def height(self):
- return self._height
-
- @height.setter
- def height(self, height: float):
- self._height = height
-
- @property
- def score(self):
- return self._score
-
- @score.setter
- def score(self, score):
- self._score = score
-
- def __repr__(self):
- return f""
-
- def asdict(self):
- """Convert this object to a dictionary"""
- return {
- "origin": self._origin.asdict(),
- "width": self._width,
- "height": self._height,
- }
-
- def astuple(self) -> Tuple[float, float, float, float]:
- """Convert this object to a tuple"""
- x, y = self._origin.astuple()
- return x, y, self._width, self._height
-
-
-class Landmarks:
- """
- A set of facial landmarks, represented by Points.
-
- Attributes
- ----------
- left_eye : Point
- The center of the left eye.
- right_eye : Point
- The center of the right eye.
- nose : Point
- The tip of the nose.
- left_mouth : Point
- The left corner of the mouth.
- right_mouth : Point
- The right corner of the mouth.
- """
-
- def __init__(
- self,
- left_eye: PointLike,
- right_eye: PointLike,
- nose: PointLike,
- left_mouth: PointLike,
- right_mouth: PointLike,
- ):
- self._left_eye = Point(*left_eye)
- self._right_eye = Point(*right_eye)
- self._nose = Point(*nose)
- self._left_mouth = Point(*left_mouth)
- self._right_mouth = Point(*right_mouth)
-
- @property
- def left_eye(self):
- return self._left_eye
-
- @left_eye.setter
- def left_eye(self, left_eye: PointLike):
- self._left_eye = Point(*left_eye)
-
- @property
- def right_eye(self):
- return self._right_eye
-
- @right_eye.setter
- def right_eye(self, right_eye: PointLike):
- self._right_eye = Point(*right_eye)
-
- @property
- def nose(self):
- return self._nose
-
- @nose.setter
- def nose(self, nose: PointLike):
- self._nose = Point(*nose)
-
- @property
- def left_mouth(self):
- return self._left_mouth
-
- @left_mouth.setter
- def left_mouth(self, left_mouth: PointLike):
- self._left_mouth = Point(*left_mouth)
-
- @property
- def right_mouth(self):
- return self._right_mouth
-
- @right_mouth.setter
- def right_mouth(self, right_mouth: PointLike):
- self._right_mouth = Point(*right_mouth)
-
- def __repr__(self):
-        return (
-            f"<Landmarks left_eye={self._left_eye} right_eye={self._right_eye} "
-            f"nose={self._nose} left_mouth={self._left_mouth} right_mouth={self._right_mouth}>"
-        )
-
- def asdict(self):
- """Convert this object to a dictionary"""
- return {
- "left_eye": self._left_eye.asdict(),
- "right_eye": self._right_eye.asdict(),
- "nose": self._nose.asdict(),
- "left_mouth": self._left_mouth.asdict(),
- "right_mouth": self._right_mouth.asdict(),
- }
-
- def astuple(self):
- """Convert this object to a tuple"""
- return (
- self._left_eye.astuple(),
- self._right_eye.astuple(),
- self._nose.astuple(),
- self._left_mouth.astuple(),
- self._right_mouth.astuple(),
- )
-
-
-class Embedding:
- """
- A numerical representation of a face found in an image.
-
- Attributes
- ----------
- data : numpy.ndarray
- The embedding data representing a face.
- """
-
- STANDARD_SIZES = np.array([256, 512, 1024])
- ENHANCED_SIZES = STANDARD_SIZES + 1
-
- def __init__(
- self,
- data: np.ndarray,
- scoring_mode: ScoringMode = ScoringMode.EnhancedEmbedding,
- ):
- self._validate_data(data, scoring_mode)
- self._data = data
- self._scoring_mode = scoring_mode
-
- @property
- def data(self) -> np.ndarray:
- if (
- self._scoring_mode == ScoringMode.StandardEmbedding
- and len(self._data) in self.ENHANCED_SIZES
- ):
- return self._data[:-1]
- return self._data
-
- @property
- def scoring_mode(self):
- return self._scoring_mode
-
- def asdict(self):
- return {
- "data": self._data.tolist(),
- "scoring_mode": self._scoring_mode.name,
- }
-
- @classmethod
- def _validate_data(cls, data: np.ndarray, scoring_mode: ScoringMode):
- if scoring_mode == ScoringMode.EnhancedEmbedding:
- if len(data) not in cls.ENHANCED_SIZES:
- raise InvalidInputException(
- f"Invalid embedding size, enhanced embedding size must be one of {cls.ENHANCED_SIZES}"
- )
- elif scoring_mode == ScoringMode.StandardEmbedding:
- if (
- len(data) not in cls.ENHANCED_SIZES
- and len(data) not in cls.STANDARD_SIZES
- ):
- raise InvalidInputException(
- f"Invalid embedding size, standard embedding size must be one of "
- f"{cls.ENHANCED_SIZES + cls.STANDARD_SIZES}"
- )
- else:
- raise InvalidInputException("Invalid scoring mode")
-
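# Sketch of the data property's behaviour above (a sketch, not from the
# original module): an enhanced embedding carries one extra trailing element,
# and reading an enhanced-sized vector in standard mode drops it.
#
#     emb = Embedding(np.zeros(257), ScoringMode.StandardEmbedding)
#     assert len(emb.data) == 256  # trailing element stripped
#     emb = Embedding(np.zeros(257), ScoringMode.EnhancedEmbedding)
#     assert len(emb.data) == 257  # kept as-is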
-
-class BaseAttributes:
- """
- This is an empty class inherited by the AttributesSDK.
- For internal use only.
- Customers should use the implementation from Attributes SDK:
-
- from paravision.attributes.types import get_attributes, Attributes
- attr: Attributes = get_attributes(face)
- """
-
-
-class Face:
- """
- A face, minimally represented by a bounding box.
-
- Attributes
- ----------
- bounding_box : BoundingBox
- The bounding box of the face.
-
- landmarks : Landmarks
- The locations of various parts of the face.
- embedding : Embedding
- The embedding representing the face.
- ages: numpy.ndarray
- The probability distribution over the given age groups.
-    genders: numpy.ndarray
- The probability distribution over the given genders.
- quality : float
- An overall assessment of how acceptable the face is for facial
- recognition. Overall range of [0, 1], "acceptable" quality is >= 0.15.
- acceptability: float
- The assessment of the acceptability of the face. Overall range [0, 1]
- mask: float
- The probability of a face wearing mask. Overall range [0, 1]
-    recognition_input_image, alignment_image, landmarks_input_image: numpy.ndarray
-        Images used at different stages of the detection pipeline.
-    alignment_bounding_box, landmarks_input_bounding_box: BoundingBox
-        The bounding boxes that are associated with alignment_image and landmarks_input_image
- """
-
- def __init__(self, bounding_box: Optional[BoundingBox] = None):
- self._bounding_box = bounding_box
-
- self._landmarks = None
- self._embedding = None
-
- self._ages = None
- self._genders = None
-
- self._quality = None
- self._acceptability = None
- self._mask = None
-
- self._recognition_input_image = None
- self._landmarks_input_image = None
- self._landmarks_input_bounding_box = None
- self._alignment_image = None
- self._alignment_bounding_box = None
- self._attributes = None
-
- @property
- def bounding_box(self):
- return self._bounding_box
-
- @bounding_box.setter
- def bounding_box(self, bbox: BoundingBox):
- self._bounding_box = bbox
-
- @property
- def landmarks(self):
- return self._landmarks
-
- @landmarks.setter
- def landmarks(self, landmarks: Landmarks):
- self._landmarks = landmarks
-
- @property
- def embedding(self):
- return self._embedding
-
- @embedding.setter
- def embedding(self, embedding: Embedding):
- self._embedding = embedding
-
- @property
- def age(self):
- return None if self._ages is None else AGE_LABELS[np.argmax(self._ages)]
-
- @property
- def ages(self):
- return self._ages
-
- @ages.setter
- def ages(self, ages: Union[List[float], np.ndarray]):
- self._ages: Optional[List[float]] = [*ages]
-
- @property
- def gender(self):
- return (
- None if self._genders is None else GENDER_LABELS[np.argmax(self._genders)]
- )
-
- @property
- def genders(self):
- return self._genders
-
- @genders.setter
- def genders(self, genders: Union[List[float], np.ndarray]):
- self._genders: Optional[List[float]] = [*genders]
-
- @property
- def quality(self):
- return self._quality
-
- @quality.setter
- def quality(self, quality: float):
- self._quality = quality
-
- @property
- def acceptability(self):
- return self._acceptability
-
- @acceptability.setter
- def acceptability(self, acceptability: float):
- self._acceptability = acceptability
-
- @property
- def mask(self):
- return self._mask
-
- @mask.setter
- def mask(self, mask: float):
- self._mask = mask
-
- @property
- def recognition_input_image(self):
- return self._recognition_input_image
-
- @recognition_input_image.setter
- def recognition_input_image(self, recognition_input_image: np.ndarray):
- self._recognition_input_image = recognition_input_image
-
- @property
- def landmarks_input_image(self):
- return self._landmarks_input_image
-
- @landmarks_input_image.setter
- def landmarks_input_image(self, landmarks_input_image: np.ndarray):
- self._landmarks_input_image = landmarks_input_image
-
- @property
- def landmarks_input_bounding_box(self):
- return self._landmarks_input_bounding_box
-
- @landmarks_input_bounding_box.setter
- def landmarks_input_bounding_box(self, landmarks_input_bbox: BoundingBox):
- self._landmarks_input_bounding_box = landmarks_input_bbox
-
- @property
- def alignment_image(self):
- return self._alignment_image
-
- @alignment_image.setter
- def alignment_image(self, alignment_image: np.ndarray):
- self._alignment_image = alignment_image
-
- @property
- def alignment_bounding_box(self):
- return self._alignment_bounding_box
-
- @alignment_bounding_box.setter
- def alignment_bounding_box(self, alignment_bbox: BoundingBox):
- self._alignment_bounding_box = alignment_bbox
-
- @property
- def attributes(self):
- return self._attributes
-
- @attributes.setter
- def attributes(self, attributes: BaseAttributes):
- self._attributes = attributes
-
- def __repr__(self):
- return "" % (str(self._bounding_box),)
-
- def asdict(self):
- """Convert this object to a dictionary representation."""
- if self._bounding_box is None:
- return {}
-
- face: Dict[str, Any] = {"bounding_box": self._bounding_box.asdict()}
-
- if self._landmarks is not None:
- face["landmarks"] = self._landmarks.asdict()
-
- if self._embedding is not None:
- face["embedding"] = self._embedding.asdict()
-
- if self._quality is not None:
- face["quality"] = self._quality
-
- if self._acceptability is not None:
- face["acceptability"] = self._acceptability
-
- if self._mask is not None:
- face["mask"] = self._mask
-
- return face
-
-
-class ImageInferenceData:
- """
-    The result of running the get faces pipeline on a single image
-
- Attributes
- ----------
- faces : List
- A list of Face objects detected in the image
- width : Int
- width of the inference image
- height : Int
- height of the inference image
- """
-
- def __init__(self, width: int, height: int):
- self._width = width
- self._height = height
- self._faces: List[Face] = []
-
- @property
- def width(self):
- return self._width
-
- @width.setter
- def width(self, width):
- self._width = width
-
- @property
- def height(self):
- return self._height
-
- @height.setter
- def height(self, height):
- self._height = height
-
- @property
- def faces(self):
- return self._faces
-
- @faces.setter
- def faces(self, faces):
- self._faces = faces
-
- def __repr__(self):
- return f""
-
- def most_prominent_face_index(self) -> int:
- if self._height <= 0 or self._width <= 0 or len(self._faces) == 0:
- return -1
-
- face_areas = []
-
- for face in self._faces:
- bb = face.bounding_box
-
- if bb is None:
- continue
-
- x1 = max(min(bb.origin.x, self._width), 0)
- y1 = max(min(bb.origin.y, self._height), 0)
-
- x2 = max(min((bb.origin.x + bb.width), self._width), 0)
- y2 = max(min((bb.origin.y + bb.height), self._height), 0)
-
- area = (x2 - x1) * (y2 - y1)
- face_areas.append(area)
-
- return int(np.argmax(face_areas)) if face_areas else -1
-
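# Worked example of the clipping above (illustrative numbers): in a 100x100
# image, a 30x30 box at (10, 10) scores area 900, while a 40x40 box at
# (90, 10) is clipped to its visible 10x40 strip, area 400, so the fully
# visible box wins despite the other's larger nominal size.
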
- def asdict(self):
- """Convert this object to a dictionary"""
- return {"faces": self._faces}
-
-
-class InferenceResult:
- """
- The result of running the get faces pipeline on a group of images
-
- Attributes
- ----------
- faces : List
- A list of Face objects detected in all images
- image_inferences : List
-        A list of detection results grouped by image
- """
-
- def __init__(self, image_inferences: List[ImageInferenceData]):
- self._image_inferences = image_inferences
- self._faces: List[Face] = []
- for image_inference in image_inferences:
- self._faces.extend(image_inference.faces)
-
- @property
- def image_inferences(self):
- return self._image_inferences
-
- @image_inferences.setter
- def image_inferences(self, image_inferences: List[ImageInferenceData]):
- self._image_inferences = image_inferences
-
- @property
- def faces(self):
- return self._faces
-
- @faces.setter
- def faces(self, faces: List[Face]):
- self._faces = faces
-
- def __repr__(self):
- return f""
-
- def asdict(self):
- """Convert this object to a dictionary"""
- return {"image_inferences": self._image_inferences}
diff --git a/modules/paravision/recognition/types/__init__.pyi b/modules/paravision/recognition/types/__init__.pyi
new file mode 100644
index 000000000..dd850b216
--- /dev/null
+++ b/modules/paravision/recognition/types/__init__.pyi
@@ -0,0 +1,1373 @@
+from __future__ import annotations
+import paravision.recognition.types
+import typing
+import numpy
+_Shape = typing.Tuple[int, ...]
+
+__all__ = [
+ "BaseAttributes",
+ "BoundingBox",
+ "Embedding",
+ "Engine",
+ "Face",
+ "GPUConfig",
+ "Image",
+ "ImageInferenceData",
+ "ImageManipulator",
+ "ImageSource",
+ "InferenceResult",
+ "Landmarks",
+ "Point",
+ "ScoringMode",
+ "Settings",
+ "ValidnessCheck",
+ "ValidnessFeedback",
+ "ValidnessResult"
+]
+
+
+class BaseAttributes():
+ """
+ This is an empty class inherited by the AttributesSDK.
+ For internal use only.
+ Customers should use the implementation from Attributes SDK:
+
+ from paravision.attributes.types import get_attributes, Attributes
+ attr: Attributes = get_attributes(face)
+ """
+ pass
+
+
+class BoundingBox():
+ """
+    A bounding box, represented by origin (top-left point), width, and height.
+
+ Attributes
+ ----------
+ origin : Point
+ Point object including coordinates of the top-left corner of the rectangle.
+ width : float
+ The width of the rectangle.
+ height : float
+ The height of the rectangle.
+    score : float
+        The confidence score for a face in the bounding box.
+ """
+ def __bool__(self) -> bool: ...
+
+ def __eq__(self, arg0: BoundingBox) -> bool: ...
+
+ @typing.overload
+    def __init__(self, cords: typing.Annotated[list[float], "FixedSize(4)"]) -> None:
+ """
+        Construct a BoundingBox from an array of four floats.
+ """
+
+ @typing.overload
+ def __init__(self, origin: Point, width: float, height: float) -> None:
+ """
+ Construct a BoundingBox from an origin Point, width, and height.
+ """
+
+ @typing.overload
+ def __init__(self, x1: float, y1: float, x2: float, y2: float) -> None:
+ """
+        Construct a BoundingBox from x1, y1, x2, and y2.
+ """
+
+ def __ne__(self, arg0: BoundingBox) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def asdict(self) -> dict:
+ """
+ Convert this object to a dictionary
+ """
+
+ def empty(self) -> bool:
+ """
+ Check if this object is empty
+ """
+
+ @property
+ def height(self) -> float:
+ """
+ The height of the bounding box.
+
+ :type: float
+ """
+ @height.setter
+ def height(self, arg1: float) -> None:
+ """
+ The height of the bounding box.
+ """
+ @property
+ def origin(self) -> Point:
+ """
+ The origin of the bounding box.
+
+ :type: Point
+ """
+ @origin.setter
+    def origin(self, arg1: Point) -> None:
+ """
+ The origin of the bounding box.
+ """
+ @property
+ def score(self) -> float:
+ """
+ The score of the bounding box.
+
+ :type: float
+ """
+ @score.setter
+ def score(self, arg1: float) -> None:
+ """
+ The score of the bounding box.
+ """
+ @property
+ def width(self) -> float:
+ """
+ The width of the bounding box.
+
+ :type: float
+ """
+ @width.setter
+ def width(self, arg1: float) -> None:
+ """
+ The width of the bounding box.
+ """
+ __hash__ = None
+ pass
+
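# The three overloads above take equivalent inputs, e.g. (illustrative values):
#     BoundingBox([10.0, 20.0, 110.0, 140.0])       # [x1, y1, x2, y2]
#     BoundingBox(Point(10.0, 20.0), 100.0, 120.0)  # origin, width, height
#     BoundingBox(10.0, 20.0, 110.0, 140.0)         # x1, y1, x2, y2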
+
+class Embedding():
+ """
+ A numerical representation of a face found in an image.
+
+ Attributes
+ ----------
+ data : numpy.ndarray
+ The embedding data representing a face.
+ """
+ def __bool__(self) -> bool: ...
+
+ def __eq__(self, arg0: Embedding) -> bool: ...
+
+ def __init__(self, data: list[float]) -> None:
+ """
+ Construct an Embedding from a vector of floats.
+ """
+
+ def __ne__(self, arg0: Embedding) -> bool: ...
+
+ def asdict(self, scoring_mode: ScoringMode = ScoringMode.EnhancedEmbedding) -> dict:
+ """
+ Convert this object to a dictionary
+ """
+
+ def data(self, scoring_mode: ScoringMode = ScoringMode.EnhancedEmbedding) -> numpy.ndarray:
+ """
+ The data of the embedding.
+ """
+
+ def empty(self) -> bool:
+ """
+ Check if this object is empty
+ """
+
+ ENHANCED_SIZES = [257, 513, 1025]
+ STANDARD_SIZES = [256, 512, 1024]
+ __hash__ = None
+ pass
+
+
+class Engine():
+ """
+ Members:
+
+ AUTO : Automatically select the engine
+
+ OPENVINO : Use the OpenVINO engine
+
+ TENSORRT : Use the TensorRT engine
+ """
+ def __eq__(self, other: object) -> bool: ...
+
+ def __getstate__(self) -> int: ...
+
+ def __hash__(self) -> int: ...
+
+ def __index__(self) -> int: ...
+
+ def __init__(self, value: int) -> None: ...
+
+ def __int__(self) -> int: ...
+
+ def __ne__(self, other: object) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def __setstate__(self, state: int) -> None: ...
+
+ def __str__(self) -> str: ...
+
+ @property
+ def name(self) -> str:
+ """
+ :type: str
+ """
+ @property
+ def value(self) -> int:
+ """
+ :type: int
+ """
+    AUTO: paravision.recognition.types.Engine # value = <Engine.AUTO>
+    OPENVINO: paravision.recognition.types.Engine # value = <Engine.OPENVINO>
+    TENSORRT: paravision.recognition.types.Engine # value = <Engine.TENSORRT>
+    __members__: dict # value = {'AUTO': <Engine.AUTO>, 'OPENVINO': <Engine.OPENVINO>, 'TENSORRT': <Engine.TENSORRT>}
+ pass
+
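# Construction sketch, mirroring the deleted tests: pass a member to the SDK
# constructor, e.g. SDK(engine=Engine.OPENVINO, ...); Engine.AUTO defers the
# backend choice to the SDK itself.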
+
+class Face():
+ """
+ A face, minimally represented by a bounding box.
+
+ Attributes
+ ----------
+ bounding_box : BoundingBox
+ The bounding box of the face.
+ landmarks : Landmarks
+ The locations of various parts of the face.
+ embedding : Embedding
+ The embedding representing the face.
+ quality : float
+ An overall assessment of how acceptable the face is for facial
+ recognition. Overall range of [0, 1], "acceptable" quality is >= 0.15.
+ acceptability: float
+ The assessment of the acceptability of the face. Overall range [0, 1]
+    recognition_input_image, alignment_image, landmarks_input_image: numpy.ndarray
+        Images used at different stages of the detection pipeline.
+    alignment_bounding_box, landmarks_input_bounding_box: BoundingBox
+        The bounding boxes that are associated with alignment_image and landmarks_input_image
+ """
+ def __bool__(self) -> bool: ...
+
+ def __init__(self, bounding_box: BoundingBox) -> None:
+ """
+ Create a Face object from a BoundingBox.
+ """
+
+ def __repr__(self) -> str: ...
+
+ def asdict(self) -> dict:
+ """
+ Returns the face as a dictionary.
+ """
+
+ @property
+ def acceptability(self) -> float:
+ """
+ The acceptability of the face.
+
+ :type: float
+ """
+ @acceptability.setter
+ def acceptability(self, arg1: float) -> None:
+ """
+ The acceptability of the face.
+ """
+ @property
+ def alignment_bounding_box(self) -> BoundingBox:
+ """
+ The alignment bounding box of the face.
+
+ :type: BoundingBox
+ """
+ @alignment_bounding_box.setter
+ def alignment_bounding_box(self, arg1: BoundingBox) -> None:
+ """
+ The alignment bounding box of the face.
+ """
+ @property
+ def alignment_image(self) -> Image:
+ """
+ The alignment image of the face.
+
+ :type: Image
+ """
+ @alignment_image.setter
+ def alignment_image(self, arg1: Image) -> None:
+ """
+ The alignment image of the face.
+ """
+ @property
+ def attributes(self) -> BaseAttributes:
+ """
+ The attributes of the face.
+
+ :type: BaseAttributes
+ """
+ @attributes.setter
+ def attributes(self, arg1: BaseAttributes) -> None:
+ """
+ The attributes of the face.
+ """
+ @property
+ def bounding_box(self) -> BoundingBox:
+ """
+ The bounding box of the face.
+
+ :type: BoundingBox
+ """
+ @bounding_box.setter
+ def bounding_box(self, arg1: BoundingBox) -> None:
+ """
+ The bounding box of the face.
+ """
+ @property
+ def embedding(self) -> Embedding:
+ """
+ The embedding of the face.
+
+ :type: Embedding
+ """
+ @embedding.setter
+ def embedding(self, arg1: Embedding) -> None:
+ """
+ The embedding of the face.
+ """
+ @property
+ def image_source(self) -> ImageSource:
+ """
+ The source of the image where the face was detected. Possible values are unknown, mobile and webcam
+
+ :type: ImageSource
+ """
+ @image_source.setter
+ def image_source(self, arg1: ImageSource) -> None:
+ """
+ The source of the image where the face was detected. Possible values are UNKNOWN, MOBILE, and WEBCAM.
+ """
+ @property
+ def landmarks(self) -> Landmarks:
+ """
+ The landmarks of the face.
+
+ :type: Landmarks
+ """
+ @landmarks.setter
+ def landmarks(self, arg1: Landmarks) -> None:
+ """
+ The landmarks of the face.
+ """
+ @property
+ def landmarks_input_bounding_box(self) -> BoundingBox:
+ """
+ The landmarks input bounding box of the face.
+
+ :type: BoundingBox
+ """
+ @landmarks_input_bounding_box.setter
+ def landmarks_input_bounding_box(self, arg1: BoundingBox) -> None:
+ """
+ The landmarks input bounding box of the face.
+ """
+ @property
+ def landmarks_input_image(self) -> Image:
+ """
+ The landmarks input image of the face.
+
+ :type: Image
+ """
+ @landmarks_input_image.setter
+ def landmarks_input_image(self, arg1: Image) -> None:
+ """
+ The landmarks input image of the face.
+ """
+ @property
+ def quality(self) -> float:
+ """
+ The quality of the face.
+
+ :type: float
+ """
+ @quality.setter
+ def quality(self, arg1: float) -> None:
+ """
+ The quality of the face.
+ """
+ @property
+ def recognition_input_image(self) -> Image:
+ """
+ The recognition input image of the face.
+
+ :type: Image
+ """
+ @recognition_input_image.setter
+ def recognition_input_image(self, arg1: Image) -> None:
+ """
+ The recognition input image of the face.
+ """
+ pass
+
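+# Illustrative sketch (not part of the generated stub): a Face starts from just
+# a BoundingBox, and the remaining fields (landmarks, embedding, quality, ...)
+# are filled in by the pipeline; asdict() gives a plain-dict view. The
+# BoundingBox constructor arguments below are assumed for illustration.
+#
+#   from paravision.recognition.types import BoundingBox, Face
+#   face = Face(BoundingBox(0, 0, 100, 100))  # hypothetical box coordinates
+#   if face.quality >= 0.15:                  # documented "acceptable" threshold
+#       record = face.asdict()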
+
+class GPUConfig():
+ def __init__(self) -> None: ...
+
+ @property
+ def gpu_id(self) -> int:
+ """
+ The index of the GPU device to use.
+
+ :type: int
+ """
+ @gpu_id.setter
+ def gpu_id(self, arg0: int) -> None:
+ """
+ The index of the GPU device to use.
+ """
+ @property
+ def worker_count(self) -> int:
+ """
+ The number of workers to allocate.
+
+ :type: int
+ """
+ @worker_count.setter
+ def worker_count(self, arg0: int) -> None:
+ """
+ The number of workers to allocate.
+ """
+ pass
+
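+# Illustrative sketch: one GPUConfig describes worker allocation on a single
+# device; a list of them goes into Settings.gpu_configs (see Settings below).
+#
+#   config = GPUConfig()
+#   config.gpu_id = 0
+#   config.worker_count = 2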
+
+class Image():
+ """
+ An image, which can be either GPU backed or CPU backed.
+
+ Attributes
+ ----------
+ data : numpy.ndarray
+ The image data; if the image is GPU backed, the data will be copied to the CPU.
+ is_gpu : bool
+ Whether the image is GPU backed.
+ """
+ def __init__(self, img_data_or_path: object) -> None:
+ """
+ Construct an Image from either a path or a numpy array.
+ """
+
+ @property
+ def data(self) -> numpy.ndarray:
+ """
+ The image data; if the image is GPU backed, the data will be copied to the CPU.
+
+ :type: numpy.ndarray
+ """
+ @property
+ def is_gpu(self) -> bool:
+ """
+ Whether the image is GPU backed.
+
+ :type: bool
+ """
+ pass
+
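+# Illustrative sketch: Image accepts either a filesystem path or a numpy array,
+# and .data always yields a CPU-side array, copying from the GPU when the image
+# is GPU backed. "face.jpg" is a placeholder path.
+#
+#   import numpy
+#   img = Image("face.jpg")
+#   img2 = Image(numpy.zeros((112, 112, 3), dtype=numpy.uint8))
+#   if img.is_gpu:
+#       cpu_data = img.data  # copied to the CPU on access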
+
+class ImageInferenceData():
+ """
+ The result of running the get-faces pipeline on a single image.
+
+ Attributes
+ ----------
+ faces : list[Face]
+ A list of Face objects detected in the image.
+ width : int
+ The width of the inference image.
+ height : int
+ The height of the inference image.
+ """
+ def most_prominent_face_index(self) -> int:
+ """
+ The index of the most prominent face in the image.
+ """
+
+ @property
+ def faces(self) -> list[Face]:
+ """
+ The faces detected in the image.
+
+ :type: list[Face]
+ """
+ @faces.setter
+ def faces(self, arg1: list[Face]) -> None:
+ """
+ The faces detected in the image.
+ """
+ @property
+ def height(self) -> int:
+ """
+ The height of the image.
+
+ :type: int
+ """
+ @property
+ def width(self) -> int:
+ """
+ The width of the image.
+
+ :type: int
+ """
+ pass
+
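+# Illustrative sketch: selecting the most prominent face out of one image's
+# detections; `inference` is assumed to come from the get-faces pipeline.
+#
+#   idx = inference.most_prominent_face_index()
+#   main_face = inference.faces[idx]
+#   print(inference.width, inference.height, len(inference.faces))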
+
+class ImageManipulator():
+ """
+ Members:
+
+ AUTO : Automatically select the image manipulation device
+
+ CPU : Use the CPU for image manipulation
+
+ GPU : Use the GPU for image manipulation
+ """
+ def __eq__(self, other: object) -> bool: ...
+
+ def __getstate__(self) -> int: ...
+
+ def __hash__(self) -> int: ...
+
+ def __index__(self) -> int: ...
+
+ def __init__(self, value: int) -> None: ...
+
+ def __int__(self) -> int: ...
+
+ def __ne__(self, other: object) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def __setstate__(self, state: int) -> None: ...
+
+ def __str__(self) -> str: ...
+
+ @property
+ def name(self) -> str:
+ """
+ :type: str
+ """
+ @property
+ def value(self) -> int:
+ """
+ :type: int
+ """
+ AUTO: paravision.recognition.types.ImageManipulator # value =
+ CPU: paravision.recognition.types.ImageManipulator # value =
+ GPU: paravision.recognition.types.ImageManipulator # value =
+ __members__: dict # value = {'AUTO': , 'CPU': , 'GPU': }
+ pass
+
+
+class ImageSource():
+ """
+ Members:
+
+ UNKNOWN : Unknown image source
+
+ MOBILE : Mobile image source
+
+ WEBCAM : Webcam image source
+ """
+ def __eq__(self, other: object) -> bool: ...
+
+ def __getstate__(self) -> int: ...
+
+ def __hash__(self) -> int: ...
+
+ def __index__(self) -> int: ...
+
+ def __init__(self, value: int) -> None: ...
+
+ def __int__(self) -> int: ...
+
+ def __ne__(self, other: object) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def __setstate__(self, state: int) -> None: ...
+
+ def __str__(self) -> str: ...
+
+ @property
+ def name(self) -> str:
+ """
+ :type: str
+ """
+ @property
+ def value(self) -> int:
+ """
+ :type: int
+ """
+ MOBILE: paravision.recognition.types.ImageSource # value =
+ UNKNOWN: paravision.recognition.types.ImageSource # value =
+ WEBCAM: paravision.recognition.types.ImageSource # value =
+ __members__: dict # value = {'UNKNOWN': , 'MOBILE': , 'WEBCAM': }
+ pass
+
+
+class InferenceResult():
+ """
+ The result of running the get-faces pipeline on a group of images.
+
+ Attributes
+ ----------
+ faces : list[Face]
+ A list of Face objects detected across all images.
+ inferences : list[ImageInferenceData]
+ A list of per-image detection results.
+ """
+ @property
+ def faces(self) -> list[Face]:
+ """
+ List of Face objects
+
+ :type: list[Face]
+ """
+ @property
+ def image_inferences(self) -> list[ImageInferenceData]:
+ """
+ "Deprecated: This property will be removed in the next major release. Use the inferences property instead.
+
+ :type: list[ImageInferenceData]
+ """
+ @property
+ def inferences(self) -> list[ImageInferenceData]:
+ """
+ List of ImageInferenceData objects
+
+ :type: list[ImageInferenceData]
+ """
+ pass
+
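+# Illustrative sketch: `faces` flattens detections across all images, while
+# `inferences` keeps them grouped per image (prefer it over the deprecated
+# `image_inferences`). `result` is assumed to come from the get-faces pipeline.
+#
+#   for inference in result.inferences:
+#       for face in inference.faces:
+#           print(face.bounding_box)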
+
+class Landmarks():
+ """
+ A set of facial landmarks, represented by Points.
+
+ Attributes
+ ----------
+ left_eye : Point
+ The center of the left eye.
+ right_eye : Point
+ The center of the right eye.
+ nose : Point
+ The tip of the nose.
+ left_mouth : Point
+ The left corner of the mouth.
+ right_mouth : Point
+ The right corner of the mouth.
+ """
+ def __bool__(self) -> bool: ...
+
+ def __eq__(self, arg0: Landmarks) -> bool: ...
+
+ @typing.overload
+ def __init__(self, left_eye: Point, right_eye: Point, nose: Point, left_mouth: Point, right_mouth: Point) -> None:
+ """
+ Construct a Landmarks from five Points.
+ """
+
+ @typing.overload
+ def __init__(self, left_eye: typing.Iterable, right_eye: typing.Iterable, nose: typing.Iterable, left_mouth: typing.Iterable, right_mouth: typing.Iterable) -> None:
+ """
+ Construct a Landmarks from five iterables of two floats.
+ """
+
+ def __ne__(self, arg0: Landmarks) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def asdict(self) -> dict:
+ """
+ Convert this object to a dictionary
+ """
+
+ def empty(self) -> bool:
+ """
+ Check if this object is empty
+ """
+
+ @property
+ def left_eye(self) -> Point:
+ """
+ The left eye.
+
+ :type: Point
+ """
+ @left_eye.setter
+ def left_eye(self, arg1: Point) -> None:
+ """
+ The left eye.
+ """
+ @property
+ def left_mouth(self) -> Point:
+ """
+ The left mouth.
+
+ :type: Point
+ """
+ @left_mouth.setter
+ def left_mouth(self, arg1: Point) -> None:
+ """
+ The left mouth.
+ """
+ @property
+ def nose(self) -> Point:
+ """
+ The nose.
+
+ :type: Point
+ """
+ @nose.setter
+ def nose(self, arg1: Point) -> None:
+ """
+ The nose.
+ """
+ @property
+ def right_eye(self) -> Point:
+ """
+ The right eye.
+
+ :type: Point
+ """
+ @right_eye.setter
+ def right_eye(self, arg1: Point) -> None:
+ """
+ The right eye.
+ """
+ @property
+ def right_mouth(self) -> Point:
+ """
+ The right mouth.
+
+ :type: Point
+ """
+ @right_mouth.setter
+ def right_mouth(self, arg1: Point) -> None:
+ """
+ The right mouth.
+ """
+ __hash__ = None
+ pass
+
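+# Illustrative sketch: Landmarks takes either five Points or five two-float
+# iterables, in the order left_eye, right_eye, nose, left_mouth, right_mouth.
+#
+#   lm = Landmarks(Point(30.0, 40.0), Point(70.0, 40.0), Point(50.0, 60.0),
+#                  Point(35.0, 80.0), Point(65.0, 80.0))
+#   lm2 = Landmarks((30, 40), (70, 40), (50, 60), (35, 80), (65, 80))
+#   assert not lm.empty()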
+
+class Point():
+ """
+ A point within an image, represented by x- and y-coordinates.
+
+ Attributes
+ ----------
+ x : float
+ The x-coordinate.
+ y : float
+ The y-coordinate.
+ """
+ def __bool__(self) -> bool: ...
+
+ def __eq__(self, arg0: Point) -> bool: ...
+
+ @typing.overload
+ def __init__(self) -> None:
+ """
+ Construct an empty Point.
+ """
+
+ @typing.overload
+ def __init__(self, point: typing.Iterable) -> None:
+ """
+ Construct a Point from an iterable of two floats.
+ """
+
+ @typing.overload
+ def __init__(self, x: float, y: float) -> None:
+ """
+ Construct a point from x- and y-coordinates.
+ """
+
+ def __iter__(self) -> typing.Iterator: ...
+
+ def __ne__(self, arg0: Point) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def asdict(self) -> dict:
+ """
+ Convert this object to a dictionary
+ """
+
+ def empty(self) -> bool:
+ """
+ Check if this object is empty
+ """
+
+ @property
+ def x(self) -> float:
+ """
+ The x-coordinate.
+
+ :type: float
+ """
+ @x.setter
+ def x(self, arg1: float) -> None:
+ """
+ The x-coordinate.
+ """
+ @property
+ def y(self) -> float:
+ """
+ The y-coordinate.
+
+ :type: float
+ """
+ @y.setter
+ def y(self, arg1: float) -> None:
+ """
+ The y-coordinate.
+ """
+ __hash__ = None
+ pass
+
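+# Illustrative sketch: a Point can be built from coordinates or an iterable,
+# and is itself iterable, so it unpacks like an (x, y) pair.
+#
+#   p = Point(12.5, 40.0)
+#   x, y = p                      # __iter__ allows unpacking
+#   assert p == Point((12.5, 40.0))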
+
+class ScoringMode():
+ """
+ Members:
+
+ StandardEmbedding
+
+ EnhancedEmbedding
+ """
+ def __eq__(self, other: object) -> bool: ...
+
+ def __getstate__(self) -> int: ...
+
+ def __hash__(self) -> int: ...
+
+ def __index__(self) -> int: ...
+
+ def __init__(self, value: int) -> None: ...
+
+ def __int__(self) -> int: ...
+
+ def __ne__(self, other: object) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def __setstate__(self, state: int) -> None: ...
+
+ def __str__(self) -> str: ...
+
+ @property
+ def name(self) -> str:
+ """
+ :type: str
+ """
+ @property
+ def value(self) -> int:
+ """
+ :type: int
+ """
+ EnhancedEmbedding: paravision.recognition.types.ScoringMode # value =
+ StandardEmbedding: paravision.recognition.types.ScoringMode # value =
+ __members__: dict # value = {'StandardEmbedding': , 'EnhancedEmbedding': }
+ pass
+
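+# Illustrative sketch: ScoringMode selects between standard and enhanced
+# embedding scoring; which pipeline call consumes it is not shown in this stub.
+#
+#   mode = ScoringMode.EnhancedEmbedding
+#   assert mode.name == "EnhancedEmbedding"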
+
+class Settings():
+ def __init__(self) -> None: ...
+
+ @property
+ def additional_detection_models(self) -> list[str]:
+ """
+ additional detection models to load
+
+ :type: list[str]
+ """
+ @additional_detection_models.setter
+ def additional_detection_models(self, arg0: list[str]) -> None:
+ """
+ additional detection models to load
+ """
+ @property
+ def cache_generated_engine(self) -> bool:
+ """
+ TensorRT cache generated engine file
+
+ :type: bool
+ """
+ @cache_generated_engine.setter
+ def cache_generated_engine(self, arg0: bool) -> None:
+ """
+ TensorRT cache generated engine file
+ """
+ @property
+ def detection_model(self) -> str:
+ """
+ The detection model to use: "default", "streaming", or "mobile"
+
+ :type: str
+ """
+ @detection_model.setter
+ def detection_model(self, arg0: str) -> None:
+ """
+ The detection model to use: "default", "streaming", or "mobile"
+ """
+ @property
+ def engine(self) -> Engine:
+ """
+ Engine to use
+
+ :type: Engine
+ """
+ @engine.setter
+ def engine(self, arg0: Engine) -> None:
+ """
+ Engine to use
+ """
+ @property
+ def gpu_configs(self) -> list[GPUConfig]:
+ """
+ List of GPU configs for worker allocation across multiple GPUs
+
+ :type: list[GPUConfig]
+ """
+ @gpu_configs.setter
+ def gpu_configs(self, arg0: list[GPUConfig]) -> None:
+ """
+ List of GPU configs for worker allocation across multiple GPUs
+ """
+ @property
+ def image_manipulator(self) -> ImageManipulator:
+ """
+ Image manipulator
+
+ :type: ImageManipulator
+ """
+ @image_manipulator.setter
+ def image_manipulator(self, arg0: ImageManipulator) -> None:
+ """
+ Image manipulator
+ """
+ @property
+ def load_landmarks_model(self) -> bool:
+ """
+ Whether to load the landmarks model. Default is true.
+
+ :type: bool
+ """
+ @load_landmarks_model.setter
+ def load_landmarks_model(self, arg0: bool) -> None:
+ """
+ Whether to load the landmarks model. Default is true.
+ """
+ @property
+ def load_quality_model(self) -> bool:
+ """
+ Whether to load the quality model. Default is true.
+
+ :type: bool
+ """
+ @load_quality_model.setter
+ def load_quality_model(self, arg0: bool) -> None:
+ """
+ Whether to load the quality model. Default is true.
+ """
+ @property
+ def load_recognition_model(self) -> bool:
+ """
+ Whether to load the recognition model. Default is true.
+
+ :type: bool
+ """
+ @load_recognition_model.setter
+ def load_recognition_model(self, arg0: bool) -> None:
+ """
+ Whether to load the recognition model. Default is true.
+ """
+ @property
+ def openvino_threads_limit(self) -> int:
+ """
+ OpenVINO model threads
+
+ :type: int
+ """
+ @openvino_threads_limit.setter
+ def openvino_threads_limit(self, arg0: int) -> None:
+ """
+ OpenVINO model threads
+ """
+ @property
+ def tensorrt_engine_cache_path(self) -> str:
+ """
+ Location where the serialized model file is stored
+
+ :type: str
+ """
+ @tensorrt_engine_cache_path.setter
+ def tensorrt_engine_cache_path(self, arg0: str) -> None:
+ """
+ Location where the serialized model file is stored
+ """
+ @property
+ def tensorrt_fp16_layer_selection(self) -> bool:
+ """
+ TensorRT FP16 layer selection
+
+ :type: bool
+ """
+ @tensorrt_fp16_layer_selection.setter
+ def tensorrt_fp16_layer_selection(self, arg0: bool) -> None:
+ """
+ TensorRT FP16 layer selection
+ """
+ @property
+ def tensorrt_max_batch_size(self) -> int:
+ """
+ TensorRT batch size
+
+ :type: int
+ """
+ @tensorrt_max_batch_size.setter
+ def tensorrt_max_batch_size(self, arg0: int) -> None:
+ """
+ TensorRT batch size
+ """
+ @property
+ def tensorrt_optimization_level(self) -> int:
+ """
+ TensorRT optimization level
+
+ :type: int
+ """
+ @tensorrt_optimization_level.setter
+ def tensorrt_optimization_level(self, arg0: int) -> None:
+ """
+ TensorRT optimization level
+ """
+ @property
+ def use_cached_engine(self) -> bool:
+ """
+ TensorRT use cached engine file
+
+ :type: bool
+ """
+ @use_cached_engine.setter
+ def use_cached_engine(self, arg0: bool) -> None:
+ """
+ TensorRT use cached engine file
+ """
+ @property
+ def validness_checks(self) -> ValidnessCheck:
+ """
+ validness check flags
+
+ :type: ValidnessCheck
+ """
+ @validness_checks.setter
+ def validness_checks(self, arg0: ValidnessCheck) -> None:
+ """
+ validness check flags
+ """
+ @property
+ def validness_models_dir(self) -> str:
+ """
+ path to validness models directory
+
+ :type: str
+ """
+ @validness_models_dir.setter
+ def validness_models_dir(self, arg0: str) -> None:
+ """
+ path to validness models directory
+ """
+ @property
+ def worker_count(self) -> int:
+ """
+ The number of workers to allocate
+
+ :type: int
+ """
+ @worker_count.setter
+ def worker_count(self, arg0: int) -> None:
+ """
+ The number of workers to allocate
+ """
+ pass
+
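+# Illustrative sketch: a Settings object wires together the engine choice,
+# worker allocation, and validness checks before the SDK is initialized (the
+# initialization call itself is outside this stub).
+#
+#   settings = Settings()
+#   settings.engine = Engine.OPENVINO
+#   settings.detection_model = "default"   # or "streaming" / "mobile"
+#   settings.worker_count = 4
+#   settings.validness_checks = ValidnessCheck.LIVENESS | ValidnessCheck.AGE
+#   gpu = GPUConfig()
+#   gpu.gpu_id = 0
+#   gpu.worker_count = 2
+#   settings.gpu_configs = [gpu]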
+
+class ValidnessCheck():
+ """
+ Members:
+
+ NONE : No validness check flag
+
+ LIVENESS : Liveness validness check flag
+
+ AGE : Age validness check flag
+
+ DEEPFAKE : Deepfake validness check
+ """
+ def __eq__(self, other: object) -> bool: ...
+
+ def __getstate__(self) -> int: ...
+
+ def __hash__(self) -> int: ...
+
+ def __index__(self) -> int: ...
+
+ def __init__(self, value: int) -> None: ...
+
+ def __int__(self) -> int: ...
+
+ def __ne__(self, other: object) -> bool: ...
+
+ def __or__(self, arg0: ValidnessCheck) -> ValidnessCheck: ...
+
+ def __repr__(self) -> str: ...
+
+ def __setstate__(self, state: int) -> None: ...
+
+ def __str__(self) -> str: ...
+
+ @property
+ def name(self) -> str:
+ """
+ :type: str
+ """
+ @property
+ def value(self) -> int:
+ """
+ :type: int
+ """
+ AGE: paravision.recognition.types.ValidnessCheck # value =
+ DEEPFAKE: paravision.recognition.types.ValidnessCheck # value =
+ LIVENESS: paravision.recognition.types.ValidnessCheck # value =
+ NONE: paravision.recognition.types.ValidnessCheck # value =
+ __members__: dict # value = {'NONE': , 'LIVENESS': , 'AGE': , 'DEEPFAKE': }
+ pass
+
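+# Illustrative sketch: ValidnessCheck defines __or__, so individual checks
+# combine with the | operator, and NONE disables them all.
+#
+#   checks = ValidnessCheck.LIVENESS | ValidnessCheck.DEEPFAKE
+#   no_checks = ValidnessCheck.NONE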
+
+class ValidnessFeedback():
+ """
+ Members:
+
+ UNKNOWN : The result is unknown
+
+ FACE_QUALITY_POOR : The image quality is poor
+
+ FACE_ACCEPTABILITY_POOR : The face acceptability is poor
+
+ IMG_LIGHTING_DARK : The image lighting is too dark
+
+ IMG_LIGHTING_BRIGHT : The image lighting is too bright
+
+ FACE_SIZE_SMALL : The face is too small
+
+ FACE_SIZE_LARGE : The face is too large
+
+ FACE_POS_LEFT : The face is too far left
+
+ FACE_POS_RIGHT : The face is too far right
+
+ FACE_POS_HIGH : The face is too far up
+
+ FACE_POS_LOW : The face is too far down
+
+ FACE_MASK_FOUND : The face is covered by a mask
+
+ FACE_FRONTALITY_POOR : The face is not facing forward
+
+ FACE_SHARPNESS_POOR : The face sharpness is poor
+
+ FACE_RES_LOW : The face width is too low
+
+ TOO_MANY_FACES : One or more additional faces are nearby
+
+ FACE_ROLL_ANGLE_EXCEEDED : The maximum roll angle of the face has been exceeded.
+ """
+ def __eq__(self, other: object) -> bool: ...
+
+ def __getstate__(self) -> int: ...
+
+ def __hash__(self) -> int: ...
+
+ def __index__(self) -> int: ...
+
+ def __init__(self, value: int) -> None: ...
+
+ def __int__(self) -> int: ...
+
+ def __ne__(self, other: object) -> bool: ...
+
+ def __repr__(self) -> str: ...
+
+ def __setstate__(self, state: int) -> None: ...
+
+ def __str__(self) -> str: ...
+
+ @property
+ def name(self) -> str:
+ """
+ :type: str
+ """
+ @property
+ def value(self) -> int:
+ """
+ :type: int
+ """
+ FACE_ACCEPTABILITY_POOR: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_FRONTALITY_POOR: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_MASK_FOUND: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_POS_HIGH: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_POS_LEFT: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_POS_LOW: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_POS_RIGHT: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_QUALITY_POOR: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_RES_LOW: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_ROLL_ANGLE_EXCEEDED: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_SHARPNESS_POOR: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_SIZE_LARGE: paravision.recognition.types.ValidnessFeedback # value =
+ FACE_SIZE_SMALL: paravision.recognition.types.ValidnessFeedback # value =
+ IMG_LIGHTING_BRIGHT: paravision.recognition.types.ValidnessFeedback # value =
+ IMG_LIGHTING_DARK: paravision.recognition.types.ValidnessFeedback # value =
+ TOO_MANY_FACES: paravision.recognition.types.ValidnessFeedback # value =
+ UNKNOWN: paravision.recognition.types.ValidnessFeedback # value =
+ __members__: dict # value = {'UNKNOWN': , 'FACE_QUALITY_POOR': , 'FACE_ACCEPTABILITY_POOR': , 'IMG_LIGHTING_DARK': , 'IMG_LIGHTING_BRIGHT': , 'FACE_SIZE_SMALL': , 'FACE_SIZE_LARGE': , 'FACE_POS_LEFT': , 'FACE_POS_RIGHT': , 'FACE_POS_HIGH': , 'FACE_POS_LOW': , 'FACE_MASK_FOUND': , 'FACE_FRONTALITY_POOR': , 'FACE_SHARPNESS_POOR': , 'FACE_RES_LOW': , 'TOO_MANY_FACES': , 'FACE_ROLL_ANGLE_EXCEEDED': }
+ pass
+
+
+class ValidnessResult():
+ def __init__(self) -> None:
+ """
+ Construct an empty ValidnessResult
+ """
+
+ def __repr__(self) -> str: ...
+
+ def asdict(self) -> dict:
+ """
+ Convert this object to a dictionary
+ """
+
+ @property
+ def face_acceptability(self) -> float:
+ """
+ The acceptability of the face
+
+ :type: float
+ """
+ @property
+ def face_frontality(self) -> float:
+ """
+ The frontality of the face
+
+ :type: float
+ """
+ @property
+ def face_height(self) -> float:
+ """
+ The absolute face height found in the image
+
+ :type: float
+ """
+ @property
+ def face_height_pct(self) -> float:
+ """
+ The percentage of the face height relative to the image
+
+ :type: float
+ """
+ @property
+ def face_mask_prob(self) -> float:
+ """
+ The probability that the face has a mask
+
+ :type: float
+ """
+ @property
+ def face_position_pcts(self) -> tuple[float, float, float, float]:
+ """
+ The percentage of the face position relative to the image
+
+ :type: tuple[float, float, float, float]
+ """
+ @property
+ def face_quality(self) -> float:
+ """
+ The quality of the face
+
+ :type: float
+ """
+ @property
+ def face_roll_angle(self) -> float:
+ """
+ The roll angle of the face
+
+ :type: float
+ """
+ @property
+ def face_sharpness(self) -> float:
+ """
+ The sharpness of the face
+
+ :type: float
+ """
+ @property
+ def face_width(self) -> float:
+ """
+ The absolute face width found in the image
+
+ :type: float
+ """
+ @property
+ def face_width_pct(self) -> float:
+ """
+ The percentage of the face width relative to the image
+
+ :type: float
+ """
+ @property
+ def feedbacks(self) -> list[ValidnessFeedback]:
+ """
+ The list of validness check feedbacks
+
+ :type: list[ValidnessFeedback]
+ """
+ @property
+ def image_average_pixel_value(self) -> float:
+ """
+ The average pixel value of the image
+
+ :type: float
+ """
+ @property
+ def image_bright_pixel_pct(self) -> float:
+ """
+ The percentage of bright pixels in the image
+
+ :type: float
+ """
+ @property
+ def image_dark_pixel_pct(self) -> float:
+ """
+ The percentage of dark pixels in the image
+
+ :type: float
+ """
+ @property
+ def is_valid(self) -> bool:
+ """
+ Whether all the validness checks passed
+
+ :type: bool
+ """
+ pass
+
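+# Illustrative sketch: gate on is_valid and report the individual feedback
+# codes; `result` is assumed to come from a validness check call that is not
+# shown in this stub.
+#
+#   if not result.is_valid:
+#       for feedback in result.feedbacks:
+#           print(feedback.name)  # e.g. FACE_SIZE_SMALL
+#   print(result.face_quality, result.face_frontality, result.face_roll_angle)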
+
diff --git a/modules/paravision/recognition/utils.py b/modules/paravision/recognition/utils.py
deleted file mode 100644
index 2160b4f0e..000000000
--- a/modules/paravision/recognition/utils.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import cv2
-import numpy as np
-from typing import Union
-
-from .exceptions import InvalidInputException
-
-
-def load_image(img_data_or_path: Union[str, bytes]) -> np.ndarray:
- if isinstance(img_data_or_path, str):
- img = cv2.imread(img_data_or_path, cv2.IMREAD_COLOR)
- else:
- img = cv2.imdecode(
- np.frombuffer(img_data_or_path.read(), dtype=np.uint8), cv2.IMREAD_COLOR
- )
-
- if img is None:
- raise InvalidInputException("Couldn't load the invalid input image")
-
- return cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
diff --git a/modules/paravision/recognition/utils/__init__.pyi b/modules/paravision/recognition/utils/__init__.pyi
new file mode 100644
index 000000000..b63188560
--- /dev/null
+++ b/modules/paravision/recognition/utils/__init__.pyi
@@ -0,0 +1,16 @@
+from __future__ import annotations
+import paravision.recognition.utils
+import typing
+import numpy
+_Shape = typing.Tuple[int, ...]
+
+__all__ = [
+ "load_image"
+]
+
+
+def load_image(img_data_or_path: str) -> numpy.ndarray:
+ """
+ Loads an image from a given path
+ """
+
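+# Illustrative sketch: load an image from disk as a numpy array for the
+# pipeline; "face.jpg" is a placeholder path, and the (height, width, 3) shape
+# follows from the color load in the removed utils.py above.
+#
+#   from paravision.recognition.utils import load_image
+#   img = load_image("face.jpg")
+#   print(img.shape)  # e.g. (height, width, 3)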
diff --git a/modules/paravision_models/__init__.py b/modules/paravision_models/__init__.py
deleted file mode 100644
index 3eb6673ae..000000000
--- a/modules/paravision_models/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-try:
- from .recognition import *
-except ImportError:
- pass
diff --git a/modules/paravision_models/__pycache__/__init__.cpython-310.pyc b/modules/paravision_models/__pycache__/__init__.cpython-310.pyc
deleted file mode 100644
index 0d1456440..000000000
Binary files a/modules/paravision_models/__pycache__/__init__.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision_models/__pycache__/__init__.cpython-36.pyc b/modules/paravision_models/__pycache__/__init__.cpython-36.pyc
deleted file mode 100644
index a92d17d58..000000000
Binary files a/modules/paravision_models/__pycache__/__init__.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision_models/recognition/__init__.py b/modules/paravision_models/recognition/__init__.py
deleted file mode 100644
index 98efde90d..000000000
--- a/modules/paravision_models/recognition/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os
-
-# Variables populated by release build. Do not edit.
-_MODEL_NAME = "gen5-balanced"
-_MODEL_VERSION = "v1.1.2"
-_MODEL_ENGINE = "openvino-2021.4"
-
-
-def name():
- return _MODEL_NAME
-
-
-def version():
- return _MODEL_VERSION
-
-
-def engine():
- return _MODEL_ENGINE
-
-
-def location():
- return os.path.join(os.path.dirname(os.path.realpath(__file__)), "models")
-
-
-if engine() == "tensorrt":
- import glob
-
- TRT_ENGINE_PATH = location()
-
- def clear_cached_trt_engine():
- engine_files = glob.glob("{}/**/*.engine".format(TRT_ENGINE_PATH))
-
- for f in engine_files:
- try:
- os.remove(f)
- except Exception:
- raise Exception("Error deleting engine file: ", f)
diff --git a/modules/paravision_models/recognition/__pycache__/__init__.cpython-310.pyc b/modules/paravision_models/recognition/__pycache__/__init__.cpython-310.pyc
deleted file mode 100644
index 10c644dab..000000000
Binary files a/modules/paravision_models/recognition/__pycache__/__init__.cpython-310.pyc and /dev/null differ
diff --git a/modules/paravision_models/recognition/__pycache__/__init__.cpython-36.pyc b/modules/paravision_models/recognition/__pycache__/__init__.cpython-36.pyc
deleted file mode 100644
index f2c4b24cf..000000000
Binary files a/modules/paravision_models/recognition/__pycache__/__init__.cpython-36.pyc and /dev/null differ
diff --git a/modules/paravision_models/recognition/models/attributes/default/attributes.bin b/modules/paravision_models/recognition/models/attributes/default/attributes.bin
deleted file mode 100644
index 734bf2502..000000000
Binary files a/modules/paravision_models/recognition/models/attributes/default/attributes.bin and /dev/null differ
diff --git a/modules/paravision_models/recognition/models/attributes/default/attributes.xml b/modules/paravision_models/recognition/models/attributes/default/attributes.xml
deleted file mode 100644
index d136840b2..000000000
--- a/modules/paravision_models/recognition/models/attributes/default/attributes.xml
+++ /dev/null
@@ -1,11319 +0,0 @@
- [elided: 11,319-line OpenVINO IR <net> definition for the attributes model (input shape 1x3x112x112); the XML markup was lost in extraction, leaving only layer dimensions]
diff --git a/modules/paravision_models/recognition/models/detection/default/detection.bin b/modules/paravision_models/recognition/models/detection/default/detection.bin
deleted file mode 100644
index 185bbb5e7..000000000
Binary files a/modules/paravision_models/recognition/models/detection/default/detection.bin and /dev/null differ
diff --git a/modules/paravision_models/recognition/models/detection/default/detection.xml b/modules/paravision_models/recognition/models/detection/default/detection.xml
deleted file mode 100644
index eddda3430..000000000
--- a/modules/paravision_models/recognition/models/detection/default/detection.xml
+++ /dev/null
@@ -1,13760 +0,0 @@
- [all 13,760 deleted lines of the default detection.xml were reduced by extraction to bare <dim> values: an apparent 1x3x513x513 input, 1x5727x2 outputs, and intermediate layer shapes; the XML topology itself is unrecoverable]
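
detection.xml above is the topology half of an OpenVINO Intermediate Representation model; its weights live in the sibling detection.bin deleted alongside it. A minimal sketch, assuming the OpenVINO 2022.3 Python runtime from the setup notes, of how such an IR pair is normally loaded (the path is simply the repo path of the file deleted above):

from openvino.runtime import Core

core = Core()
# read_model() locates the matching detection.bin weights automatically
model = core.read_model("modules/paravision_models/recognition/models/detection/default/detection.xml")
compiled = core.compile_model(model, "CPU")
# e.g. prints the 1x3x513x513 input shape suggested by the surviving <dim> values
print([inp.get_partial_shape() for inp in compiled.inputs])
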
diff --git a/modules/paravision_models/recognition/models/detection/streaming/detection.bin b/modules/paravision_models/recognition/models/detection/streaming/detection.bin
deleted file mode 100644
index 339ad8182..000000000
Binary files a/modules/paravision_models/recognition/models/detection/streaming/detection.bin and /dev/null differ
diff --git a/modules/paravision_models/recognition/models/detection/streaming/detection.xml b/modules/paravision_models/recognition/models/detection/streaming/detection.xml
deleted file mode 100644
index a48ab3fe1..000000000
--- a/modules/paravision_models/recognition/models/detection/streaming/detection.xml
+++ /dev/null
@@ -1,12563 +0,0 @@
- [the deleted streaming detection.xml (12,563 lines) likewise survives only as bare <dim> values: an apparent 1x3x257x449 input, 1x2573x2 outputs, and intermediate layer shapes; the XML topology is unrecoverable]
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 1
- 192
- 17
- 29
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 1
- 192
- 17
- 29
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 1
- 192
- 17
- 29
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 17
- 29
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 17
- 29
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 17
- 29
-
-
-
-
-
-
-
- 1
- 96
- 17
- 29
-
-
- 1
- 96
- 17
- 29
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 1
- 192
- 17
- 29
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
-
-
-
- 4
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 4
- 192
- 3
- 3
-
-
-
-
- 1
- 4
- 17
- 29
-
-
-
-
-
-
-
- 1
- 4
- 1
- 1
-
-
-
-
-
-
-
- 1
- 4
- 17
- 29
-
-
- 1
- 4
- 1
- 1
-
-
-
-
- 1
- 4
- 17
- 29
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 4
- 17
- 29
-
-
- 3
-
-
-
-
- 1
- 4
- 493
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
-
-
- 1
- 192
- 9
- 15
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 192
- 9
- 15
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
- 1
- 192
- 9
- 15
-
-
-
-
- 1
- 192
- 9
- 15
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 192
- 9
- 15
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
- 1
- 192
- 9
- 15
-
-
-
-
- 1
- 192
- 9
- 15
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 9
- 15
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 9
- 15
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 9
- 15
-
-
-
-
-
-
-
- 1
- 96
- 9
- 15
-
-
- 1
- 96
- 9
- 15
-
-
-
-
- 1
- 192
- 9
- 15
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
- 1
- 192
- 9
- 15
-
-
-
-
- 1
- 192
- 9
- 15
-
-
-
-
-
-
-
- 4
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
- 4
- 192
- 3
- 3
-
-
-
-
- 1
- 4
- 9
- 15
-
-
-
-
-
-
-
- 1
- 4
- 1
- 1
-
-
-
-
-
-
-
- 1
- 4
- 9
- 15
-
-
- 1
- 4
- 1
- 1
-
-
-
-
- 1
- 4
- 9
- 15
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 4
- 9
- 15
-
-
- 3
-
-
-
-
- 1
- 4
- 135
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 192
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 192
- 192
- 1
- 1
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 1
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 1
- 192
- 1
- 1
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 1
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 1
- 192
- 1
- 1
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 1
- 192
- 5
- 8
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 1
- 192
- 5
- 8
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 5
- 8
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 5
- 8
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 5
- 8
-
-
-
-
-
-
-
- 1
- 96
- 5
- 8
-
-
- 1
- 96
- 5
- 8
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 1
- 192
- 5
- 8
-
-
-
-
- 1
- 192
- 5
- 8
-
-
-
-
-
-
-
- 4
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 4
- 192
- 3
- 3
-
-
-
-
- 1
- 4
- 5
- 8
-
-
-
-
-
-
-
- 1
- 4
- 1
- 1
-
-
-
-
-
-
-
- 1
- 4
- 5
- 8
-
-
- 1
- 4
- 1
- 1
-
-
-
-
- 1
- 4
- 5
- 8
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 4
- 5
- 8
-
-
- 3
-
-
-
-
- 1
- 4
- 40
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 3
- 4
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 192
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 192
- 192
- 3
- 3
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 1
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 1
- 192
- 1
- 1
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 1
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 1
- 192
- 1
- 1
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 1
- 192
- 3
- 4
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 48
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 48
- 192
- 1
- 1
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
-
-
-
- 1
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
- 1
- 48
- 1
- 1
-
-
-
-
- 1
- 48
- 3
- 4
-
-
-
-
-
-
-
- 96
- 48
- 1
- 1
-
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
- 96
- 48
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 96
- 48
- 3
- 3
-
-
-
-
-
-
-
- 1
- 48
- 3
- 4
-
-
- 96
- 48
- 3
- 3
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 1
- 1
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 1
- 1
-
-
-
-
- 1
- 96
- 3
- 4
-
-
-
-
-
-
-
- 1
- 96
- 3
- 4
-
-
- 1
- 96
- 3
- 4
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 1
- 192
- 3
- 4
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 4
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 4
- 192
- 3
- 3
-
-
-
-
- 1
- 4
- 3
- 4
-
-
-
-
-
-
-
- 1
- 4
- 1
- 1
-
-
-
-
-
-
-
- 1
- 4
- 3
- 4
-
-
- 1
- 4
- 1
- 1
-
-
-
-
- 1
- 4
- 3
- 4
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 4
- 3
- 4
-
-
- 3
-
-
-
-
- 1
- 4
- 12
-
-
-
-
-
-
-
- 192
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 192
- 192
- 3
- 3
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 1
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 1
- 192
- 1
- 1
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 1
- 192
- 1
- 1
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 1
- 192
- 1
- 1
-
-
-
-
- 1
- 192
- 3
- 4
-
-
-
-
-
-
-
- 4
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 4
- 192
- 3
- 3
-
-
-
-
- 1
- 4
- 3
- 4
-
-
-
-
-
-
-
- 1
- 4
- 1
- 1
-
-
-
-
-
-
-
- 1
- 4
- 3
- 4
-
-
- 1
- 4
- 1
- 1
-
-
-
-
- 1
- 4
- 3
- 4
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 4
- 3
- 4
-
-
- 3
-
-
-
-
- 1
- 4
- 12
-
-
-
-
-
-
-
- 1
- 4
- 1881
-
-
- 1
- 4
- 493
-
-
- 1
- 4
- 135
-
-
- 1
- 4
- 40
-
-
- 1
- 4
- 12
-
-
- 1
- 4
- 12
-
-
-
-
- 1
- 4
- 2573
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
- 1
- 4
- 2573
-
-
- 3
-
-
-
-
- 1
- 2573
- 4
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
- 1
- 2573
- 4
-
-
-
- 2
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 1
- 1
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 1
- 1
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 1
- 1
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
- 1
- 2573
- 4
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
- 1
- 2573
- 4
-
-
-
- 2
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 1
- 1
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 2573
- 4
-
-
- 3
-
-
- 3
-
-
- 3
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 2573
- 4
-
-
- 3
-
-
- 3
-
-
- 3
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 1
- 1
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
- 1
- 2573
- 2
-
-
-
-
-
-
-
- 1
- 2573
- 2
-
-
- 1
- 2573
- 2
-
-
-
-
- 1
- 2573
- 4
-
-
-
-
-
-
-
- 1
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 33
- 57
-
-
- 1
- 192
- 3
- 3
-
-
-
-
- 1
- 1
- 33
- 57
-
-
-
-
-
-
-
- 1
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 1
- 33
- 57
-
-
- 1
- 1
- 1
- 1
-
-
-
-
- 1
- 1
- 33
- 57
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 1
- 33
- 57
-
-
- 3
-
-
-
-
- 1
- 1
- 1881
-
-
-
-
-
-
-
- 1
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 17
- 29
-
-
- 1
- 192
- 3
- 3
-
-
-
-
- 1
- 1
- 17
- 29
-
-
-
-
-
-
-
- 1
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 1
- 17
- 29
-
-
- 1
- 1
- 1
- 1
-
-
-
-
- 1
- 1
- 17
- 29
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 1
- 17
- 29
-
-
- 3
-
-
-
-
- 1
- 1
- 493
-
-
-
-
-
-
-
- 1
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 9
- 15
-
-
- 1
- 192
- 3
- 3
-
-
-
-
- 1
- 1
- 9
- 15
-
-
-
-
-
-
-
- 1
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 1
- 9
- 15
-
-
- 1
- 1
- 1
- 1
-
-
-
-
- 1
- 1
- 9
- 15
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 1
- 9
- 15
-
-
- 3
-
-
-
-
- 1
- 1
- 135
-
-
-
-
-
-
-
- 1
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 5
- 8
-
-
- 1
- 192
- 3
- 3
-
-
-
-
- 1
- 1
- 5
- 8
-
-
-
-
-
-
-
- 1
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 1
- 5
- 8
-
-
- 1
- 1
- 1
- 1
-
-
-
-
- 1
- 1
- 5
- 8
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 1
- 5
- 8
-
-
- 3
-
-
-
-
- 1
- 1
- 40
-
-
-
-
-
-
-
- 1
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 1
- 192
- 3
- 3
-
-
-
-
- 1
- 1
- 3
- 4
-
-
-
-
-
-
-
- 1
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 1
- 3
- 4
-
-
- 1
- 1
- 1
- 1
-
-
-
-
- 1
- 1
- 3
- 4
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 1
- 3
- 4
-
-
- 3
-
-
-
-
- 1
- 1
- 12
-
-
-
-
-
-
-
- 1
- 192
- 3
- 3
-
-
-
-
-
-
-
- 1
- 192
- 3
- 4
-
-
- 1
- 192
- 3
- 3
-
-
-
-
- 1
- 1
- 3
- 4
-
-
-
-
-
-
-
- 1
- 1
- 1
- 1
-
-
-
-
-
-
-
- 1
- 1
- 3
- 4
-
-
- 1
- 1
- 1
- 1
-
-
-
-
- 1
- 1
- 3
- 4
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
-
- 1
- 1
- 3
- 4
-
-
- 3
-
-
-
-
- 1
- 1
- 12
-
-
-
-
-
-
-
- 1
- 1
- 1881
-
-
- 1
- 1
- 493
-
-
- 1
- 1
- 135
-
-
- 1
- 1
- 40
-
-
- 1
- 1
- 12
-
-
- 1
- 1
- 12
-
-
-
-
- 1
- 1
- 2573
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
- 1
- 1
- 2573
-
-
- 3
-
-
-
-
- 1
- 2573
- 1
-
-
-
-
-
-
- 1
- 2573
- 1
-
-
-
-
- 1
- 2573
- 1
-
-
-
-
-
-
-
- 3
-
-
-
-
-
-
- 1
- 2573
- 1
-
-
- 3
-
-
-
-
- 1
- 1
- 2573
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
- 2573
- 4
-
-
- 1
- 1
- 2573
-
-
- 1
-
-
- 1
-
-
- 1
-
-
-
-
- 200
- 3
-
-
- 200
- 3
-
-
- 1
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 200
- 3
-
-
- 2
-
-
-
-
- 200
- 3
-
-
-
-
-
-
- 200
- 3
-
-
-
-
-
-
- 1
- 1
- 2573
-
-
-
-
-
-
- 1
- 2573
- 4
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/modules/paravision_models/recognition/models/landmarks/default/landmarks.bin b/modules/paravision_models/recognition/models/landmarks/default/landmarks.bin
deleted file mode 100644
index 67e214b9e..000000000
Binary files a/modules/paravision_models/recognition/models/landmarks/default/landmarks.bin and /dev/null differ
diff --git a/modules/paravision_models/recognition/models/landmarks/default/landmarks.xml b/modules/paravision_models/recognition/models/landmarks/default/landmarks.xml
deleted file mode 100644
index 33a767814..000000000
--- a/modules/paravision_models/recognition/models/landmarks/default/landmarks.xml
+++ /dev/null
@@ -1,5243 +0,0 @@
- [… 5,243 deleted lines of landmarks.xml (OpenVINO IR model definition): XML markup lost in extraction; only layer dimension values remain (input 1×3×112×112, 64×3×3×3 stem convolution, 1×10 output, …) …]
diff --git a/modules/paravision_models/recognition/models/quality/default/quality.bin b/modules/paravision_models/recognition/models/quality/default/quality.bin
deleted file mode 100644
index 39f274b91..000000000
Binary files a/modules/paravision_models/recognition/models/quality/default/quality.bin and /dev/null differ
diff --git a/modules/paravision_models/recognition/models/quality/default/quality.xml b/modules/paravision_models/recognition/models/quality/default/quality.xml
deleted file mode 100644
index bbe995538..000000000
--- a/modules/paravision_models/recognition/models/quality/default/quality.xml
+++ /dev/null
@@ -1,1447 +0,0 @@
- [… 1,447 deleted lines of quality.xml (OpenVINO IR model definition): XML markup lost in extraction; only layer dimension values remain (input 1×3×112×112, 16×3×7×7 stem convolution, …) …]
diff --git a/modules/paravision_models/recognition/models/recognition/default/recognition.bin b/modules/paravision_models/recognition/models/recognition/default/recognition.bin
deleted file mode 100644
index f3bc75281..000000000
Binary files a/modules/paravision_models/recognition/models/recognition/default/recognition.bin and /dev/null differ
diff --git a/modules/paravision_models/recognition/models/recognition/default/recognition.xml b/modules/paravision_models/recognition/models/recognition/default/recognition.xml
deleted file mode 100644
index bbf877846..000000000
--- a/modules/paravision_models/recognition/models/recognition/default/recognition.xml
+++ /dev/null
@@ -1,23912 +0,0 @@
- [… deleted lines of recognition.xml (OpenVINO IR model definition, 23,912 lines per the hunk header): XML markup lost in extraction; only layer dimension values remain (input 1×3×112×112, 64×3×3×3 stem convolution, 56×56 and 28×28 feature maps, …) …]
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 2
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 16
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 16
- 128
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 128
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 128
- 16
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 4
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 2
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 16
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 16
- 128
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 128
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 128
- 16
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 4
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 2
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 16
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 16
- 128
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 128
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 128
- 16
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 4
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 2
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 16
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 16
- 128
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 128
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 128
- 16
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 4
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 2
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 16
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 16
- 128
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 128
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 128
- 16
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 4
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 2
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 16
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 16
- 128
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 128
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 128
- 16
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 4
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 2
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 16
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 16
- 128
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 128
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 128
- 16
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 4
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 128
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 128
- 128
- 3
- 3
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 2
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 16
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 16
- 128
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
- 1
- 1
- 1
- 16
-
-
-
-
-
-
-
- 128
- 16
-
-
-
-
-
-
-
- 1
- 1
- 1
- 16
-
-
- 128
- 16
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
- 1
- 1
- 1
- 128
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 128
-
-
- 4
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 28
- 28
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 4
-
-
- 4
-
-
-
-
-
- 1
- 128
- 29
- 29
-
-
-
-
-
-
-
- 256
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 29
- 29
-
-
- 256
- 128
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 1
- 128
- 1
- 1
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 1
- 128
- 1
- 1
-
-
-
-
- 1
- 128
- 28
- 28
-
-
-
-
-
-
-
- 256
- 128
- 3
- 3
-
-
-
-
-
-
-
- 1
- 128
- 28
- 28
-
-
- 256
- 128
- 3
- 3
-
-
-
-
- 1
- 256
- 28
- 28
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 28
- 28
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 28
- 28
-
-
-
-
-
-
- 1
- 256
- 28
- 28
-
-
-
-
- 1
- 256
- 28
- 28
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 28
- 28
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 28
- 28
-
-
- 4
-
-
- 4
-
-
-
-
-
- 1
- 256
- 29
- 29
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 29
- 29
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 256
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 256
- 256
- 3
- 3
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 2
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 32
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 32
- 256
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
- 1
- 1
- 1
- 32
-
-
-
-
-
-
-
- 256
- 32
-
-
-
-
-
-
-
- 1
- 1
- 1
- 32
-
-
- 256
- 32
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
- 1
- 1
- 1
- 256
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 256
-
-
- 4
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 14
- 14
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 4
-
-
- 4
-
-
-
-
-
- 1
- 256
- 15
- 15
-
-
-
-
-
-
-
- 512
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 15
- 15
-
-
- 512
- 256
- 3
- 3
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- 1
- 512
- 1
- 1
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
- 1
- 512
- 1
- 1
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 1
- 256
- 1
- 1
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 1
- 256
- 1
- 1
-
-
-
-
- 1
- 256
- 14
- 14
-
-
-
-
-
-
-
- 512
- 256
- 3
- 3
-
-
-
-
-
-
-
- 1
- 256
- 14
- 14
-
-
- 512
- 256
- 3
- 3
-
-
-
-
- 1
- 512
- 14
- 14
-
-
-
-
-
-
-
- 1
- 512
- 1
- 1
-
-
-
-
-
-
-
- 1
- 512
- 14
- 14
-
-
- 1
- 512
- 1
- 1
-
-
-
-
- 1
- 512
- 14
- 14
-
-
-
-
-
-
- 1
- 512
- 14
- 14
-
-
-
-
- 1
- 512
- 14
- 14
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 512
- 14
- 14
-
-
-
-
-
-
-
-
-
-
- 1
- 512
- 14
- 14
-
-
- 4
-
-
- 4
-
-
-
-
-
- 1
- 512
- 15
- 15
-
-
-
-
-
-
-
- 512
- 512
- 3
- 3
-
-
-
-
-
-
-
- 1
- 512
- 15
- 15
-
-
- 512
- 512
- 3
- 3
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- 1
- 512
- 1
- 1
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
- 1
- 512
- 1
- 1
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- 1
- 1
- 1
- 512
-
-
-
-
-
-
-
- 1
- 1
- 1
- 64
-
-
-
-
-
-
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
- 4
-
-
-
-
-
-
-
- 4
-
-
-
-
- 1
-
-
-
-
-
-
-
- 1
-
-
-
-
-
-
- 1
-
-
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 2
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
- 2
-
-
-
-
- 1
- 512
- 1
- 1
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 512
- 1
- 1
-
-
- 4
-
-
-
-
- 1
- 1
- 1
- 512
-
-
-
-
-
-
-
- 64
- 512
-
-
-
-
-
-
-
- 1
- 1
- 1
- 512
-
-
- 64
- 512
-
-
-
-
- 1
- 1
- 1
- 64
-
-
-
-
-
-
-
- 1
- 1
- 1
- 64
-
-
- 1
- 1
- 1
- 64
-
-
-
-
- 1
- 1
- 1
- 64
-
-
-
-
-
-
- 1
- 1
- 1
- 64
-
-
-
-
- 1
- 1
- 1
- 64
-
-
-
-
-
-
-
- 512
- 64
-
-
-
-
-
-
-
- 1
- 1
- 1
- 64
-
-
- 512
- 64
-
-
-
-
- 1
- 1
- 1
- 512
-
-
-
-
-
-
-
- 1
- 1
- 1
- 512
-
-
- 1
- 1
- 1
- 512
-
-
-
-
- 1
- 1
- 1
- 512
-
-
-
-
-
-
- 1
- 1
- 1
- 512
-
-
-
-
- 1
- 1
- 1
- 512
-
-
-
-
-
-
-
- 4
-
-
-
-
-
-
- 1
- 1
- 1
- 512
-
-
- 4
-
-
-
-
- 1
- 512
- 1
- 1
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
- 1
- 512
- 1
- 1
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
- 1
- 512
- 7
- 7
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- 1
- 512
- 1
- 1
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
- 1
- 512
- 1
- 1
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- 1
- 512
- 1
- 1
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
- 1
- 512
- 1
- 1
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- 512
- 512
- 3
- 3
-
-
-
-
-
-
-
- 1
- 512
- 7
- 7
-
-
- 512
- 512
- 3
- 3
-
-
-
-
- 1
- 512
- 7
- 7
-
-
-
-
-
-
-
- [… several thousand further deleted lines trimmed: the tail of this removed model definition, consisting of repeated tensor-shape entries (1x512x7x7 and 512x512x3x3 blocks, 1x25088 flattened vectors, 64- and 512/513-wide dims) followed by blank padding lines …]
diff --git a/modules/paravision_models/recognition/models/spec.json b/modules/paravision_models/recognition/models/spec.json
deleted file mode 100644
index 6ab6a0dbf..000000000
--- a/modules/paravision_models/recognition/models/spec.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "name": "gen5-balanced",
- "fd_input_shape": [
- 513,
- 513
- ],
- "fd_streaming_input_shape": [
- 257,
- 449
- ],
- "lm_input_shape": [
- 112,
- 112
- ],
- "md_input_shape": [
- 112,
- 112
- ],
- "fr_input_shape": [
- 112,
- 112
- ],
- "embedding_size": 513,
- "weight": 20.967800306995212,
- "bias": -58.33307640674702,
- "at_input_shape": [
- 112,
- 112
- ],
- "em_input_shape": [
- 112,
- 112
- ]
-}
\ No newline at end of file
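
The spec.json removed above carried the parameters downstream code used to size model inputs and map raw similarity into a score. A sketch of how a consumer would read such a spec; the field meanings are inferred from the names, and the linear weight/bias mapping is an assumption, not confirmed by this repo:

```python
# Sketch: how a spec.json like the one removed above could be consumed.
# The weight/bias score mapping below is an assumption based on field names.
import json

with open("modules/paravision_models/recognition/models/spec.json") as f:
    spec = json.load(f)

fd_h, fd_w = spec["fd_input_shape"]      # face-detector input size
emb_size = spec["embedding_size"]        # length of each face embedding
raw_similarity = 0.9                     # illustrative value
score = spec["weight"] * raw_similarity + spec["bias"]
print(fd_h, fd_w, emb_size, score)
```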
diff --git a/sbin/ctl.sh b/sbin/ctl.sh
index 91f197d11..e3e8d2925 100755
--- a/sbin/ctl.sh
+++ b/sbin/ctl.sh
@@ -68,12 +68,12 @@ mkdir -p $UKDI_yolo_outdir
# Determine all the possible libs based on what's in UKDI_yolo_devices
-PYP_OPENVINO="modules"
+PYP_OPENVINO="modules/para_models/gen6-balanced/venv_balanced/lib/python3.10/site-packages"
LIB_OPENVINO="modules/openvino/inference_engine/lib/intel64/:\
modules/openvino/inference_engine:\
modules/openvino/python3.10/dist-packages/openvino/libs:\
modules/openvino/python3.10/dist-packages/openvino/inference_engine"
-PYP_PARAVISION="modules"
+PYP_PARAVISION="modules/para_models/gen6-balanced/venv_balanced/lib/python3.10/site-packages"
PYP_DEEPFACE="modules"
PYP_YOLOV5="modules/yolov5-face_Jan1"
LIB_SEEK="modules/seek/Seekware_SDK_3.6.0.0/lib/x86_64-linux-gnu/"
@@ -103,6 +103,7 @@ do
"regula") ;;
"traffic") ;;
"camera") ;;
+ "camera_stream") ;;
*) echo "yoloserv does not implement backend $i. Edit /etc/ukdi.json::yolo_devices and try again."
exit 1
esac
@@ -153,6 +154,16 @@ function f_apt(){
# --trusted-host paravision.mycloudrepo.io
+ # . /usr/local/lib/python3.10/dist-packages/
+ # ROOTURL="http://2a76e3b5-733a-4c93-98fb-339927b0f90c:453a6a2d-2935-430b-a22f-9b2880ca381@paravision.mycloudrepo.io"
+ # pip3 install cmake --upgrade
+ # pip3 install --no-cache-dir\
+ # --extra-index-url $ROOTURL/repositories/python-sdk\
+ # --extra-index-url $ROOTURL/repositories/python-recognition\
+ # "paravision-recognition" "paravision-models-gen6-balanced-openvino-2022-3" "openvino==2022.3"\
+ # --trusted-host paravision.mycloudrepo.io
+
+
wget -O downloads/facerec.zip https://github.com/ageitgey/face_recognition/archive/refs/heads/master.zip
cd downloads
unzip facerec.zip
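
The key change in ctl.sh above is that PYP_OPENVINO and PYP_PARAVISION now point at a dedicated venv's site-packages instead of the flat modules directory. A quick way to confirm the interpreter actually picks up that copy; this is a sketch, assuming the process was launched with the path ctl.sh assembles:

```python
# Hypothetical check, not part of the repo: confirm which copy of the
# Paravision SDK is on the import path after ctl.sh sets it up.
import paravision.recognition

print(paravision.recognition.__file__)
# expected to resolve under:
# modules/para_models/gen6-balanced/venv_balanced/lib/python3.10/site-packages/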
diff --git a/src/camera_livefeed.py b/src/camera_livefeed.py
new file mode 100644
index 000000000..08496f003
--- /dev/null
+++ b/src/camera_livefeed.py
@@ -0,0 +1,55 @@
+# camera_livefeed.py
+import cherrypy
+import cv2
+import time
+from camera_stream import Camera_Stream  # threaded capture class from src/camera_stream.py
+
+# init camera
+cam = Camera_Stream()
+cam.open(cam=0, width=640, height=480)
+cam.start(fps=10)
+print("camera started on /dev/video0")
+
+class Root:
+ @cherrypy.expose
+ def index(self):
+        # note: use an explicit full URL here if your UI is served from another port
+        return 'MJPEG stream: <img src="/mjpeg">'
+
+ @cherrypy.expose
+ def mjpeg(self):
+ # tell cherrypy: this is a streaming response
+ cherrypy.response.stream = True
+
+ # set content type to multipart MJPEG
+ cherrypy.response.headers['Content-Type'] = 'multipart/x-mixed-replace; boundary=frame'
+
+ def generate():
+ while True:
+ frame = cam.get_frame()
+ if frame is None:
+ time.sleep(0.05)
+ continue
+
+ ok, jpg = cv2.imencode('.jpg', frame, [int(cv2.IMWRITE_JPEG_QUALITY), 80])
+ if not ok:
+ continue
+ data = jpg.tobytes()
+
+ # one frame
+ yield (b'--frame\r\n'
+ b'Content-Type: image/jpeg\r\n'
+ b'Content-Length: ' + str(len(data)).encode() + b'\r\n\r\n' +
+ data + b'\r\n')
+
+ # control fps
+ time.sleep(0.15)
+
+ return generate()
+
+if __name__ == "__main__":
+ cherrypy.config.update({
+ "server.socket_host": "0.0.0.0",
+ "server.socket_port": 8007,
+ })
+ cherrypy.quickstart(Root())
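
Browsers render multipart/x-mixed-replace natively inside an <img> tag, so a UI only needs to point an image at the /mjpeg URL. For headless testing, OpenCV's FFmpeg backend can also consume the stream; a minimal client sketch, where host and port mirror the defaults above and everything else is illustrative:

```python
# Sketch of a headless consumer for the /mjpeg endpoint served above.
import cv2

cap = cv2.VideoCapture("http://localhost:8007/mjpeg")  # MJPEG over HTTP
while cap.isOpened():
    ok, frame = cap.read()  # each read() decodes the next JPEG part
    if not ok:
        break
    cv2.imshow("remote feed", frame)
    if cv2.waitKey(1) & 0xFF == ord("q"):
        break
cap.release()
cv2.destroyAllWindows()
```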
diff --git a/src/camera_stream.py b/src/camera_stream.py
new file mode 100644
index 000000000..19193ee2d
--- /dev/null
+++ b/src/camera_stream.py
@@ -0,0 +1,230 @@
+# camera_stream.py
+import cv2, base64, json, time, threading, cherrypy
+from logger import logger
+
+
+class Camera_Stream(object):
+ """
+ @brief A class to provide a live camera stream for the user
+ @details commonly used with the face recognition process this class allows us to stream image back to the UI for the user
+ to view. Allows for higher scores as the user can postion themselves properly within the frame. These images are also passed
+ to other face match class allowing models to run the imgs providing a facematch score.
+ """
+
+ CAM = None
+ frame = None
+
+ def __init__(self):
+ """
+ @brief class constructor for camera_Stream
+ @var cap is the capture device to be used set to None by default allows us to check if the camera state is opened. Stops persistent opens being called.
+ @var running controls the life cycle of the capture thread when the thread should start to capture and when it should stop creating clean starts and exits
+ @var lock not really needed but allows the main program to read the data while the thread captures would allow us to get data mid-update if needed just good practice
+ @var thread stores the reference to the background thread that will handle the cont capture of frames
+ @var last_err stores last known error for debuggin uses
+ """
+ self.cap = None
+ self.running = False
+ self.lock = threading.Lock()
+ self.thread = None
+ self.last_err = None
+
+ """
+ @brief Method to open the camera
+ @details this method is used to open the camera it completes a handshake with the linux camera driver.Before opening a new camera it first closes any existing sessions.
+ Multiple handles to the same device with cv2 through V4L2 are not allowed. We then create a camera object with CV2s way of connecting a video device.
+ We make sure the value passed is an int V4L2 forces linux to use the Vide4Linux2 backend giving direct access to /dev/video. Essentially
+ this forms a low level link to the camera. We then perform a check to see if the camera opened correctly .isOpened verifies that the OS driver and hardware successfully responded.
+ We then set the cameras capture resolution which is a optional step. IE pixel resolution could be set to 640x480. We then set buffer size this reduces the latency between the camera
+ and the frame returned. By default V4L2 gives old frams setting the buffersize to 1 ensures the latest frame is always returned. Set the self.cap value and return successful JSON.
+ @var cam tells open CV which physical camera to open ie 0 will open /dev/video0
+ @var width provide a request frame width
+ @var height provide a request frame height
+ """
+
+ def open(self, cam=0, width=None, height=None):
+ """Explicit open for a live session."""
+ self.close()
+ # CAP_V4L2 Tells CV2 to use native linux drivers to open the camera
+ cap = cv2.VideoCapture(int(cam), cv2.CAP_V4L2)
+ if not cap.isOpened():
+ self.last_err = f"could not open {cam}"
+ return '{"status": 1, "remark": f"Could not open camera {cam}"}'
+
+ if width is not None:
+ cap.set(cv2.CAP_PROP_FRAME_WIDTH, int(width))
+ if height is not None:
+ cap.set(cv2.CAP_PROP_FRAME_HEIGHT, int(height))
+ try:
+ cap.set(cv2.CAP_PROP_BUFFERSIZE, 1)
+ except Exception:
+ pass
+ self.cap = cap
+ return '{"status": 0, "remark": "camera opened successfully"}'
+
+ """
+ @brief provide a live cont running vid feed
+ @details This method is responsible for launching the live camera background thread to cont read frames. We first perform the good practice check to see if the cam is ready we
+ also check to see if the camera is already running. This acts as a control flag for the loop. We then create a thread that will run the __loop function within this method. It will
+ grab frames from the camer.
+ @var fps is the frames per second set to 10 by default
+ @var PIXLO optional lower bound for pixel brightness needed
+ @var PIXHI optional upper bound for pixel brightness
+ """
+
+ def start(self, fps=10, PIXLO=80, PIXHI=250):
+ """Begin background capture loop; call after open()."""
+        if self.cap is None or not self.cap.isOpened():
+            self.last_err = "camera not open"
+            return '{"status": 1, "remark": "camera not open"}'
+        if self.running:
+            return '{"status": 0, "remark": "camera already running"}'
+ self.running = True
+ self.thread = threading.Thread(
+ target=self._loop, args=(fps, PIXLO, PIXHI), daemon=True
+ )
+ self.thread.start()
+ return '{"status": 0, "remark": "camera started successfully"}'
+
+ """
+ @brief function to produce thread frames
+ @details this function is used to constantly capture frames from the opened thread it keeps the latest good frame in memory under self.frame. The period is used to
+ convert the fps into a delay between reads. Loop will cont to run until the self.stop function is called or self.running is equal to false. Two things are returned when we
+ read from our camera it will return True if the frame was captured properly and the frm is the img itself in the form of a numpy array. We have two returns incase the
+ camera is to miss a frame which can happen the ok check prevents crashing. frm.mean computes pixel avg of the frame quick and dirty brightness check on the img optional if needed.
+ THIS LOOP WILL ONLY RETURN FRAMES WITHIN THE SPECIFIED PIXEL VALS
+ """
+
+ def _loop(self, fps, PIXLO, PIXHI):
+ period = 1.0 / max(1, int(fps))
+ while self.running:
+ ok, frm = self.cap.read()
+ if ok:
+ # optional light sanity
+ pa = frm.mean()
+ if PIXLO <= pa <= PIXHI:
+ with self.lock:
+ self.frame = frm
+ else:
+ # brief backoff to avoid pegging a core if camera hiccups
+ time.sleep(0.02)
+ time.sleep(period)
+
+ def stop(self):
+ """Stop background capture (keeps device open)."""
+ if not self.running:
+ return
+ self.running = False
+ if self.thread:
+ self.thread.join(timeout=1.0)
+ self.thread = None
+
+ """
+ @brief
+ @details
+ """
+
+ def close(self):
+ self.stop()
+ if self.cap is not None:
+ try:
+ self.cap.release()
+ except Exception:
+ pass
+ self.cap = None
+ return '{"status": 0, "remark": "camera closed successfully"}'
+
+ # --- accessors/encoders for middle/UI layers ---
+ def get_frame(self):
+ with self.lock:
+ return None if self.frame is None else self.frame.copy()
+
+ def dump(self):
+ return self.get_frame()
+
+ """
+ @brief get a base64 string of the latest img
+ @details we get the latest frame if the format of the img is set to jpeg or jpg we convert the img into a numpy array here you can see that the img quality is converted
+ to the compression number that is provided. If it is not JPG we convert it to a png numpy array. b64 turns the binary img bytes into a text string that is safe for json transport
+ @var fmt format of the img
+ @var quality tells lower layers like imencode how much compresion to apply to imgs not used in PNG only jpegs kept for API consistency for now
+ @var data_uri if set to true it will prefix the img string with data:image/png;base64 allowing it to be directly dropped into a img html tag helps the front end
+ """
+
+ def dump64(self, frm, fmt="png", quality=80, data_uri=True):
+        if frm is None:
+            return None, None
+ fmt = fmt.lower()
+ if fmt in ("jpg", "jpeg"):
+ ok, buf = cv2.imencode(
+ ".jpg", frm, [int(cv2.IMWRITE_JPEG_QUALITY), int(quality)]
+ )
+ mime = "image/jpeg"
+ else:
+ ok, buf = cv2.imencode(".png", frm)
+ mime = "image/png"
+ if not ok:
+ return None, None
+ b64 = base64.b64encode(buf.tobytes()).decode("ascii")
+ s = f"data:{mime};base64," + b64 if data_uri else b64
+ return s, mime
+
+ """
+ @brief return the most frame
+ @details returns the most recent frame ready for use in the form of json. We get a
+
+
+ @var quality tells lower layers like imencode how much compresion to apply to imgs not used in PNG only jpegs kept for API consistency for now
+ @var data_uri if set to true it will prefix the img string with data:image/png;base64 allowing it to be directly dropped into a img html tag helps the front end
+ """
+
+ def frame_json(self, quality=80, data_uri=True):
+        frm = self.get_frame()
+        if frm is None:
+            return '{ "status": 1, "remark": "%s" }' % (self.last_err or "no_frame")
+
+        h, w = frm.shape[:2]
+
+        s, mime = self.dump64(frm, fmt="png", quality=quality, data_uri=data_uri)
+        if s is None:
+            return '{ "status": 1, "remark": "%s" }' % (self.last_err or "encode_failed")
+ return (
+ '{ "status": 0, "remark": "img being returned", "img": "%s", "mime": "%s", "w": %d, "h": %d }'
+ % (s, mime, w, h)
+ )
+
+ def sharpness_score(self, frame):
+ gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+ return float(cv2.Laplacian(gray, cv2.CV_64F).var())
+
+
+def main():
+ cam = Camera_Stream()
+    if json.loads(cam.open(cam=0, width=640, height=480))["status"] != 0:
+        print(f"[ERROR] Could not open camera: {cam.last_err}")
+        return
+ cam.start(fps=10)
+
+ try:
+ while True:
+ frame = cam.get_frame()
+ if frame is None:
+ time.sleep(0.05)
+ continue
+
+ sharp = cam.sharpness_score(frame)
+ print(f"\rSharpness: {sharp:.1f}", end="")
+
+ cv2.imshow("Live Camera Feed", frame)
+ k = cv2.waitKey(1) & 0xFF
+ if k == ord("q"):
+ break
+ finally:
+ cam.stop()
+ cam.close()
+ cv2.destroyAllWindows()
+
+
+if __name__ == "__main__":
+ main()
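
Every Camera_Stream method reports its outcome as a JSON status string rather than raising, so callers are expected to json.loads each return value. A minimal sketch of that calling convention; the device index and timings are illustrative:

```python
# Sketch: drive Camera_Stream through its JSON status-string convention.
import json
import time

from camera_stream import Camera_Stream

cam = Camera_Stream()
if json.loads(cam.open(cam=0, width=640, height=480))["status"] != 0:
    raise SystemExit(cam.last_err)
cam.start(fps=10)
time.sleep(1.0)  # give the capture thread time to publish a first frame
reply = json.loads(cam.frame_json())
print(reply["remark"], reply.get("w"), reply.get("h"))
cam.close()
```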
diff --git a/src/face_recognitionx.py b/src/face_recognitionx.py
index 4e2243c72..1f25d6cd1 100644
--- a/src/face_recognitionx.py
+++ b/src/face_recognitionx.py
@@ -159,7 +159,7 @@ if __name__ == '__main__':
d = FaceRecognition()
if sys.argv[1]=="regula":
- jsonstr = d.crowd_vs_govid("pic1", "/tmp/localcam.jpg", 0, "pic2", "/tmp/regula/Portrait_0.jpg", 0.25)
+ jsonstr = d.crowd_vs_govid("pic1", "/tmp/localcam.jpg", 0, "pic2", "/tmp/regula/graphic_6_Portrait.jpg", 0.25)
print(jsonstr)
if sys.argv[1]=="messi":
diff --git a/src/logger.py b/src/logger.py
new file mode 100644
index 000000000..41299a8c3
--- /dev/null
+++ b/src/logger.py
@@ -0,0 +1,39 @@
+import os
+import logging
+from logging.handlers import TimedRotatingFileHandler
+
+# Define log directory
+LOG_DIR = "/tmp/logs/yolo"
+os.makedirs(LOG_DIR, exist_ok=True)
+
+# Define log file paths
+ACCESS_LOG_FILE = os.path.join(LOG_DIR, "access.log") # INFO, DEBUG
+ERROR_LOG_FILE = os.path.join(LOG_DIR, "error.log") # WARNING, ERROR, CRITICAL
+
+# Create a logger
+logger = logging.getLogger("yolo")
+logger.setLevel(logging.DEBUG) # Capture all log levels
+
+# Define log format
+log_format = "%(asctime)s || %(levelname)s || %(filename)s:%(lineno)d || %(funcName)s || %(message)s"
+formatter = logging.Formatter(log_format)
+
+# --- Handler for Access Logs (DEBUG and INFO) ---
+access_handler = TimedRotatingFileHandler(ACCESS_LOG_FILE, when="midnight", interval=1, backupCount=7)
+access_handler.setLevel(logging.DEBUG)
+access_handler.addFilter(lambda record: record.levelno < logging.WARNING)  # keep WARNING+ out of access.log
+access_handler.setFormatter(formatter)
+
+# --- Handler for Error Logs (WARNING and higher) ---
+error_handler = TimedRotatingFileHandler(ERROR_LOG_FILE, when="midnight", interval=1, backupCount=7)
+error_handler.setLevel(logging.WARNING)
+error_handler.setFormatter(formatter)
+
+# Attach handlers to the logger
+logger.addHandler(access_handler)
+logger.addHandler(error_handler)
+
+# --- Console Output for Debugging ---
+console_handler = logging.StreamHandler()
+console_handler.setFormatter(formatter)
+console_handler.setLevel(logging.DEBUG)
+logger.addHandler(console_handler)
\ No newline at end of file
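
Modules pick the shared logger up with a one-line import; with the handler levels above, WARNING and higher land in error.log, DEBUG and INFO in access.log, and everything echoes to the console. For example:

```python
# Sketch: how the other modules in src/ are expected to use the shared logger.
from logger import logger

logger.debug("frame grabbed")          # access.log + console
logger.info("camera stream opened")    # access.log + console
logger.error("camera failed to open")  # error.log + console
```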
diff --git a/src/paravisionox.py b/src/paravisionox.py
new file mode 100644
index 000000000..ec3c41b77
--- /dev/null
+++ b/src/paravisionox.py
@@ -0,0 +1,134 @@
+from paravision.recognition.sdk import SDK
+from paravision.recognition.types import Settings, ValidnessCheck
+from paravision.recognition.exceptions import ParavisionException
+from paravision.liveness import SDK as Liveness2DSDK
+from paravision.liveness.types import (
+ Settings as Liveness2DSettings,
+ ValidnessSettings,
+)
+import paravision.recognition.utils as pru
+import json
+import shutil
+import sys
+import os
+
+
+
+class Paravisionox(object):
+
+    def init(self):
+        # per-image state used by load1/detect/compare below
+        self.files = {}
+        self.imgs = {}
+        self.models = {}
+        self.quality = {}
+        self.boxes = []
+        self.match_score = 0
+        self.errstr = ""
+        self.jsonx = ""
+        try:
+ settings = Settings()
+ # 1 is the default
+ settings.worker_count = 1
+ settings.detection_model = "default"
+ settings.validness_checks = ValidnessCheck.LIVENESS
+ self.sdk = SDK(settings=settings)
+ liveness2d_settings = Liveness2DSettings()
+ self.liveness2d_sdk = Liveness2DSDK(settings=liveness2d_settings)
+ except ParavisionException as e:
+ # error handling logic
+ print("Exception:", e)
+
+
+ # @doc Load a pic using the device label
+ def load1(self, name,fname):
+ print(" Loading image '%s' from file %s" % (name, fname))
+ if not os.path.isfile(fname):
+ print(" * file not found: %s" % (fname))
+ return '{ "status":442565, "remark":"file name not found", "guilty_param":"fname", "guilty_value":"%s" }' % (fname)
+ self.files[name] = fname
+ self.imgs[name] = pru.load_image(fname)
+ print(" Loaded %s from file %s" % (name, fname))
+ return '{ "status":0, "remark":"OK", "name":"%s", "fname":"%s" }' % (name,fname)
+
+
+ # @doc find all the faces in the named image
+ def detect(self, name):
+ self.boxes = []
+ print("** face_recognition::detect ... %s" % name)
+ try:
+ # Get all faces from images with qualities, landmarks, and embeddings
+ faces = self.sdk.get_faces([self.imgs[name]], qualities=True, landmarks=True, embeddings=True)
+ inferences = faces.image_inferences
+ ix = inferences[0].most_prominent_face_index()
+ self.validness(ix, self.sdk)
+ self.models[name] = inferences[0].faces[ix].embedding
+ self.quality[name] = round(1000*inferences[0].faces[ix].quality)
+ self.boxes = [ (0,0,0,0) ]
+ except Exception as ex:
+ self.errstr = "image processing exception at get_faces: "+str(ex)
+ return '{ "status":222310, "remark":"image processing exception", "guilty_param":"error", "guilty_value":"%s" }' % str(ex)
+ return '{ "status":0, "remark":"OK", "faces":%d, "boxes":%s }' % (len(self.boxes), json.dumps(self.boxes))
+
+
+ # @doc This does everything for you.
+ # If you are smartserv, "crowd" means cam and "govid" means regula pic
+ def crowd_vs_govid(self, name1,file1,scale1, name2,file2,scale2):
+ print("##1##")
+ if self.json2obj(self.load1(name1, file1))["status"] != 0:
+ return self.jsonx
+ if self.json2obj(self.detect(name1))["status"] != 0:
+ return self.jsonx
+ self.save(name1,"/tmp")
+
+ print("##2##")
+ if self.json2obj(self.load1(name2, file2))["status"] != 0:
+ return self.jsonx
+ self.save(name2,"/tmp")
+ if self.json2obj(self.detect(name2))["status"]!=0:
+ return self.jsonx
+ self.save(name2,"/tmp")
+
+ print("##R##")
+ jsonstr = self.compare(name1,name2)
+ print(jsonstr)
+ return jsonstr
+
+    # @doc parse a JSON status string, remembering it in self.jsonx so callers can return it on failure
+    def json2obj(self, jsonx):
+        self.jsonx = jsonx
+        return json.loads(jsonx)
+
+    # @doc keep a copy of the named image's source file in outdir (minimal placeholder implementation)
+    def save(self, name, outdir):
+        if name in self.files:
+            shutil.copy(self.files[name], outdir)
+
+ # @doc compare two named images, previously loaded
+ def compare(self, name1, name2):
+ print("** face_recognition::compare ... %s vs %s" % (name1,name2))
+ try:
+ res = self.sdk.get_match_score(self.models[name1], self.models[name2])
+ print("Match is ",res)
+ self.match_score = res
+ except Exception as ex:
+ print("** paravision::compare exception ... " + str(ex) )
+ self.errstr = "image comparison exception at compute_scores: "+str(ex)
+ return '{ "status":332410, "remark":"%s" }' % self.errstr
+ return '{ "status":0, "threshold": 800, "device1_qual": 0.5, "device2_qual": 0.5, "remark":"OK", "score":%d }' % self.match_score
+
+ def validness(self, face, sdk):
+ validness_settings = ValidnessSettings(face)
+ validness_result = self.liveness2d_sdk.check_validness(face, validness_settings, sdk)
+ print(validness_result)
+
+ def scores(self):
+ return '{ "status":0, "threshold": 800, "device1_qual": 0.5, "device2_qual": 0.5, "remark":"OK", "score":%d }' % self.match_score
+
+if __name__ == '__main__':
+
+ d = Paravisionox()
+ d.init()
+
+ if sys.argv[1]=="messia":
+ jsonstr = d.load1("pic1", "testimg/messi4.jpg")
+ print(jsonstr)
+ jsonstr = d.detect("pic1")
+ print(jsonstr)
+
+ if sys.argv[1]=="test":
+ d.load1("pic1", "testimg/ox.jpg")
+ d.detect("pic1")
+
+ if sys.argv[1]=="kiosk":
+ jsonstr = d.crowd_vs_govid("pic1", "testimg/ox.jpg", 0, "pic2", "testimg/ox_govid.jpg", 0.25)
+ print(jsonstr)
+
+ if sys.argv[1]=="messi":
+ jsonstr = d.crowd_vs_govid("pic1", "testimg/messi4.jpg", 0, "pic2", "testimg/messi2.jpg", 0)
+ print(jsonstr)
+
+ if sys.argv[1]=="maiden":
+ jsonstr = d.crowd_vs_govid("pic1", "testimg/ironmaiden.jpg", 0, "pic2", "testimg/davemurray.jpg", 0)
+ print(jsonstr)
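
crowd_vs_govid wraps the whole load/detect/compare pipeline, so external callers only need to parse a single JSON string. A sketch of calling it from another module rather than the CLI block above; the image paths are illustrative:

```python
# Sketch: calling Paravisionox programmatically; paths are placeholders.
import json

from paravisionox import Paravisionox

pv = Paravisionox()
pv.init()
result = json.loads(pv.crowd_vs_govid(
    "cam", "/tmp/localcam.jpg", 0,
    "govid", "/tmp/regula/graphic_6_Portrait.jpg", 0.25,
))
if result["status"] == 0:
    print("score:", result["score"], "threshold:", result["threshold"])
else:
    print("face match failed:", result["remark"])
```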
diff --git a/src/yoloserv.py b/src/yoloserv.py
index 9b6fb709b..db1c94276 100644
--- a/src/yoloserv.py
+++ b/src/yoloserv.py
@@ -6,6 +6,7 @@ import os
import sys
import json
from pathlib import Path
+from logger import logger
+import time  # used by cam_livefeed below
# General image processing
import cv2
@@ -15,8 +16,9 @@ import numpy
# Yoloserv contains references to a number of packages that do different things.
-#
#
+#
+
class yoloserv(object):
@@ -33,10 +35,10 @@ class yoloserv(object):
facematcher = None
palmmatcher = None
traffic = None
- ir_camera = None
+ ir_camera = None
devices = []
points = []
- state = "READY"
+ state = "READY"
    # Nature of init depends on the required algorithms listed in /etc/ukdi.conf
# eg :: "yolo_devices": "detect_face,facematch"
@@ -49,10 +51,13 @@ class yoloserv(object):
# palmvein - - palm vein detection
# @doc Initialisation takes device specs from /etc/ukdi.json
def initialise(self):
- with open("/etc/ukdi.json","r") as f:
+ with open("/etc/ukdi.json", "r") as f:
self.conf = json.loads(f.read())
- print("Init yoloserv: %s @ %s %s " % (self.conf["yolo_devices"], self.indir, self.outdir) )
+ print(
+ "Init yoloserv: %s @ %s %s "
+ % (self.conf["yolo_devices"], self.indir, self.outdir)
+ )
self.devices = self.conf["yolo_devices"].split(",")
self.indir = self.conf["yolo_indir"]
self.outdir = self.conf["yolo_outdir"]
@@ -61,18 +66,21 @@ class yoloserv(object):
if "paravision" in self.devices:
print("Loading paravision facematch...")
from paravisionx import Paravision
+
self.facematcher = Paravision()
self.facematcher.init()
if "deepface" in self.devices:
print("Loading deepface facematch...")
from deepfacex import Deepfacex
+
self.facematcher = Deepfacex()
- self.facematcher.init("dlib","Facenet512")
+ self.facematcher.init("dlib", "Facenet512")
if "face_recognition" in self.devices:
print("Loading face_recognition facematch...")
from face_recognitionx import FaceRecognition
+
self.facematcher = FaceRecognition()
self.facematcher.init()
@@ -80,6 +88,7 @@ class yoloserv(object):
if "facematch" in self.devices:
print("Loading paravision facematch...")
from facematch_open import Facematch
+
self.facematcher = Facematch()
self.facematcher.init()
@@ -87,24 +96,33 @@ class yoloserv(object):
if "camera" in self.devices:
print("AAAAAAAAAAA Loading YUV standard camera...")
from camera import Camera
- self.camera = Camera()
+
+ self.camera = Camera()
self.camera.init()
+ if "camera_stream" in self.devices:
+ print("Loading Camera with Live img Feed functionality")
+ from camera_stream import Camera_Stream
+
+ self.camera_stream = Camera_Stream()
+ self.camera_stream.__init__()
if "seek" in self.devices:
from seek import Seek
- self.seek = Seek()
+
+ self.seek = Seek()
if "flir" in self.devices:
print("AAAAAAAAAAA Loading flir IR... [NOT YET IMPLEMETED]")
- self.ircamera = Flir()
+ self.ircamera = Flir()
self.ircamera.init()
if "fjpalmvein" in self.devices:
print("AAAAAAAAAAA Loading fjpalmvein... [NOT YET IMPLEMETED]")
- self.palmvein = self.FJPalmvein()
+ self.palmvein = self.FJPalmvein()
self.palmvein.init()
# Simple processing with opencv
if "opencv" in self.devices:
print("AAAAAAAAAAA Loading opencv...")
from opencv import OpenCV
+
self.opencv = OpenCV()
self.opencv.init()
@@ -112,22 +130,23 @@ class yoloserv(object):
if "yolov5" in self.devices:
print("AAAAAAAAAAA Loading yolov5 object detection...")
from yolov5 import Yolov5
+
self.detector = Yolov5()
- #self.detector.init(self.indir,self.outdir)
+ # self.detector.init(self.indir,self.outdir)
if "yolov8" in self.devices:
print("AAAAAAAAAAA Loading yolov8 object detection...")
from yolov8 import Yolov8
+
self.detector = Yolov8()
- #self.detector.init(self.indir,self.outdir)
-
+ # self.detector.init(self.indir,self.outdir)
+
# Intoxication
if "intox" in self.devices:
print("AAAAAAAAAAA Loading intoxivision...")
self.intox_detector = Intox()
self.intox_detector.init()
-
# @doc clear memory of all image data
@cherrypy.expose
def svc_init(self):
@@ -137,10 +156,10 @@ class yoloserv(object):
# @doc the device used depends on the device list and what actual file was loaded as self.camera
# @doc acquires an image from the camera (test OK CG 2024-0724)
@cherrypy.expose
- def svc_acquire(self,camidx=0):
+ def svc_acquire(self, camidx=0):
self.camera.acquire(camidx)
return '{ "status":0, "remark":"OK" }'
-
+
# Test- (opencv)
@cherrypy.expose
def svc_show(self):
@@ -149,9 +168,13 @@ class yoloserv(object):
# @doc saves the camera image to a file (test OK CG 2024-0724)
@cherrypy.expose
- def svc_save(self,filename,extn="png"):
- self.camera.save(self.outdir + filename,extn)
- return '{ "status":0, "remark":"OK", "outfile": "%s/%s.%s" }' % (self.outdir,filename,extn)
+ def svc_save(self, filename, extn="png"):
+ self.camera.save(self.outdir + filename, extn)
+ return '{ "status":0, "remark":"OK", "outfile": "%s/%s.%s" }' % (
+ self.outdir,
+ filename,
+ extn,
+ )
# @doc dumps the camera image as an array (test OK CG 2024-0724)
@cherrypy.expose
@@ -171,126 +194,129 @@ class yoloserv(object):
buf = self.camera.dump64().decode()
return " " % buf
-
-
# Find faces - the algorithm used depends on the device list and what actual file was loaded as self.camera
# @doc load an image from a file using the specified yoloserv plugin (test OK CG 2024-0724)
@cherrypy.expose
- def svc_load_img(self,name,infile):
+ def svc_load_img(self, name, infile):
return self.facematcher.load1(name, self.indir + infile)
# @doc load images from two files using the specified yoloserv plugin (test OK CG 2024-0724)
@cherrypy.expose
- def svc_load_imgs(self,name1,infile1,name2,infile2):
- return self.facematcher.load2(name1, self.indir + infile1, name2, self.indir + infile2)
+ def svc_load_imgs(self, name1, infile1, name2, infile2):
+ return self.facematcher.load2(
+ name1, self.indir + infile1, name2, self.indir + infile2
+ )
@cherrypy.expose
- def svc_detect_faces(self,name):
- return self.facematcher.detect(name)
+ def svc_detect_faces(self, name):
+ return self.facematcher.detect(name)
# @doc find all the faces in the named image that was loaded using the above calls (test OK CG 2024-0724)
@cherrypy.expose
- def svc_faces(self,which):
+ def svc_faces(self, which):
return self.facematcher.get_faces(which)
# @doc find the most prominent face in the named image that was loaded using the above calls (test OK CG 2024-0724)
# you can access the new ideal face (if present) with the image name "_crop"
@cherrypy.expose
- def svc_ideal(self,which):
- return self.facematcher.ideal(which,which+"_rect",which+"_crop")
+ def svc_ideal(self, which):
+ return self.facematcher.ideal(which, which + "_rect", which + "_crop")
    # @doc dumps the named image as an <img> tag for straight-to-HTML output (test OK CG 2024-0724)
@cherrypy.expose
- def svc_imgtag(self,which):
+ def svc_imgtag(self, which):
buf = self.facematcher.dump64(which).decode()
return " " % buf
@cherrypy.expose
- def svc_save_face(self,which):
- return self.facematcher.save(which, self.conf["yolo_outdir"]);
+ def svc_save_face(self, which):
+ return self.facematcher.save(which, self.conf["yolo_outdir"])
# Match faces together
@cherrypy.expose
- def svc_compare(self,name1,name2):
- return self.facematcher.compare(name1,name2)
+ def svc_compare(self, name1, name2):
+ return self.facematcher.compare(name1, name2)
-
# Traffic analysis
@cherrypy.expose
- def svc_traffic(self,infile=None):
+ def svc_traffic(self, infile=None):
return self.facematcher.traffic(infile)
-
-
-
@cherrypy.expose
def shutdown(self):
- print ("Shutdown on client action")
+ print("Shutdown on client action")
os._exit(0)
-
-
-
-
# @doc find the landmarks in the named image (test OK CG 2024-0724)
@cherrypy.expose
- def svc_get_landmarks(self,which):
+ def svc_get_landmarks(self, which):
return self.facematcher.get_landmarks(which)
# Match faces together
@cherrypy.expose
- def svc_match_faces(self,dev1,fil1,scl1s,dev2,fil2,scl2s):
+ def svc_match_faces(self, dev1, fil1, scl1s, dev2, fil2, scl2s):
scl1 = float(scl1s)
scl2 = float(scl2s)
- jsonstr = self.facematcher.crowd_vs_govid(dev1,self.conf["yolo_indir"]+fil1,scl1, dev2,self.conf["yolo_indir"]+fil2,scl2)
+ jsonstr = self.facematcher.crowd_vs_govid(
+ dev1,
+ self.conf["yolo_indir"] + fil1,
+ scl1,
+ dev2,
+ self.conf["yolo_indir"] + fil2,
+ scl2,
+ )
obj = self.json2obj(jsonstr)
return jsonstr
-
- def json2obj(self,jsonx):
+
+ def json2obj(self, jsonx):
return json.loads(jsonx)
# @doc put all the steps for a retail facematch into one convenient functions
@cherrypy.expose
- def svc_facematch(self,dev1,dev2):
+ def svc_facematch(self, dev1, dev2):
if self.facematcher is None:
- return '{ "status":777244, "remark":"suitable yolo_device" }'
+ return '{ "status":777244, "remark":"suitable yolo_device" }'
if self.conf["emulate_facematch"]:
- return '{ "status":0, "remark":"OK(Emulated)", "data":{"device1":"%s","device2":"%s","device1_qual":123,"device2_qual":234,"score":600} }' % (dev1,dev2)
+ return (
+ '{ "status":0, "remark":"OK(Emulated)", "data":{"device1":"%s","device2":"%s","device1_qual":123,"device2_qual":234,"score":600} }'
+ % (dev1, dev2)
+ )
if dev1 == "regula":
- fil1 = "/tmp/regula/Portrait_0.jpg"
+ fil1 = "/tmp/regula/graphic_6_Portrait.jpg"
scl1 = 0.25
if dev1 == "localcam":
fil1 = "/tmp/localcam.png"
scl1 = 0.5
if dev2 == "regula":
- fil2 = "/tmp/regula/Portrait_0.jpg"
+ fil2 = "/tmp/regula/graphic_6_Portrait.jpg"
scl2 = 0.25
if dev2 == "localcam":
fil2 = "/tmp/localcam.png"
scl2 = 0.5
if self.conf["emulate_facematch"]:
- return '{ "status":0, "remark":"OK", "data":{} }'
+ return '{ "status":0, "remark":"OK", "data":{} }'
- jsonstr = self.facematcher.crowd_vs_govid(dev1,fil1,scl1, dev2,fil2,scl2)
+ jsonstr = self.facematcher.crowd_vs_govid(dev1, fil1, scl1, dev2, fil2, scl2)
obj = self.json2obj(jsonstr)
if obj["status"] > 0:
return jsonstr
jsonstr = self.facematcher.scores()
return '{ "status":0, "remark":"OK", "data": %s }' % (jsonstr)
-
-
@cherrypy.expose
def realsense(self):
- #return '{ "status":0, "remark":"OK" }'
+ # return '{ "status":0, "remark":"OK" }'
# camera is not reliable so bypassing this for now
self.devices["realsense"].reset()
rc = self.devices["realsense"].open()
if rc is not None:
- return '{ "status":233413, "remark":"Realsense problem", "guilty_param":"realsense cam", "guilty_value":"%s" }' % (rc)
+ return (
+ '{ "status":233413, "remark":"Realsense problem", "guilty_param":"realsense cam", "guilty_value":"%s" }'
+ % (rc)
+ )
rc = self.devices["realsense"].hid_read()
self.devices["realsense"].close()
@@ -299,10 +325,104 @@ class yoloserv(object):
return '{ "status":0, "remark":"OK" }'
-
+ """
+ @brief call camera_stream to open and start the camera stream
+ @details
+ @var
+ @var
+ @var
+ """
@cherrypy.expose
- def get_regula_data(self,type="json"):
+ def open_camera_stream(self):
+ if self.camera_stream.running:
+ return '{"status": 0, "remark": "camera already streaming"}'
+ data = self.camera_stream.open()
+ logger.info(data)
+ data = json.loads(data)
+ print(data)
+ if data["status"] != 0:
+ return '{"status": 1, "remark": "camera failed to open"}'
+ data_start = self.camera_stream.start()
+ logger.info(data_start)
+ data_start = json.loads(data_start)
+ if data_start["status"] != 0:
+ return '{"status": 1, "remark": "camera failed to start"}'
+ return '{"status": 0, "remark": "camera opened and is streaming"}'
+
+ """
+ @brief
+ @details
+ """
+
+ @cherrypy.expose
+ def close_camera_stream(self):
+        return self.camera_stream.close()
+
+ """
+ @brief
+ @details
+ """
+
+ @cherrypy.expose
+ def get_camera_frame(self):
+ data = self.camera_stream.frame_json()
+ return data
+
+ @cherrypy.expose
+ def cam_livefeed(self):
+        # No active stream: return immediately rather than looping forever
+ if not self.camera_stream.running:
+ return
+
+ # Do not buffer the response
+ cherrypy.response.stream = True
+ # Resp in many parts each new part replaces the previous
+ cherrypy.response.headers["Content-Type"] = (
+ "multipart/x-mixed-replace; boundary=frame"
+ )
+ # Prevents caching
+ cherrypy.response.headers["Cache-Control"] = (
+ "no-cache, no-store, must-revalidate"
+ )
+ # Prevents caching
+ cherrypy.response.headers["Pragma"] = "no-cache"
+ # Tell the browser that the content is expired
+ cherrypy.response.headers["Expires"] = "0"
+ # Set the timeout to 60 seconds
+ cherrypy.response.timeout = 60
+
+ # Generator function
+ def gen():
+ try:
+ while True:
+ frm = self.camera_stream.get_frame()
+ if frm is None:
+ time.sleep(0.01)
+ continue
+ ok, buf = cv2.imencode(
+ ".jpg", frm, [int(cv2.IMWRITE_JPEG_QUALITY), 70]
+ )
+ if not ok:
+ continue
+ jpg = buf.tobytes()
+                    # Pause the function here and yield this chunk, dumping data directly into the HTTP response stream
+ yield (
+ b"--frame\r\n"
+ b"Content-Type: image/jpeg\r\n"
+ b"Content-Length: "
+ + str(len(jpg)).encode("ascii")
+ + b"\r\n\r\n"
+ + jpg
+ + b"\r\n"
+ )
+ except GeneratorExit:
+ return # client disconnected
+
+ return gen()
+
+ @cherrypy.expose
+ def get_regula_data(self, type="json"):
# # TODO for testing only - see /etc/ukdi.conf to switch it in or out.
# # See etc/regula.json for an example file
# if type=="json":
@@ -313,11 +433,11 @@ class yoloserv(object):
if self.conf["emulate_identity"]:
# TODO for testing only - see /etc/ukdi.conf to switch it in or out.
# See etc/regula.json for an example file
- if type=="json":
+ if type == "json":
f = open("/tmp/regula.json")
rdata = f.read()
f.close()
- return '{ "status":0, "remark":"EMULATED", "data":%s }' % (rdata)
+ return '{ "status":0, "remark":"EMULATED", "data":%s }' % (rdata)
self.devices["regula"].open()
if not self.devices["regula"].has_data():
return '{ "status":249412, "remark":"No data available", "guilty_param":"id reader", "guilty_value":"no data available" }'
@@ -327,40 +447,38 @@ class yoloserv(object):
if rdata is None:
return '{ "status":249413, "remark":"No data available" }'
self.devices["regula"].close()
- return '{ "status":0, "remark":"OK", "data":%s }' % (rdata)
-
+ return '{ "status":0, "remark":"OK", "data":%s }' % (rdata)
@cherrypy.expose
def photoval(self, ident):
- # TO DO - match ident to pic,
+ # TO DO - match ident to pic,
# TO DO - acquire cam pic,
# TO DO - compare photo ID pic to cam pic,
return '{ "status":0, "remark":"OK", "data": { "match":90, "confidence":91 } }'
-
@cherrypy.expose
- def irstill(self,ident,type="png"):
+ def irstill(self, ident, type="png"):
if self.state == self.BUSY:
return '{ "status":9, "remark":"BUSY" }'
- self.state = self.BUSY
+ self.state = self.BUSY
self.seek.open()
self.seek.hid_read()
self.seek.close()
self.state = self.READY
- f = open("/tmp/%s.%s" % (ident,type), "wb")
- f.write( self.seek.img(".%s" % type) )
+ f = open("/tmp/%s.%s" % (ident, type), "wb")
+ f.write(self.seek.img(".%s" % type))
f.close()
- return self.seek.png64()
+ return self.seek.png64()
@cherrypy.expose
- def yolo(self,ident):
- return self.devices["seek"].detect(ident)
+ def yolo(self, ident):
+ return self.devices["seek"].detect(ident)
@cherrypy.expose
def irface(self):
if self.state == self.BUSY:
return '{ "status":9, "remark":"BUSY" }'
- self.state = self.BUSY
+ self.state = self.BUSY
self.devices["camera"].open()
self.devices["camera"].hid_read()
self.devices["camera"].haar("face.xml")
@@ -368,13 +486,12 @@ class yoloserv(object):
self.state = self.READY
return self.devices["camera"].png()
-
# Take a still, find a face in it and return only the face region
@cherrypy.expose
def face(self):
if self.state == self.BUSY:
return '{ "status":9, "remark":"BUSY" }'
- self.state = self.BUSY
+ self.state = self.BUSY
self.devices["camera"].open()
self.devices["camera"].hid_read()
self.devices["camera"].haar("face.xml")
@@ -382,13 +499,12 @@ class yoloserv(object):
self.state = self.READY
return self.devices["camera"].raw()
-
# Take a still, find a face in it convolve it with a kernel and return only the face region
@cherrypy.expose
def face_conv(self):
if self.state == self.BUSY:
return '{ "status":9, "remark":"BUSY" }'
- self.state = self.BUSY
+ self.state = self.BUSY
self.devices["camera"].open()
self.devices["camera"].hid_read()
self.devices["camera"].haar("face.xml")
@@ -397,13 +513,12 @@ class yoloserv(object):
self.state = self.READY
return self.devices["camera"].raw()
-
# Take a still, find a palm in it and return only the palm region
@cherrypy.expose
def palm(self):
if self.state == self.BUSY:
return '{ "status":9, "remark":"BUSY" }'
- self.state = self.BUSY
+ self.state = self.BUSY
self.devices["camera"].open()
self.devices["camera"].hid_read()
self.devices["camera"].haar("palm.xml")
@@ -411,7 +526,6 @@ class yoloserv(object):
self.state = self.READY
return self.C.raw()
-
# Take a still, find a palm in it convolve with a kernel and return that
@cherrypy.expose
def palm_conv(self):
@@ -427,7 +541,7 @@ class yoloserv(object):
return self.devices["camera"].raw()
-if __name__ == '__main__':
+if __name__ == "__main__":
# Deal with the incoming call parameters
servport = int(sys.argv[1])
@@ -436,8 +550,8 @@ if __name__ == '__main__':
s.initialise()
s.indir = sys.argv[2]
s.outdir = sys.argv[3]
-
- cherrypy.config.update({'server.socket_host': '0.0.0.0',
- 'server.socket_port': servport})
- cherrypy.quickstart(s, '/')
-
+
+ cherrypy.config.update(
+ {"server.socket_host": "0.0.0.0", "server.socket_port": servport}
+ )
+ cherrypy.quickstart(s, "/")